ceilometer-2014.1.5/0000775000567000056700000000000012540643223015235 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/babel.cfg0000664000567000056700000000002112540642613016756 0ustar jenkinsjenkins00000000000000[python: **.py] ceilometer-2014.1.5/setup-test-env.sh0000775000567000056700000000174312540642615020510 0ustar jenkinsjenkins00000000000000#!/bin/bash set -e function clean_exit(){ local error_code="$?" rm -rf ${MONGO_DATA} kill $(jobs -p) return $error_code } # Setup MongoDB test server MONGO_DATA=`mktemp -d /tmp/CEILO-MONGODB-XXXXX` MONGO_PORT=29000 trap "clean_exit" EXIT mkfifo ${MONGO_DATA}/out export PATH=${PATH:+$PATH:}/sbin:/usr/sbin if ! which mongod >/dev/null 2>&1 then echo "Could not find mongod command" 1>&2 exit 1 fi mongod --maxConns 32 --nojournal --noprealloc --smallfiles --quiet --noauth --port ${MONGO_PORT} --dbpath "${MONGO_DATA}" --bind_ip localhost --config /dev/null &>${MONGO_DATA}/out & # Wait for Mongo to start listening to connections while read line do echo "$line" | grep -q "waiting for connections on port ${MONGO_PORT}" && break done < ${MONGO_DATA}/out # Read the fifo for ever otherwise mongod would block cat ${MONGO_DATA}/out > /dev/null & export CEILOMETER_TEST_MONGODB_URL="mongodb://localhost:${MONGO_PORT}/ceilometer" # Yield execution to venv command $* ceilometer-2014.1.5/.coveragerc0000664000567000056700000000020112540642613017351 0ustar jenkinsjenkins00000000000000[run] branch = True source = ceilometer omit = ceilometer/tests/*, ceilometer/openstack/common/* [report] ignore-errors = True ceilometer-2014.1.5/bin/0000775000567000056700000000000012540643223016005 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/bin/ceilometer-rpc-zmq-receiver0000775000567000056700000000330712540642615023263 0ustar jenkinsjenkins00000000000000#!/usr/bin/env python # vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2011 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import eventlet eventlet.monkey_patch() import contextlib import os import sys # If ../ceilometer/__init__.py exists, add ../ to Python search path, so that # it will override what happens to be installed in /usr/(local/)lib/python... 
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]), os.pardir, os.pardir)) if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'ceilometer', '__init__.py')): sys.path.insert(0, POSSIBLE_TOPDIR) from oslo.config import cfg from ceilometer.openstack.common import log as logging from ceilometer.openstack.common import rpc from ceilometer.openstack.common.rpc import impl_zmq CONF = cfg.CONF CONF.register_opts(rpc.rpc_opts) CONF.register_opts(impl_zmq.zmq_opts) def main(): CONF(sys.argv[1:], project='ceilometer') logging.setup("ceilometer") with contextlib.closing(impl_zmq.ZmqProxy(CONF)) as reactor: reactor.consume_in_thread() reactor.wait() if __name__ == '__main__': main() ceilometer-2014.1.5/bin/ceilometer-test-event.py0000775000567000056700000000477212540642615022624 0ustar jenkinsjenkins00000000000000#!/usr/bin/env python # -*- encoding: utf-8 -*- # # Copyright © 2013 Rackspace Hosting. # # Author: Monsyne Dragon # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Command line tool help you debug your event definitions. Feed it a list of test notifications in json format, and it will show you what events will be generated. """ import json import sys from oslo.config import cfg from stevedore import extension from ceilometer.event import converter from ceilometer import service cfg.CONF.register_cli_opts([ cfg.StrOpt('input-file', short='i', help='File to read test notifications from.' ' (Containing a json list of notifications.)' ' defaults to stdin.'), cfg.StrOpt('output-file', short='o', help='File to write results to. 
Defaults to stdout.'), ]) TYPES = {1: 'text', 2: 'int', 3: 'float', 4: 'datetime'} service.prepare_service() config_file = converter.get_config_file() output_file = cfg.CONF.output_file input_file = cfg.CONF.input_file if output_file is None: out = sys.stdout else: out = open(output_file, 'w') if input_file is None: notifications = json.load(sys.stdin) else: with open(input_file, 'r') as f: notifications = json.load(f) out.write("Definitions file: %s\n" % config_file) out.write("Notifications tested: %s\n" % len(notifications)) event_converter = converter.setup_events( extension.ExtensionManager( namespace='ceilometer.event.trait_plugin')) for notification in notifications: event = event_converter.to_event(notification) if event is None: out.write("Dropped notification: %s\n" % notification['message_id']) continue out.write("Event: %s at %s\n" % (event.event_name, event.generated)) for trait in event.traits: dtype = TYPES[trait.dtype] out.write(" Trait: name: %s, type: %s, value: %s\n" % ( trait.name, dtype, trait.value)) ceilometer-2014.1.5/CONTRIBUTING.rst0000664000567000056700000000104012540642615017675 0ustar jenkinsjenkins00000000000000If you would like to contribute to the development of OpenStack, you must follow the steps in the "If you're a developer, start here" section of this page: http://wiki.openstack.org/HowToContribute Once those steps have been completed, changes to OpenStack should be submitted for review via the Gerrit tool, following the workflow documented at: http://wiki.openstack.org/GerritWorkflow Pull requests submitted through GitHub will be ignored. Bugs should be filed on Launchpad, not GitHub: https://bugs.launchpad.net/ceilometer ceilometer-2014.1.5/ceilometer/0000775000567000056700000000000012540643223017365 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/nova_client.py0000664000567000056700000000734012540642615022250 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Author: John Tran # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import functools import novaclient from novaclient.v1_1 import client as nova_client from oslo.config import cfg from ceilometer.openstack.common import log cfg.CONF.import_group('service_credentials', 'ceilometer.service') LOG = log.getLogger(__name__) def logged(func): @functools.wraps(func) def with_logging(*args, **kwargs): try: return func(*args, **kwargs) except Exception as e: LOG.exception(e) raise return with_logging class Client(object): """A client which gets information via python-novaclient.""" def __init__(self): """Initialize a nova client object.""" conf = cfg.CONF.service_credentials tenant = conf.os_tenant_id or conf.os_tenant_name self.nova_client = nova_client.Client( username=conf.os_username, api_key=conf.os_password, project_id=tenant, auth_url=conf.os_auth_url, region_name=conf.os_region_name, endpoint_type=conf.os_endpoint_type, cacert=conf.os_cacert, insecure=conf.insecure, no_cache=True) def _with_flavor_and_image(self, instances): for instance in instances: self._with_flavor(instance) self._with_image(instance) return instances def _with_flavor(self, instance): fid = instance.flavor['id'] try: flavor = self.nova_client.flavors.get(fid) except novaclient.exceptions.NotFound: flavor = None attr_defaults = [('name', 'unknown-id-%s' % fid), ('vcpus', 0), ('ram', 0), ('disk', 0), ('ephemeral', 0)] for attr, default in attr_defaults: if not flavor: instance.flavor[attr] = default continue instance.flavor[attr] = getattr(flavor, attr, default) def _with_image(self, instance): try: iid = instance.image['id'] except TypeError: instance.image = None instance.kernel_id = None instance.ramdisk_id = None return try: image = self.nova_client.images.get(iid) except novaclient.exceptions.NotFound: instance.image['name'] = 'unknown-id-%s' % iid instance.kernel_id = None instance.ramdisk_id = None return instance.image['name'] = getattr(image, 'name') image_metadata = getattr(image, 'metadata', None) for attr in ['kernel_id', 'ramdisk_id']: ameta = image_metadata.get(attr) if image_metadata else None setattr(instance, attr, ameta) @logged def instance_get_all_by_host(self, hostname): """Returns list of instances on particular host.""" search_opts = {'host': hostname, 'all_tenants': True} return self._with_flavor_and_image(self.nova_client.servers.list( detailed=True, search_opts=search_opts)) @logged def floating_ip_get_all(self): """Returns all floating ips.""" return self.nova_client.floating_ips.list() ceilometer-2014.1.5/ceilometer/hardware/0000775000567000056700000000000012540643223021162 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/hardware/plugin.py0000664000567000056700000001004112540642615023032 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 ZHAW SoE # Copyright © 2014 Intel Corp. # # Authors: Lucas Graf # Toni Zehnder # Lianhao Lu # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Base class for plugins used by the hardware agent.""" import abc import itertools import six from ceilometer.central import plugin from ceilometer.hardware import inspector as insloader from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log from ceilometer.openstack.common import network_utils LOG = log.getLogger(__name__) @six.add_metaclass(abc.ABCMeta) class HardwarePollster(plugin.CentralPollster): """Base class for plugins that support the polling API.""" CACHE_KEY = None INSPECT_METHOD = None def __init__(self): super(HardwarePollster, self).__init__() self.inspectors = {} def get_samples(self, manager, cache, resources=[]): """Return an iterable of Sample instances from polling the resources. :param manager: The service manager invoking the plugin :param cache: A dictionary for passing data between plugins :param resources: end point to poll data from """ h_cache = cache.setdefault(self.CACHE_KEY, {}) sample_iters = [] for res in resources: parsed_url = network_utils.urlsplit(res) inspector = self._get_inspector(parsed_url) func = getattr(inspector, self.INSPECT_METHOD) try: # Call hardware inspector to poll for the data i_cache = h_cache.setdefault(res, {}) if res not in i_cache: i_cache[res] = list(func(parsed_url)) # Generate samples if i_cache[res]: sample_iters.append(self.generate_samples(parsed_url, i_cache[res])) except Exception as err: LOG.exception(_('inspector call %(func)r failed for ' 'host %(host)s: %(err)s'), dict(func=func, host=parsed_url.hostname, err=err)) return itertools.chain(*sample_iters) def generate_samples(self, host_url, data): """Generate an iterable Sample from the data returned by inspector :param host_url: host url of the endpoint :param data: list of data returned by the corresponding inspector """ return (self.generate_one_sample(host_url, datum) for datum in data) @abc.abstractmethod def generate_one_sample(self, host_url, c_data): """Return one Sample. :param host_url: host url of the endpoint :param c_data: data returned by the corresponding inspector, of one of the types defined in the file ceilometer.hardware.inspector.base.CPUStats """ def _get_inspector(self, parsed_url): if parsed_url.scheme not in self.inspectors: try: driver = insloader.get_inspector(parsed_url) self.inspectors[parsed_url.scheme] = driver except Exception as err: LOG.exception(_("Can NOT load inspector %(name)s: %(err)s"), dict(name=parsed_url.scheme, err=err)) raise err return self.inspectors[parsed_url.scheme] ceilometer-2014.1.5/ceilometer/hardware/__init__.py0000664000567000056700000000000012540642613023263 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/hardware/inspector/0000775000567000056700000000000012540643223023170 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/hardware/inspector/snmp.py0000664000567000056700000001772512540642615024537 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2014 ZHAW SoE # # Authors: Lucas Graf # Toni Zehnder # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. """Inspector for collecting data over SNMP""" import urlparse from ceilometer.hardware.inspector import base from pysnmp.entity.rfc3413.oneliner import cmdgen class SNMPException(Exception): pass def parse_snmp_return(ret): """Check the return value of snmp operations :param ret: a tuple of (errorIndication, errorStatus, errorIndex, data) returned by pysnmp :return: a tuple of (err, data) err: True if error found, or False if no error found data: a string of error description if error found, or the actual return data of the snmp operation """ err = True (errIndication, errStatus, errIdx, varBinds) = ret if errIndication: data = errIndication elif errStatus: data = "%s at %s" % (errStatus.prettyPrint(), errIdx and varBinds[int(errIdx) - 1] or "?") else: err = False data = varBinds return (err, data) class SNMPInspector(base.Inspector): #CPU OIDs _cpu_1_min_load_oid = "1.3.6.1.4.1.2021.10.1.3.1" _cpu_5_min_load_oid = "1.3.6.1.4.1.2021.10.1.3.2" _cpu_15_min_load_oid = "1.3.6.1.4.1.2021.10.1.3.3" #Memory OIDs _memory_total_oid = "1.3.6.1.4.1.2021.4.5.0" _memory_used_oid = "1.3.6.1.4.1.2021.4.6.0" #Disk OIDs _disk_index_oid = "1.3.6.1.4.1.2021.9.1.1" _disk_path_oid = "1.3.6.1.4.1.2021.9.1.2" _disk_device_oid = "1.3.6.1.4.1.2021.9.1.3" _disk_size_oid = "1.3.6.1.4.1.2021.9.1.6" _disk_used_oid = "1.3.6.1.4.1.2021.9.1.8" #Network Interface OIDs _interface_index_oid = "1.3.6.1.2.1.2.2.1.1" _interface_name_oid = "1.3.6.1.2.1.2.2.1.2" _interface_bandwidth_oid = "1.3.6.1.2.1.2.2.1.5" _interface_mac_oid = "1.3.6.1.2.1.2.2.1.6" _interface_ip_oid = "1.3.6.1.2.1.4.20.1.2" _interface_received_oid = "1.3.6.1.2.1.2.2.1.10" _interface_transmitted_oid = "1.3.6.1.2.1.2.2.1.16" _interface_error_oid = "1.3.6.1.2.1.2.2.1.20" #Default port and security name _port = 161 _security_name = 'public' def __init__(self): super(SNMPInspector, self).__init__() self._cmdGen = cmdgen.CommandGenerator() def _get_or_walk_oid(self, oid, host, get=True): if get: func = self._cmdGen.getCmd ret_func = lambda x: x[0][1] else: func = self._cmdGen.nextCmd ret_func = lambda x: x ret = func(cmdgen.CommunityData(self._get_security_name(host)), cmdgen.UdpTransportTarget((host.hostname, host.port or self._port)), oid) (error, data) = parse_snmp_return(ret) if error: raise SNMPException("An error occurred, oid %(oid)s, " "host %(host)s, %(err)s" % dict(oid=oid, host=host.hostname, err=data)) else: return ret_func(data) def _get_value_from_oid(self, oid, host): return self._get_or_walk_oid(oid, host, True) def _walk_oid(self, oid, host): return self._get_or_walk_oid(oid, host, False) def inspect_cpu(self, host): #get 1 minute load cpu_1_min_load = \ str(self._get_value_from_oid(self._cpu_1_min_load_oid, host)) #get 5 minute load cpu_5_min_load = \ str(self._get_value_from_oid(self._cpu_5_min_load_oid, host)) #get 15 minute load cpu_15_min_load = \ str(self._get_value_from_oid(self._cpu_15_min_load_oid, host)) yield base.CPUStats(cpu_1_min=float(cpu_1_min_load), cpu_5_min=float(cpu_5_min_load), cpu_15_min=float(cpu_15_min_load)) def inspect_memory(self, host): #get total memory total = self._get_value_from_oid(self._memory_total_oid, host) #get used memory used = self._get_value_from_oid(self._memory_used_oid, host) yield base.MemoryStats(total=int(total), used=int(used)) def inspect_disk(self, host): disks = self._walk_oid(self._disk_index_oid, host) for disk in disks: for object_name, value in disk: path_oid = "%s.%s" % 
(self._disk_path_oid, str(value)) path = self._get_value_from_oid(path_oid, host) device_oid = "%s.%s" % (self._disk_device_oid, str(value)) device = self._get_value_from_oid(device_oid, host) size_oid = "%s.%s" % (self._disk_size_oid, str(value)) size = self._get_value_from_oid(size_oid, host) used_oid = "%s.%s" % (self._disk_used_oid, str(value)) used = self._get_value_from_oid(used_oid, host) disk = base.Disk(device=str(device), path=str(path)) stats = base.DiskStats(size=int(size), used=int(used)) yield (disk, stats) def inspect_network(self, host): net_interfaces = self._walk_oid(self._interface_index_oid, host) for interface in net_interfaces: for object_name, value in interface: ip = self._get_ip_for_interface(host, value) name_oid = "%s.%s" % (self._interface_name_oid, str(value)) name = self._get_value_from_oid(name_oid, host) mac_oid = "%s.%s" % (self._interface_mac_oid, str(value)) mac = self._get_value_from_oid(mac_oid, host) bw_oid = "%s.%s" % (self._interface_bandwidth_oid, str(value)) # bits/s to byte/s bandwidth = self._get_value_from_oid(bw_oid, host) / 8 rx_oid = "%s.%s" % (self._interface_received_oid, str(value)) rx_bytes = self._get_value_from_oid(rx_oid, host) tx_oid = "%s.%s" % (self._interface_transmitted_oid, str(value)) tx_bytes = self._get_value_from_oid(tx_oid, host) error_oid = "%s.%s" % (self._interface_error_oid, str(value)) error = self._get_value_from_oid(error_oid, host) adapted_mac = mac.prettyPrint().replace('0x', '') interface = base.Interface(name=str(name), mac=adapted_mac, ip=str(ip)) stats = base.InterfaceStats(bandwidth=int(bandwidth), rx_bytes=int(rx_bytes), tx_bytes=int(tx_bytes), error=int(error)) yield (interface, stats) def _get_security_name(self, host): options = urlparse.parse_qs(host.query) return options.get('security_name', [self._security_name])[-1] def _get_ip_for_interface(self, host, interface_id): ip_addresses = self._walk_oid(self._interface_ip_oid, host) for ip in ip_addresses: for name, value in ip: if value == interface_id: return str(name).replace(self._interface_ip_oid + ".", "") ceilometer-2014.1.5/ceilometer/hardware/inspector/base.py0000664000567000056700000000577512540642615024476 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2014 ZHAW SoE # # Authors: Lucas Graf # Toni Zehnder # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Inspector abstraction for read-only access to hardware components""" import abc import collections import six # Named tuple representing CPU statistics. # # cpu1MinLoad: 1 minute load # cpu5MinLoad: 5 minute load # cpu15MinLoad: 15 minute load # CPUStats = collections.namedtuple( 'CPUStats', ['cpu_1_min', 'cpu_5_min', 'cpu_15_min']) # Named tuple representing RAM statistics. # # total: Total Memory (bytes) # used: Used Memory (bytes) # MemoryStats = collections.namedtuple('MemoryStats', ['total', 'used']) # Named tuple representing disks. 
# # device: the device name for the disk # path: the path from the disk # Disk = collections.namedtuple('Disk', ['device', 'path']) # Named tuple representing disk statistics. # # size: storage size (bytes) # used: storage used (bytes) # DiskStats = collections.namedtuple('DiskStats', ['size', 'used']) # Named tuple representing an interface. # # name: the name of the interface # mac: the MAC of the interface # ip: the IP of the interface # Interface = collections.namedtuple('Interface', ['name', 'mac', 'ip']) # Named tuple representing network interface statistics. # # bandwidth: current bandwidth (bytes/s) # rx_bytes: total number of octets received (bytes) # tx_bytes: total number of octets transmitted (bytes) # error: number of outbound packets not transmitted because of errors # InterfaceStats = collections.namedtuple( 'InterfaceStats', ['bandwidth', 'rx_bytes', 'tx_bytes', 'error']) @six.add_metaclass(abc.ABCMeta) class Inspector(object): @abc.abstractmethod def inspect_cpu(self, host): """Inspect the CPU statistics for a host. :param host: the target host :return: iterator of CPUStats """ @abc.abstractmethod def inspect_disk(self, host): """Inspect the disk statistics for a host. :param : the target host :return: iterator of tuple (Disk, DiskStats) """ @abc.abstractmethod def inspect_memory(self, host): """Inspect the ram statistics for a host. :param : the target host :return: iterator of MemoryStats """ @abc.abstractmethod def inspect_network(self, host): """Inspect the network interfaces for a host. :param : the target host :return: iterator of tuple (Interface, InterfaceStats) """ ceilometer-2014.1.5/ceilometer/hardware/inspector/__init__.py0000664000567000056700000000202712540642615025306 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2014 Intel Corp. # # Author: Lianhao Lu # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from stevedore import driver def get_inspector(parsed_url, namespace='ceilometer.hardware.inspectors'): """Get inspector driver and load it. :param parsed_url: urlparse.SplitResult object for the inspector :param namespace: Namespace to use to look for drivers. """ loaded_driver = driver.DriverManager(namespace, parsed_url.scheme) return loaded_driver.driver() ceilometer-2014.1.5/ceilometer/hardware/pollsters/0000775000567000056700000000000012540643223023211 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/hardware/pollsters/util.py0000664000567000056700000000324712540642615024552 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 ZHAW SoE # Copyright © 2014 Intel Corp. # # Authors: Lucas Graf # Toni Zehnder # Lianhao Lu # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy import urlparse from ceilometer.openstack.common import timeutils from ceilometer import sample def get_metadata_from_host(host_url): return {'resource_url': urlparse.urlunsplit(host_url)} def make_sample_from_host(host_url, name, type, unit, volume, project_id=None, user_id=None, res_metadata=None): resource_metadata = dict() if res_metadata is not None: metadata = copy.copy(res_metadata) resource_metadata = dict(zip(metadata._fields, metadata)) resource_metadata.update(get_metadata_from_host(host_url)) return sample.Sample( name='hardware.' + name, type=type, unit=unit, volume=volume, user_id=project_id, project_id=user_id, resource_id=host_url.hostname, timestamp=timeutils.isotime(), resource_metadata=resource_metadata, source='hardware', ) ceilometer-2014.1.5/ceilometer/hardware/pollsters/cpu.py0000664000567000056700000000444112540642615024361 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 ZHAW SoE # Copyright © 2014 Intel Corp. # # Authors: Lucas Graf # Toni Zehnder # Lianhao Lu # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from ceilometer.hardware import plugin from ceilometer.hardware.pollsters import util from ceilometer import sample class _Base(plugin.HardwarePollster): CACHE_KEY = 'cpu' INSPECT_METHOD = 'inspect_cpu' class CPULoad1MinPollster(_Base): @staticmethod def generate_one_sample(host, c_data): return util.make_sample_from_host(host, name='cpu.load.1min', type=sample.TYPE_GAUGE, unit='process', volume=c_data.cpu_1_min, ) class CPULoad5MinPollster(_Base): @staticmethod def generate_one_sample(host, c_data): return util.make_sample_from_host(host, name='cpu.load.5min', type=sample.TYPE_GAUGE, unit='process', volume=c_data.cpu_5_min, ) class CPULoad15MinPollster(_Base): @staticmethod def generate_one_sample(host, c_data): return util.make_sample_from_host(host, name='cpu.load.15min', type=sample.TYPE_GAUGE, unit='process', volume=c_data.cpu_15_min, ) ceilometer-2014.1.5/ceilometer/hardware/pollsters/__init__.py0000664000567000056700000000000012540642613025312 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/hardware/pollsters/net.py0000664000567000056700000000614612540642615024364 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 ZHAW SoE # Copyright © 2014 Intel Corp. # # Authors: Lucas Graf # Toni Zehnder # Lianhao Lu # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from ceilometer.hardware import plugin from ceilometer.hardware.pollsters import util from ceilometer import sample class _Base(plugin.HardwarePollster): CACHE_KEY = 'nic' INSPECT_METHOD = 'inspect_network' class BandwidthBytesPollster(_Base): @staticmethod def generate_one_sample(host, c_data): (nic, info) = c_data return util.make_sample_from_host(host, name='network.bandwidth.bytes', type=sample.TYPE_CUMULATIVE, unit='B', volume=info.bandwidth, res_metadata=nic, ) class IncomingBytesPollster(_Base): @staticmethod def generate_one_sample(host, c_data): (nic, info) = c_data return util.make_sample_from_host(host, name='network.incoming.bytes', type=sample.TYPE_CUMULATIVE, unit='B', volume=info.rx_bytes, res_metadata=nic, ) class OutgoingBytesPollster(_Base): @staticmethod def generate_one_sample(host, c_data): (nic, info) = c_data return util.make_sample_from_host(host, name='network.outgoing.bytes', type=sample.TYPE_CUMULATIVE, unit='B', volume=info.tx_bytes, res_metadata=nic, ) class OutgoingErrorsPollster(_Base): @staticmethod def generate_one_sample(host, c_data): (nic, info) = c_data return util.make_sample_from_host(host, name='network.outgoing.errors', type=sample.TYPE_CUMULATIVE, unit='packet', volume=info.error, res_metadata=nic, ) ceilometer-2014.1.5/ceilometer/hardware/pollsters/disk.py0000664000567000056700000000377512540642615024535 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 ZHAW SoE # Copyright © 2014 Intel Corp. # # Authors: Lucas Graf # Toni Zehnder # Lianhao Lu # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from ceilometer.hardware import plugin from ceilometer.hardware.pollsters import util from ceilometer import sample class _Base(plugin.HardwarePollster): CACHE_KEY = 'disk' INSPECT_METHOD = 'inspect_disk' class DiskTotalPollster(_Base): @staticmethod def generate_one_sample(host, c_data): (disk, info) = c_data return util.make_sample_from_host(host, name='disk.size.total', type=sample.TYPE_GAUGE, unit='B', volume=info.size, res_metadata=disk, ) class DiskUsedPollster(_Base): @staticmethod def generate_one_sample(host, c_data): (disk, info) = c_data return util.make_sample_from_host(host, name='disk.size.used', type=sample.TYPE_GAUGE, unit='B', volume=info.used, res_metadata=disk, ) ceilometer-2014.1.5/ceilometer/hardware/pollsters/memory.py0000664000567000056700000000351612540642615025104 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 ZHAW SoE # Copyright © 2014 Intel Corp. 
# # Authors: Lucas Graf # Toni Zehnder # Lianhao Lu # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from ceilometer.hardware import plugin from ceilometer.hardware.pollsters import util from ceilometer import sample class _Base(plugin.HardwarePollster): CACHE_KEY = 'memory' INSPECT_METHOD = 'inspect_memory' class MemoryTotalPollster(_Base): @staticmethod def generate_one_sample(host, c_data): return util.make_sample_from_host(host, name='memory.total', type=sample.TYPE_GAUGE, unit='B', volume=c_data.total, ) class MemoryUsedPollster(_Base): @staticmethod def generate_one_sample(host, c_data): return util.make_sample_from_host(host, name='memory.used', type=sample.TYPE_GAUGE, unit='B', volume=c_data.used, ) ceilometer-2014.1.5/ceilometer/notifier.py0000664000567000056700000000436612540642615021573 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 eNovance # # Author: Julien Danjou # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from ceilometer.openstack.common import context as req_context from ceilometer.openstack.common import log as logging from ceilometer import pipeline from ceilometer import transformer from ceilometer.openstack.common.gettextutils import _ # noqa from stevedore import extension LOG = logging.getLogger(__name__) _notification_manager = None _pipeline_manager = None def _load_notification_manager(): global _notification_manager namespace = 'ceilometer.notification' LOG.debug(_('loading notification handlers from %s'), namespace) _notification_manager = extension.ExtensionManager( namespace=namespace, invoke_on_load=True) if not list(_notification_manager): LOG.warning(_('Failed to load any notification handlers for %s'), namespace) def _load_pipeline_manager(): global _pipeline_manager _pipeline_manager = pipeline.setup_pipeline( transformer.TransformerExtensionManager( 'ceilometer.transformer', ), ) def _process_notification_for_ext(ext, context, notification): with _pipeline_manager.publisher(context) as p: # FIXME(dhellmann): Spawn green thread? 
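        # ext.obj.to_samples() yields Sample objects for this notification;
        # list() forces full conversion before the batch is handed to the
        # pipeline publisher, which runs the configured transformers and
        # publishers for each sample.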
p(list(ext.obj.to_samples(notification))) def notify(context, message): """Sends a notification as a meter using Ceilometer pipelines.""" if not _notification_manager: _load_notification_manager() if not _pipeline_manager: _load_pipeline_manager() _notification_manager.map( _process_notification_for_ext, context=context or req_context.get_admin_context(), notification=message) ceilometer-2014.1.5/ceilometer/energy/0000775000567000056700000000000012540643223020656 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/energy/__init__.py0000664000567000056700000000000012540642613022757 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/energy/kwapi.py0000664000567000056700000000760412540642615022356 0ustar jenkinsjenkins00000000000000# -*- coding: utf-8 -*- # # Author: François Rossigneux # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime from keystoneclient import exceptions from oslo.config import cfg import requests from ceilometer.central import plugin from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log from ceilometer import sample LOG = log.getLogger(__name__) class KwapiClient(object): """Kwapi API client.""" def __init__(self, url, token=None): """Initializes client.""" self.url = url self.token = token def iter_probes(self): """Returns a list of dicts describing all probes.""" probes_url = self.url + '/probes/' headers = {} if self.token is not None: headers = {'X-Auth-Token': self.token} request = requests.get(probes_url, headers=headers) message = request.json() probes = message['probes'] for key, value in probes.iteritems(): probe_dict = value probe_dict['id'] = key yield probe_dict class _Base(plugin.CentralPollster): """Base class for the Kwapi pollster, derived from CentralPollster.""" @staticmethod def get_kwapi_client(ksclient): """Returns a KwapiClient configured with the proper url and token.""" endpoint = ksclient.service_catalog.url_for( service_type='energy', endpoint_type=cfg.CONF.service_credentials.os_endpoint_type) return KwapiClient(endpoint, ksclient.auth_token) CACHE_KEY_PROBE = 'kwapi.probes' def _iter_probes(self, ksclient, cache): """Iterate over all probes.""" if self.CACHE_KEY_PROBE not in cache: cache[self.CACHE_KEY_PROBE] = self._get_probes(ksclient) return iter(cache[self.CACHE_KEY_PROBE]) def _get_probes(self, ksclient): try: client = self.get_kwapi_client(ksclient) except exceptions.EndpointNotFound: LOG.debug(_("Kwapi endpoint not found")) return [] return list(client.iter_probes()) class EnergyPollster(_Base): """Measures energy consumption.""" def get_samples(self, manager, cache, resources=[]): """Returns all samples.""" for probe in self._iter_probes(manager.keystone, cache): yield sample.Sample( name='energy', type=sample.TYPE_CUMULATIVE, unit='kWh', volume=probe['kwh'], user_id=None, project_id=None, resource_id=probe['id'], timestamp=datetime.datetime.fromtimestamp( probe['timestamp']).isoformat(), resource_metadata={} ) class PowerPollster(_Base): """Measures power 
consumption.""" def get_samples(self, manager, cache, resources=[]): """Returns all samples.""" for probe in self._iter_probes(manager.keystone, cache): yield sample.Sample( name='power', type=sample.TYPE_GAUGE, unit='W', volume=probe['w'], user_id=None, project_id=None, resource_id=probe['id'], timestamp=datetime.datetime.fromtimestamp( probe['timestamp']).isoformat(), resource_metadata={} ) ceilometer-2014.1.5/ceilometer/alarm/0000775000567000056700000000000012540643223020461 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/alarm/notifier/0000775000567000056700000000000012540643223022300 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/alarm/notifier/test.py0000664000567000056700000000226412540642615023641 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 eNovance # # Author: Julien Danjou # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Test alarm notifier.""" from ceilometer.alarm import notifier class TestAlarmNotifier(notifier.AlarmNotifier): "Test alarm notifier.""" def __init__(self): self.notifications = [] def notify(self, action, alarm_id, previous, current, reason, reason_data): self.notifications.append((action, alarm_id, previous, current, reason, reason_data)) ceilometer-2014.1.5/ceilometer/alarm/notifier/rest.py0000664000567000056700000000605712540642615023643 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 eNovance # # Author: Mehdi Abaakouk # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Rest alarm notifier.""" import eventlet import requests import six.moves.urllib.parse as urlparse from oslo.config import cfg from ceilometer.alarm import notifier from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import jsonutils from ceilometer.openstack.common import log LOG = log.getLogger(__name__) REST_NOTIFIER_OPTS = [ cfg.StrOpt('rest_notifier_certificate_file', default='', help='SSL Client certificate for REST notifier.' ), cfg.StrOpt('rest_notifier_certificate_key', default='', help='SSL Client private key for REST notifier.' ), cfg.BoolOpt('rest_notifier_ssl_verify', default=True, help='Whether to verify the SSL Server certificate when ' 'calling alarm action.' 
), ] cfg.CONF.register_opts(REST_NOTIFIER_OPTS, group="alarm") class RestAlarmNotifier(notifier.AlarmNotifier): """Rest alarm notifier.""" @staticmethod def notify(action, alarm_id, previous, current, reason, reason_data, headers=None): LOG.info(_( "Notifying alarm %(alarm_id)s from %(previous)s " "to %(current)s with action %(action)s because " "%(reason)s") % ({'alarm_id': alarm_id, 'previous': previous, 'current': current, 'action': action, 'reason': reason})) body = {'alarm_id': alarm_id, 'previous': previous, 'current': current, 'reason': reason, 'reason_data': reason_data} headers = headers or {} headers['content-type'] = 'application/json' kwargs = {'data': jsonutils.dumps(body), 'headers': headers} if action.scheme == 'https': default_verify = int(cfg.CONF.alarm.rest_notifier_ssl_verify) options = urlparse.parse_qs(action.query) verify = bool(int(options.get('ceilometer-alarm-ssl-verify', [default_verify])[-1])) kwargs['verify'] = verify cert = cfg.CONF.alarm.rest_notifier_certificate_file key = cfg.CONF.alarm.rest_notifier_certificate_key if cert: kwargs['cert'] = (cert, key) if key else cert eventlet.spawn_n(requests.post, action.geturl(), **kwargs) ceilometer-2014.1.5/ceilometer/alarm/notifier/log.py0000664000567000056700000000252012540642615023436 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 eNovance # # Author: Julien Danjou # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Log alarm notifier.""" from ceilometer.alarm import notifier from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log LOG = log.getLogger(__name__) class LogAlarmNotifier(notifier.AlarmNotifier): "Log alarm notifier.""" @staticmethod def notify(action, alarm_id, previous, current, reason, reason_data): LOG.info(_( "Notifying alarm %(alarm_id)s from %(previous)s " "to %(current)s with action %(action)s because " "%(reason)s") % ({'alarm_id': alarm_id, 'previous': previous, 'current': current, 'action': action, 'reason': reason})) ceilometer-2014.1.5/ceilometer/alarm/notifier/__init__.py0000664000567000056700000000250012540642615024412 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 eNovance # # Author: Julien Danjou # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
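# Illustrative note (not part of the original module): RestAlarmNotifier above
# lets a single HTTPS alarm action override the global [alarm]
# rest_notifier_ssl_verify option through a query parameter. For example, an
# action URL such as
#
#     https://webhook.example.com/alarm?ceilometer-alarm-ssl-verify=0
#
# posts the alarm state change without verifying the server certificate,
# while other actions keep the configured default. The hostname here is an
# assumption used only for illustration.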
import abc import six @six.add_metaclass(abc.ABCMeta) class AlarmNotifier(object): """Base class for alarm notifier plugins.""" @abc.abstractmethod def notify(self, action, alarm_id, previous, current, reason, reason_data): """Notify that an alarm has been triggered. :param action: The action that is being attended, as a parsed URL. :param alarm_id: The triggered alarm. :param previous: The previous state of the alarm. :param current: The current state of the alarm. :param reason: The reason the alarm changed its state. :param reason_data: A dict representation of the reason. """ ceilometer-2014.1.5/ceilometer/alarm/rpc.py0000664000567000056700000000646712540642615021640 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 eNovance # # Authors: Mehdi Abaakouk # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo.config import cfg from ceilometer.openstack.common import context from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log from ceilometer.openstack.common.rpc import proxy as rpc_proxy from ceilometer.storage import models OPTS = [ cfg.StrOpt('notifier_rpc_topic', default='alarm_notifier', help='The topic that ceilometer uses for alarm notifier ' 'messages.'), cfg.StrOpt('partition_rpc_topic', default='alarm_partition_coordination', help='The topic that ceilometer uses for alarm partition ' 'coordination messages.'), ] cfg.CONF.register_opts(OPTS, group='alarm') LOG = log.getLogger(__name__) class RPCAlarmNotifier(rpc_proxy.RpcProxy): def __init__(self): super(RPCAlarmNotifier, self).__init__( default_version='1.0', topic=cfg.CONF.alarm.notifier_rpc_topic) def notify(self, alarm, previous, reason, reason_data): actions = getattr(alarm, models.Alarm.ALARM_ACTIONS_MAP[alarm.state]) if not actions: LOG.debug(_('alarm %(alarm_id)s has no action configured ' 'for state transition from %(previous)s to ' 'state %(state)s, skipping the notification.') % {'alarm_id': alarm.alarm_id, 'previous': previous, 'state': alarm.state}) return msg = self.make_msg('notify_alarm', data={ 'actions': actions, 'alarm_id': alarm.alarm_id, 'previous': previous, 'current': alarm.state, 'reason': unicode(reason), 'reason_data': reason_data}) self.cast(context.get_admin_context(), msg) class RPCAlarmPartitionCoordination(rpc_proxy.RpcProxy): def __init__(self): super(RPCAlarmPartitionCoordination, self).__init__( default_version='1.0', topic=cfg.CONF.alarm.partition_rpc_topic) def presence(self, uuid, priority): msg = self.make_msg('presence', data={ 'uuid': uuid, 'priority': priority}) self.fanout_cast(context.get_admin_context(), msg) def assign(self, uuid, alarms): msg = self.make_msg('assign', data={ 'uuid': uuid, 'alarms': alarms}) return self.fanout_cast(context.get_admin_context(), msg) def allocate(self, uuid, alarms): msg = self.make_msg('allocate', data={ 'uuid': uuid, 'alarms': alarms}) return self.fanout_cast(context.get_admin_context(), msg) 
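# Usage sketch (illustrative, not from the original source): the alarm
# evaluators call RPCAlarmNotifier above to fan a state transition out to the
# ceilometer-alarm-notifier service. The alarm object is assumed to be a
# ceilometer.storage.models.Alarm with its actions already configured.
#
#     notifier = RPCAlarmNotifier()
#     notifier.notify(alarm, previous='ok',
#                     reason='3 samples outside threshold',
#                     reason_data={'type': 'threshold', 'count': 3})
#
# notify() resolves the actions for alarm.state via ALARM_ACTIONS_MAP and
# issues an asynchronous cast(), so the caller does not wait for delivery.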
ceilometer-2014.1.5/ceilometer/alarm/__init__.py0000664000567000056700000000000012540642615022564 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/alarm/service.py0000664000567000056700000002344312540642615022505 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 Red Hat, Inc # Copyright © 2013 eNovance # # Authors: Eoghan Glynn # Julien Danjou # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc from ceilometerclient import client as ceiloclient from oslo.config import cfg import six from stevedore import extension from ceilometer.alarm.partition import coordination from ceilometer.alarm import rpc as rpc_alarm from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log from ceilometer.openstack.common import network_utils from ceilometer.openstack.common.rpc import dispatcher as rpc_dispatcher from ceilometer.openstack.common.rpc import service as rpc_service from ceilometer.openstack.common import service as os_service OPTS = [ cfg.IntOpt('evaluation_interval', default=60, help='Period of evaluation cycle, should' ' be >= than configured pipeline interval for' ' collection of underlying metrics.', deprecated_opts=[cfg.DeprecatedOpt( 'threshold_evaluation_interval', group='alarm')]), ] cfg.CONF.register_opts(OPTS, group='alarm') cfg.CONF.import_opt('notifier_rpc_topic', 'ceilometer.alarm.rpc', group='alarm') cfg.CONF.import_opt('partition_rpc_topic', 'ceilometer.alarm.rpc', group='alarm') LOG = log.getLogger(__name__) @six.add_metaclass(abc.ABCMeta) class AlarmService(object): EXTENSIONS_NAMESPACE = "ceilometer.alarm.evaluator" def _load_evaluators(self): self.evaluators = extension.ExtensionManager( namespace=self.EXTENSIONS_NAMESPACE, invoke_on_load=True, invoke_args=(rpc_alarm.RPCAlarmNotifier(),) ) self.supported_evaluators = [ext.name for ext in self.evaluators.extensions] @property def _client(self): """Construct or reuse an authenticated API client.""" if not self.api_client: auth_config = cfg.CONF.service_credentials creds = dict( os_auth_url=auth_config.os_auth_url, os_region_name=auth_config.os_region_name, os_tenant_name=auth_config.os_tenant_name, os_password=auth_config.os_password, os_username=auth_config.os_username, os_cacert=auth_config.os_cacert, os_endpoint_type=auth_config.os_endpoint_type, insecure=auth_config.insecure, ) self.api_client = ceiloclient.get_client(2, **creds) return self.api_client def _evaluate_assigned_alarms(self): try: alarms = self._assigned_alarms() LOG.info(_('initiating evaluation cycle on %d alarms') % len(alarms)) for alarm in alarms: self._evaluate_alarm(alarm) except Exception: LOG.exception(_('alarm evaluation cycle failed')) def _evaluate_alarm(self, alarm): """Evaluate the alarms assigned to this evaluator.""" if alarm.type not in self.supported_evaluators: LOG.debug(_('skipping alarm %s: type unsupported') % alarm.alarm_id) return LOG.debug(_('evaluating alarm %s') % alarm.alarm_id) self.evaluators[alarm.type].obj.evaluate(alarm) @abc.abstractmethod def 
_assigned_alarms(self): pass class SingletonAlarmService(AlarmService, os_service.Service): def __init__(self): super(SingletonAlarmService, self).__init__() self._load_evaluators() self.api_client = None def start(self): super(SingletonAlarmService, self).start() if self.evaluators: interval = cfg.CONF.alarm.evaluation_interval self.tg.add_timer( interval, self._evaluate_assigned_alarms, 0) # Add a dummy thread to have wait() working self.tg.add_timer(604800, lambda: None) def _assigned_alarms(self): return self._client.alarms.list(q=[{'field': 'enabled', 'value': True}]) cfg.CONF.import_opt('host', 'ceilometer.service') class PartitionedAlarmService(AlarmService, rpc_service.Service): def __init__(self): super(PartitionedAlarmService, self).__init__( cfg.CONF.host, cfg.CONF.alarm.partition_rpc_topic, self ) self._load_evaluators() self.api_client = None self.partition_coordinator = coordination.PartitionCoordinator() def initialize_service_hook(self, service): LOG.debug(_('initialize_service_hooks')) self.conn.create_worker( cfg.CONF.alarm.partition_rpc_topic, rpc_dispatcher.RpcDispatcher([self]), 'ceilometer.alarm.' + cfg.CONF.alarm.partition_rpc_topic, ) def start(self): super(PartitionedAlarmService, self).start() if self.evaluators: eval_interval = cfg.CONF.alarm.evaluation_interval self.tg.add_timer( eval_interval / 4, self.partition_coordinator.report_presence, 0) self.tg.add_timer( eval_interval / 2, self.partition_coordinator.check_mastership, eval_interval, *[eval_interval, self._client]) self.tg.add_timer( eval_interval, self._evaluate_assigned_alarms, eval_interval) # Add a dummy thread to have wait() working self.tg.add_timer(604800, lambda: None) def _assigned_alarms(self): return self.partition_coordinator.assigned_alarms(self._client) def presence(self, context, data): self.partition_coordinator.presence(data.get('uuid'), data.get('priority')) def assign(self, context, data): self.partition_coordinator.assign(data.get('uuid'), data.get('alarms')) def allocate(self, context, data): self.partition_coordinator.allocate(data.get('uuid'), data.get('alarms')) class AlarmNotifierService(rpc_service.Service): EXTENSIONS_NAMESPACE = "ceilometer.alarm.notifier" def __init__(self, host, topic): super(AlarmNotifierService, self).__init__(host, topic, self) self.notifiers = extension.ExtensionManager(self.EXTENSIONS_NAMESPACE, invoke_on_load=True) def start(self): super(AlarmNotifierService, self).start() # Add a dummy thread to have wait() working self.tg.add_timer(604800, lambda: None) def initialize_service_hook(self, service): LOG.debug(_('initialize_service_hooks')) self.conn.create_worker( cfg.CONF.alarm.notifier_rpc_topic, rpc_dispatcher.RpcDispatcher([self]), 'ceilometer.alarm.' 
+ cfg.CONF.alarm.notifier_rpc_topic, ) def _handle_action(self, action, alarm_id, previous, current, reason, reason_data): try: action = network_utils.urlsplit(action) except Exception: LOG.error( _("Unable to parse action %(action)s for alarm %(alarm_id)s"), {'action': action, 'alarm_id': alarm_id}) return try: notifier = self.notifiers[action.scheme].obj except KeyError: scheme = action.scheme LOG.error( _("Action %(scheme)s for alarm %(alarm_id)s is unknown, " "cannot notify"), {'scheme': scheme, 'alarm_id': alarm_id}) return try: LOG.debug(_("Notifying alarm %(id)s with action %(act)s") % ( {'id': alarm_id, 'act': action})) notifier.notify(action, alarm_id, previous, current, reason, reason_data) except Exception: LOG.exception(_("Unable to notify alarm %s"), alarm_id) return def notify_alarm(self, context, data): """Notify that alarm has been triggered. data should be a dict with the following keys: - actions, the URL of the action to run; this is a mapped to extensions automatically - alarm_id, the ID of the alarm that has been triggered - previous, the previous state of the alarm - current, the new state the alarm has transitioned to - reason, the reason the alarm changed its state - reason_data, a dict representation of the reason :param context: Request context. :param data: A dict as described above. """ actions = data.get('actions') if not actions: LOG.error(_("Unable to notify for an alarm with no action")) return for action in actions: self._handle_action(action, data.get('alarm_id'), data.get('previous'), data.get('current'), data.get('reason'), data.get('reason_data')) ceilometer-2014.1.5/ceilometer/alarm/evaluator/0000775000567000056700000000000012540643223022463 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/alarm/evaluator/threshold.py0000664000567000056700000001764112540642615025046 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 Red Hat, Inc # # Author: Eoghan Glynn # Author: Mehdi Abaakouk # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
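# Illustrative note (not part of the original module): the notify_alarm()
# payload described above is a plain dict, for example
#
#     {'actions': ['log://'],
#      'alarm_id': '01234567-89ab-cdef-0123-456789abcdef',
#      'previous': 'ok',
#      'current': 'alarm',
#      'reason': 'Transition to alarm due to 1 samples outside threshold',
#      'reason_data': {'type': 'threshold', 'count': 1}}
#
# Each entry in 'actions' is parsed with network_utils.urlsplit() and its
# scheme ('log' here) selects the notifier extension that handles it. The
# UUID, reason text, and scheme are made-up values for illustration.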
import datetime import operator from ceilometer.alarm import evaluator from ceilometer.alarm.evaluator import utils from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log from ceilometer.openstack.common import timeutils LOG = log.getLogger(__name__) COMPARATORS = { 'gt': operator.gt, 'lt': operator.lt, 'ge': operator.ge, 'le': operator.le, 'eq': operator.eq, 'ne': operator.ne, } class ThresholdEvaluator(evaluator.Evaluator): # the sliding evaluation window is extended to allow # for reporting/ingestion lag look_back = 1 # minimum number of datapoints within sliding window to # avoid unknown state quorum = 1 @classmethod def _bound_duration(cls, alarm, constraints): """Bound the duration of the statistics query.""" now = timeutils.utcnow() # when exclusion of weak datapoints is enabled, we extend # the look-back period so as to allow a clearer sample count # trend to be established look_back = (cls.look_back if not alarm.rule.get('exclude_outliers') else alarm.rule['evaluation_periods']) window = (alarm.rule['period'] * (alarm.rule['evaluation_periods'] + look_back)) start = now - datetime.timedelta(seconds=window) LOG.debug(_('query stats from %(start)s to ' '%(now)s') % {'start': start, 'now': now}) after = dict(field='timestamp', op='ge', value=start.isoformat()) before = dict(field='timestamp', op='le', value=now.isoformat()) constraints.extend([before, after]) return constraints @staticmethod def _sanitize(alarm, statistics): """Sanitize statistics. """ LOG.debug(_('sanitize stats %s') % statistics) if alarm.rule.get('exclude_outliers'): key = operator.attrgetter('count') mean = utils.mean(statistics, key) stddev = utils.stddev(statistics, key, mean) lower = mean - 2 * stddev upper = mean + 2 * stddev inliers, outliers = utils.anomalies(statistics, key, lower, upper) if outliers: LOG.debug(_('excluded weak datapoints with sample counts %s'), [s.count for s in outliers]) statistics = inliers else: LOG.debug('no excluded weak datapoints') # in practice statistics are always sorted by period start, not # strictly required by the API though statistics = statistics[-alarm.rule['evaluation_periods']:] LOG.debug(_('pruned statistics to %d') % len(statistics)) return statistics def _statistics(self, alarm, query): """Retrieve statistics over the current window.""" LOG.debug(_('stats query %s') % query) try: return self._client.statistics.list( meter_name=alarm.rule['meter_name'], q=query, period=alarm.rule['period']) except Exception: LOG.exception(_('alarm stats retrieval failed')) return [] def _sufficient(self, alarm, statistics): """Ensure there is sufficient data for evaluation, transitioning to unknown otherwise. """ sufficient = len(statistics) >= self.quorum if not sufficient and alarm.state != evaluator.UNKNOWN: reason = _('%d datapoints are unknown') % alarm.rule[ 'evaluation_periods'] reason_data = self._reason_data('unknown', alarm.rule['evaluation_periods'], None) self._refresh(alarm, evaluator.UNKNOWN, reason, reason_data) return sufficient @staticmethod def _reason_data(disposition, count, most_recent): """Create a reason data dictionary for this evaluator type. 
""" return {'type': 'threshold', 'disposition': disposition, 'count': count, 'most_recent': most_recent} @classmethod def _reason(cls, alarm, statistics, distilled, state): """Fabricate reason string.""" count = len(statistics) disposition = 'inside' if state == evaluator.OK else 'outside' last = getattr(statistics[-1], alarm.rule['statistic']) transition = alarm.state != state reason_data = cls._reason_data(disposition, count, last) if transition: return (_('Transition to %(state)s due to %(count)d samples' ' %(disposition)s threshold, most recent:' ' %(most_recent)s') % dict(reason_data, state=state)), reason_data return (_('Remaining as %(state)s due to %(count)d samples' ' %(disposition)s threshold, most recent: %(most_recent)s') % dict(reason_data, state=state)), reason_data def _transition(self, alarm, statistics, compared): """Transition alarm state if necessary. The transition rules are currently hardcoded as: - transitioning from a known state requires an unequivocal set of datapoints - transitioning from unknown is on the basis of the most recent datapoint if equivocal Ultimately this will be policy-driven. """ distilled = all(compared) unequivocal = distilled or not any(compared) unknown = alarm.state == evaluator.UNKNOWN continuous = alarm.repeat_actions if unequivocal: state = evaluator.ALARM if distilled else evaluator.OK reason, reason_data = self._reason(alarm, statistics, distilled, state) if alarm.state != state or continuous: self._refresh(alarm, state, reason, reason_data) elif unknown or continuous: trending_state = evaluator.ALARM if compared[-1] else evaluator.OK state = trending_state if unknown else alarm.state reason, reason_data = self._reason(alarm, statistics, distilled, state) self._refresh(alarm, state, reason, reason_data) def evaluate(self, alarm): if not self.within_time_constraint(alarm): LOG.debug(_('Attempted to evaluate alarm %s, but it is not ' 'within its time constraint.') % alarm.alarm_id) return query = self._bound_duration( alarm, alarm.rule['query'] ) statistics = self._sanitize( alarm, self._statistics(alarm, query) ) if self._sufficient(alarm, statistics): def _compare(stat): op = COMPARATORS[alarm.rule['comparison_operator']] value = getattr(stat, alarm.rule['statistic']) limit = alarm.rule['threshold'] LOG.debug(_('comparing value %(value)s against threshold' ' %(limit)s') % {'value': value, 'limit': limit}) return op(value, limit) self._transition(alarm, statistics, map(_compare, statistics)) ceilometer-2014.1.5/ceilometer/alarm/evaluator/combination.py0000664000567000056700000001014512540642615025344 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 eNovance # # Authors: Mehdi Abaakouk # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import itertools from ceilometer.alarm import evaluator from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log LOG = log.getLogger(__name__) COMPARATORS = {'and': all, 'or': any} class CombinationEvaluator(evaluator.Evaluator): def _get_alarm_state(self, alarm_id): try: alarm = self._client.alarms.get(alarm_id) except Exception: LOG.exception(_('alarm retrieval failed')) return None return alarm.state def _sufficient_states(self, alarm, states): """Ensure there is sufficient data for evaluation, transitioning to unknown otherwise. """ #note(sileht): alarm can be evaluated only with #stable state of other alarm alarms_missing_states = [alarm_id for alarm_id, state in states if not state or state == evaluator.UNKNOWN] sufficient = len(alarms_missing_states) == 0 if not sufficient and alarm.state != evaluator.UNKNOWN: reason = _('Alarms %(alarm_ids)s' ' are in unknown state') % \ {'alarm_ids': ",".join(alarms_missing_states)} reason_data = self._reason_data(alarms_missing_states) self._refresh(alarm, evaluator.UNKNOWN, reason, reason_data) return sufficient @staticmethod def _reason_data(alarm_ids): """Create a reason data dictionary for this evaluator type. """ return {'type': 'combination', 'alarm_ids': alarm_ids} @classmethod def _reason(cls, alarm, state, underlying_states): """Fabricate reason string.""" transition = alarm.state != state alarms_to_report = [alarm_id for alarm_id, alarm_state in underlying_states if alarm_state == state] reason_data = cls._reason_data(alarms_to_report) if transition: return (_('Transition to %(state)s due to alarms' ' %(alarm_ids)s in state %(state)s') % {'state': state, 'alarm_ids': ",".join(alarms_to_report)}), reason_data return (_('Remaining as %(state)s due to alarms' ' %(alarm_ids)s in state %(state)s') % {'state': state, 'alarm_ids': ",".join(alarms_to_report)}), reason_data def _transition(self, alarm, underlying_states): """Transition alarm state if necessary. """ op = alarm.rule['operator'] if COMPARATORS[op](s == evaluator.ALARM for __, s in underlying_states): state = evaluator.ALARM else: state = evaluator.OK continuous = alarm.repeat_actions reason, reason_data = self._reason(alarm, state, underlying_states) if alarm.state != state or continuous: self._refresh(alarm, state, reason, reason_data) def evaluate(self, alarm): if not self.within_time_constraint(alarm): LOG.debug(_('Attempted to evaluate alarm %s, but it is not ' 'within its time constraint.') % alarm.alarm_id) return states = zip(alarm.rule['alarm_ids'], itertools.imap(self._get_alarm_state, alarm.rule['alarm_ids'])) if self._sufficient_states(alarm, states): self._transition(alarm, states) ceilometer-2014.1.5/ceilometer/alarm/evaluator/__init__.py0000664000567000056700000001064012540642615024601 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 eNovance # # Authors: Mehdi Abaakouk # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
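# NOTE(editor): illustrative sketch only, not part of the original tree.
# The CombinationEvaluator above maps the rule's 'and'/'or' operator onto
# Python's all()/any() over the states of the underlying alarms. A minimal
# stand-alone illustration with hypothetical alarm ids and states:
#
#     COMPARATORS = {'and': all, 'or': any}
#
#     underlying = [('alarm-a', 'alarm'), ('alarm-b', 'ok')]
#     op = 'or'
#     state = ('alarm' if COMPARATORS[op](s == 'alarm' for _, s in underlying)
#              else 'ok')
#     # with 'or', one underlying alarm in 'alarm' is enough: state == 'alarm'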
import abc import croniter import datetime import pytz from ceilometerclient import client as ceiloclient from oslo.config import cfg import six from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log from ceilometer.openstack.common import timeutils LOG = log.getLogger(__name__) UNKNOWN = 'insufficient data' OK = 'ok' ALARM = 'alarm' @six.add_metaclass(abc.ABCMeta) class Evaluator(object): """Base class for alarm rule evaluator plugins.""" def __init__(self, notifier): self.notifier = notifier self.api_client = None @property def _client(self): """Construct or reuse an authenticated API client.""" if not self.api_client: auth_config = cfg.CONF.service_credentials creds = dict( os_auth_url=auth_config.os_auth_url, os_region_name=auth_config.os_region_name, os_tenant_name=auth_config.os_tenant_name, os_password=auth_config.os_password, os_username=auth_config.os_username, os_cacert=auth_config.os_cacert, os_endpoint_type=auth_config.os_endpoint_type, insecure=auth_config.insecure, ) self.api_client = ceiloclient.get_client(2, **creds) return self.api_client def _refresh(self, alarm, state, reason, reason_data): """Refresh alarm state.""" try: previous = alarm.state if previous != state: LOG.info(_('alarm %(id)s transitioning to %(state)s because ' '%(reason)s') % {'id': alarm.alarm_id, 'state': state, 'reason': reason}) self._client.alarms.set_state(alarm.alarm_id, state=state) alarm.state = state if self.notifier: self.notifier.notify(alarm, previous, reason, reason_data) except Exception: # retry will occur naturally on the next evaluation # cycle (unless alarm state reverts in the meantime) LOG.exception(_('alarm state update failed')) @classmethod def within_time_constraint(cls, alarm): """Check whether the alarm is within at least one of its time constraints. If there are none, then the answer is yes. """ if not alarm.time_constraints: return True now_utc = timeutils.utcnow() for tc in alarm.time_constraints: tz = pytz.timezone(tc['timezone']) if tc['timezone'] else None now_tz = now_utc.astimezone(tz) if tz else now_utc start_cron = croniter.croniter(tc['start'], now_tz) if cls._is_exact_match(start_cron, now_tz): return True latest_start = start_cron.get_prev(datetime.datetime) duration = datetime.timedelta(seconds=tc['duration']) if latest_start <= now_tz <= latest_start + duration: return True return False @staticmethod def _is_exact_match(cron, ts): """Handle edge case where if the timestamp is the same as the cron point in time to the minute, croniter returns the previous start, not the current. We can check this by first going one step back and then one step forward and check if we are at the original point in time. """ cron.get_prev() diff = timeutils.total_seconds(ts - cron.get_next(datetime.datetime)) return abs(diff) < 60 # minute precision @abc.abstractmethod def evaluate(self, alarm): '''interface definition evaluate an alarm alarm Alarm: an instance of the Alarm ''' ceilometer-2014.1.5/ceilometer/alarm/evaluator/utils.py0000664000567000056700000000334512540642615024206 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2014 Red Hat, Inc # # Author: Eoghan Glynn # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import math def mean(s, key=lambda x: x): """Calculate the mean of a numeric list. """ count = float(len(s)) if count: return math.fsum(map(key, s)) / count return 0.0 def deltas(s, key, m=None): """Calculate the squared distances from mean for a numeric list. """ m = m or mean(s, key) return [(key(i) - m) ** 2 for i in s] def variance(s, key, m=None): """Calculate the variance of a numeric list. """ return mean(deltas(s, key, m)) def stddev(s, key, m=None): """Calculate the standard deviation of a numeric list. """ return math.sqrt(variance(s, key, m)) def outside(s, key, lower=0.0, upper=0.0): """Determine if value falls outside upper and lower bounds. """ v = key(s) return v < lower or v > upper def anomalies(s, key, lower=0.0, upper=0.0): """Separate anomalous data points from the in-liers. """ inliers = [] outliers = [] for i in s: if outside(i, key, lower, upper): outliers.append(i) else: inliers.append(i) return inliers, outliers ceilometer-2014.1.5/ceilometer/alarm/partition/0000775000567000056700000000000012540643223022472 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/alarm/partition/coordination.py0000664000567000056700000003101112540642615025534 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 Red Hat, Inc # # Authors: Eoghan Glynn # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import math import random import uuid from ceilometer.alarm import rpc as rpc_alarm from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log from ceilometer.openstack.common import timeutils LOG = log.getLogger(__name__) class PartitionIdentity(object): """Representation of a partition's identity for age comparison.""" def __init__(self, uuid, priority): self.uuid = uuid self.priority = priority def __repr__(self): return '%s:%s' % (self.uuid, self.priority) def __hash__(self): return hash((self.uuid, self.priority)) def __eq__(self, other): if not isinstance(other, PartitionIdentity): return False return self.priority == other.priority and self.uuid == other.uuid def __ne__(self, other): return not self.__eq__(other) def __lt__(self, other): if not other: return True if not isinstance(other, PartitionIdentity): return False older = self.priority < other.priority tie_broken = (self.priority == other.priority and self.uuid < other.uuid) return older or tie_broken def __gt__(self, other): return not (self.__lt__(other) or self.__eq__(other)) class PartitionCoordinator(object): """Implements the alarm partition coordination protocol. A simple protocol based on AMQP fanout RPC is used. 
All available partitions report their presence periodically. The priority of each partition in terms of assuming mastership is determined by earliest start-time (with a UUID-based tiebreaker in the unlikely event of a time clash). A single partition assumes mastership at any given time, taking responsibility for allocating the alarms to be evaluated across the set of currently available partitions. When a partition lifecycle event is detected (i.e. a pre-existing partition fails to report its presence, or a new one is started up), a complete rebalance of the alarms is initiated. Individual alarm lifecycle events, on the other hand, do not require a full re-balance. Instead new alarms are allocated as they are detected, whereas deleted alarms are initially allowed to remain within the allocation (as the individual evaluators are tolerant of assigned alarms not existing, and the deleted alarms should be randomly distributed over the partitions). However once the number of alarms deleted since the last rebalance reaches a certain limit, a rebalance will be initiated to maintain equity. As presence reports are received, each partition keeps track of the oldest partition it currently knows about, allowing an assumption of mastership to be aborted if an older partition belatedly reports. """ def __init__(self): # uniqueness is based on a combination of starting timestamp # and UUID self.start = timeutils.utcnow() self.this = PartitionIdentity(str(uuid.uuid4()), float(self.start.strftime('%s.%f'))) self.oldest = None # fan-out RPC self.coordination_rpc = rpc_alarm.RPCAlarmPartitionCoordination() # state maintained by the master self.is_master = False self.presence_changed = False self.reports = {} self.last_alarms = set() self.deleted_alarms = set() # alarms for evaluation, relevant to all partitions regardless # of role self.assignment = [] def _distribute(self, alarms, rebalance): """Distribute alarms over known set of evaluators. :param alarms: the alarms to distribute :param rebalance: true if this is a full rebalance :return: true if the distribution completed, false if aborted """ verb = 'assign' if rebalance else 'allocate' method = (self.coordination_rpc.assign if rebalance else self.coordination_rpc.allocate) LOG.debug(_('triggering %s') % verb) LOG.debug(_('known evaluators %s') % self.reports) per_evaluator = int(math.ceil(len(alarms) / float(len(self.reports) + 1))) LOG.debug(_('per evaluator allocation %s') % per_evaluator) # for small distributions (e.g. of newly created alarms) # we deliberately skew to non-master evaluators evaluators = self.reports.keys() random.shuffle(evaluators) offset = 0 for evaluator in evaluators: # TODO(eglynn): use pagination in the alarms API to chunk large # large allocations if self.oldest < self.this: LOG.warn(_('%(this)s bailing on distribution cycle ' 'as older partition detected: %(older)s') % dict(this=self.this, older=self.oldest)) return False allocation = alarms[offset:offset + per_evaluator] if allocation: LOG.debug(_('%(verb)s-ing %(alloc)s to %(eval)s') % dict(verb=verb, alloc=allocation, eval=evaluator)) method(evaluator.uuid, allocation) offset += per_evaluator LOG.debug(_('master taking %s for self') % alarms[offset:]) if rebalance: self.assignment = alarms[offset:] else: self.assignment.extend(alarms[offset:]) return True def _deletion_requires_rebalance(self, alarms): """Track the level of deletion activity since the last full rebalance. We delay rebalancing until a certain threshold of deletion activity has occurred. 
:param alarms: current set of alarms :return: True if the level of alarm deletion since the last rebalance is sufficient so as to require a full rebalance """ deleted_alarms = self.last_alarms - set(alarms) LOG.debug(_('newly deleted alarms %s') % deleted_alarms) self.deleted_alarms.update(deleted_alarms) if len(self.deleted_alarms) > len(alarms) / 5: LOG.debug(_('alarm deletion activity requires rebalance')) self.deleted_alarms = set() return True return False def _record_oldest(self, partition, stale=False): """Check if reported partition is the oldest we know about. :param partition: reported partition :param stale: true if reported partition detected as stale. """ if stale and self.oldest == partition: # current oldest partition detected as stale self.oldest = None elif not self.oldest: # no known oldest partition self.oldest = partition elif partition < self.oldest: # new oldest self.oldest = partition def _is_master(self, interval): """Determine if the current partition is the master.""" now = timeutils.utcnow() if timeutils.delta_seconds(self.start, now) < interval * 2: LOG.debug(_('%s still warming up') % self.this) return False is_master = True for partition, last_heard in self.reports.items(): delta = timeutils.delta_seconds(last_heard, now) LOG.debug(_('last heard from %(report)s %(delta)s seconds ago') % dict(report=partition, delta=delta)) if delta > interval * 2: del self.reports[partition] self._record_oldest(partition, stale=True) LOG.debug(_('%(this)s detects stale evaluator: %(stale)s') % dict(this=self.this, stale=partition)) self.presence_changed = True elif partition < self.this: is_master = False LOG.info(_('%(this)s sees older potential master: %(older)s') % dict(this=self.this, older=partition)) LOG.info(_('%(this)s is master?: %(is_master)s') % dict(this=self.this, is_master=is_master)) return is_master def _master_role(self, assuming, api_client): """Carry out the master role, initiating a distribution if required. :param assuming: true if newly assumed mastership :param api_client: the API client to use for alarms. :return: True if not overtaken by an older partition """ alarms = [a.alarm_id for a in api_client.alarms.list()] created_alarms = list(set(alarms) - self.last_alarms) LOG.debug(_('newly created alarms %s') % created_alarms) sufficient_deletion = self._deletion_requires_rebalance(alarms) if (assuming or sufficient_deletion or self.presence_changed): still_ahead = self._distribute(alarms, rebalance=True) elif created_alarms: still_ahead = self._distribute(list(created_alarms), rebalance=False) else: # nothing to distribute, but check anyway if overtaken still_ahead = self.this < self.oldest self.last_alarms = set(alarms) LOG.info(_('%(this)s not overtaken as master? %(still_ahead)s') % ({'this': self.this, 'still_ahead': still_ahead})) return still_ahead def check_mastership(self, eval_interval, api_client): """Periodically check if the mastership role should be assumed. :param eval_interval: the alarm evaluation interval :param api_client: the API client to use for alarms. 
""" LOG.debug(_('%s checking mastership status') % self.this) try: assuming = not self.is_master self.is_master = (self._is_master(eval_interval) and self._master_role(assuming, api_client)) self.presence_changed = False except Exception: LOG.exception(_('mastership check failed')) def presence(self, uuid, priority): """Accept an incoming report of presence.""" report = PartitionIdentity(uuid, priority) if report != self.this: if report not in self.reports: self.presence_changed = True self._record_oldest(report) self.reports[report] = timeutils.utcnow() LOG.debug(_('%(this)s knows about %(reports)s') % dict(this=self.this, reports=self.reports)) def assign(self, uuid, alarms): """Accept an incoming alarm assignment.""" if uuid == self.this.uuid: LOG.debug(_('%(this)s got assignment: %(alarms)s') % dict(this=self.this, alarms=alarms)) self.assignment = alarms def allocate(self, uuid, alarms): """Accept an incoming alarm allocation.""" if uuid == self.this.uuid: LOG.debug(_('%(this)s got allocation: %(alarms)s') % dict(this=self.this, alarms=alarms)) self.assignment.extend(alarms) def report_presence(self): """Report the presence of the current partition.""" LOG.debug(_('%s reporting presence') % self.this) try: self.coordination_rpc.presence(self.this.uuid, self.this.priority) except Exception: LOG.exception(_('presence reporting failed')) def assigned_alarms(self, api_client): """Return the alarms assigned to the current partition.""" if not self.assignment: LOG.debug(_('%s has no assigned alarms to evaluate') % self.this) return [] try: LOG.debug(_('%(this)s alarms for evaluation: %(alarms)s') % dict(this=self.this, alarms=self.assignment)) return [a for a in api_client.alarms.list(q=[{'field': 'enabled', 'value': True}]) if a.alarm_id in self.assignment] except Exception: LOG.exception(_('assignment retrieval failed')) return [] ceilometer-2014.1.5/ceilometer/alarm/partition/__init__.py0000664000567000056700000000000012540642615024575 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/collector.py0000664000567000056700000001003212540642615021725 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012-2013 eNovance # # Author: Julien Danjou # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import socket import msgpack from oslo.config import cfg from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log from ceilometer.openstack.common.rpc import dispatcher as rpc_dispatcher from ceilometer.openstack.common.rpc import service as rpc_service from ceilometer.openstack.common import units from ceilometer import service OPTS = [ cfg.StrOpt('udp_address', default='0.0.0.0', help='Address to which the UDP socket is bound. 
Set to ' 'an empty string to disable.'), cfg.IntOpt('udp_port', default=4952, help='Port to which the UDP socket is bound.'), ] cfg.CONF.register_opts(OPTS, group="collector") cfg.CONF.import_opt('rpc_backend', 'ceilometer.openstack.common.rpc') cfg.CONF.import_opt('metering_topic', 'ceilometer.publisher.rpc', group="publisher_rpc") LOG = log.getLogger(__name__) class CollectorService(service.DispatchedService, rpc_service.Service): """Listener for the collector service.""" def start(self): """Bind the UDP socket and handle incoming data.""" if cfg.CONF.collector.udp_address: self.tg.add_thread(self.start_udp) if cfg.CONF.rpc_backend: super(CollectorService, self).start() if not cfg.CONF.collector.udp_address: # Add a dummy thread to have wait() working self.tg.add_timer(604800, lambda: None) def start_udp(self): udp = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) udp.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) udp.bind((cfg.CONF.collector.udp_address, cfg.CONF.collector.udp_port)) self.udp_run = True while self.udp_run: # NOTE(jd) Arbitrary limit of 64K because that ought to be # enough for anybody. data, source = udp.recvfrom(64 * units.Ki) try: sample = msgpack.loads(data) except Exception: LOG.warn(_("UDP: Cannot decode data sent by %s"), str(source)) else: try: LOG.debug(_("UDP: Storing %s"), str(sample)) self.dispatcher_manager.map_method('record_metering_data', sample) except Exception: LOG.exception(_("UDP: Unable to store meter")) def stop(self): self.udp_run = False super(CollectorService, self).stop() def initialize_service_hook(self, service): '''Consumers must be declared before consume_thread start.''' # Set ourselves up as a separate worker for the metering data, # since the default for service is to use create_consumer(). self.conn.create_worker( cfg.CONF.publisher_rpc.metering_topic, rpc_dispatcher.RpcDispatcher([self]), 'ceilometer.collector.' + cfg.CONF.publisher_rpc.metering_topic, ) def record_metering_data(self, context, data): """RPC endpoint for messages we send to ourselves. When the notification messages are re-published through the RPC publisher, this method receives them for processing. """ self.dispatcher_manager.map_method('record_metering_data', data=data) ceilometer-2014.1.5/ceilometer/sample.py0000664000567000056700000000634012540642615021227 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012 New Dream Network, LLC (DreamHost) # Copyright © 2013 eNovance # # Authors: Doug Hellmann # Julien Danjou # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Sample class for holding data about a metering event. A Sample doesn't really do anything, but we need a way to ensure that all of the appropriate fields have been filled in by the plugins that create them. 
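For example, a pollster or notification handler typically builds a sample
along these lines (illustrative values only)::

    Sample(name='image.size', type=TYPE_GAUGE, unit='B', volume=12648448,
           user_id=None, project_id='<project-uuid>',
           resource_id='<image-uuid>', timestamp='2014-01-01T00:00:00Z',
           resource_metadata={})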
""" import copy import uuid from oslo.config import cfg OPTS = [ cfg.StrOpt('sample_source', default='openstack', deprecated_name='counter_source', help='Source for samples emitted on this instance.'), ] cfg.CONF.register_opts(OPTS) # Fields explanation: # # Source: the source of this sample # Name: the name of the meter, must be unique # Type: the type of the meter, must be either: # - cumulative: the value is incremented and never reset to 0 # - delta: the value is reset to 0 each time it is sent # - gauge: the value is an absolute value and is not a counter # Unit: the unit of the meter # Volume: the sample value # User ID: the user ID # Project ID: the project ID # Resource ID: the resource ID # Timestamp: when the sample has been read # Resource metadata: various metadata class Sample(object): def __init__(self, name, type, unit, volume, user_id, project_id, resource_id, timestamp, resource_metadata, source=None): self.name = name self.type = type self.unit = unit self.volume = volume self.user_id = user_id self.project_id = project_id self.resource_id = resource_id self.timestamp = timestamp self.resource_metadata = resource_metadata self.source = source or cfg.CONF.sample_source self.id = str(uuid.uuid1()) def as_dict(self): return copy.copy(self.__dict__) @classmethod def from_notification(cls, name, type, volume, unit, user_id, project_id, resource_id, message, source=None): metadata = copy.copy(message['payload']) metadata['event_type'] = message['event_type'] metadata['host'] = message['publisher_id'] return cls(name=name, type=type, volume=volume, unit=unit, user_id=user_id, project_id=project_id, resource_id=resource_id, timestamp=message['timestamp'], resource_metadata=metadata, source=source) TYPE_GAUGE = 'gauge' TYPE_DELTA = 'delta' TYPE_CUMULATIVE = 'cumulative' TYPES = (TYPE_GAUGE, TYPE_DELTA, TYPE_CUMULATIVE) ceilometer-2014.1.5/ceilometer/pipeline.py0000664000567000056700000004554112540642615021561 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 Intel Corp. # Copyright © 2014 Red Hat, Inc # # Authors: Yunhong Jiang # Eoghan Glynn # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import fnmatch import itertools import operator import os from oslo.config import cfg import yaml from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log from ceilometer import publisher from ceilometer import transformer as xformer OPTS = [ cfg.StrOpt('pipeline_cfg_file', default="pipeline.yaml", help="Configuration file for pipeline definition." 
), ] cfg.CONF.register_opts(OPTS) LOG = log.getLogger(__name__) class PipelineException(Exception): def __init__(self, message, pipeline_cfg): self.msg = message self.pipeline_cfg = pipeline_cfg def __str__(self): return 'Pipeline %s: %s' % (self.pipeline_cfg, self.msg) class PublishContext(object): def __init__(self, context, pipelines=[]): self.pipelines = set(pipelines) self.context = context def add_pipelines(self, pipelines): self.pipelines.update(pipelines) def __enter__(self): def p(samples): for p in self.pipelines: p.publish_samples(self.context, samples) return p def __exit__(self, exc_type, exc_value, traceback): for p in self.pipelines: p.flush(self.context) class Source(object): """Represents a source of samples, in effect a set of pollsters and/or notification handlers emitting samples for a set of matching meters. Each source encapsulates meter name matching, polling interval determination, optional resource enumeration or discovery, and mapping to one or more sinks for publication. """ def __init__(self, cfg): self.cfg = cfg try: self.name = cfg['name'] try: self.interval = int(cfg['interval']) except ValueError: raise PipelineException("Invalid interval value", cfg) # Support 'counters' for backward compatibility self.meters = cfg.get('meters', cfg.get('counters')) self.sinks = cfg.get('sinks') except KeyError as err: raise PipelineException( "Required field %s not specified" % err.args[0], cfg) if self.interval <= 0: raise PipelineException("Interval value should > 0", cfg) self.resources = cfg.get('resources') or [] if not isinstance(self.resources, list): raise PipelineException("Resources should be a list", cfg) self.discovery = cfg.get('discovery') or [] if not isinstance(self.discovery, list): raise PipelineException("Discovery should be a list", cfg) self._check_meters() def __str__(self): return self.name def _check_meters(self): """Meter rules checking At least one meaningful meter exist Included type and excluded type meter can't co-exist at the same pipeline Included type meter and wildcard can't co-exist at same pipeline """ meters = self.meters if not meters: raise PipelineException("No meter specified", self.cfg) if [x for x in meters if x[0] not in '!*'] and \ [x for x in meters if x[0] == '!']: raise PipelineException( "Both included and excluded meters specified", cfg) if '*' in meters and [x for x in meters if x[0] not in '!*']: raise PipelineException( "Included meters specified with wildcard", self.cfg) # (yjiang5) To support meters like instance:m1.tiny, # which include variable part at the end starting with ':'. # Hope we will not add such meters in future. 
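    # NOTE(editor): illustrative examples only, not part of the original
    # source. Under the rules enforced by _check_meters()/support_meter():
    #     meters: ['*']                 -> every meter matches
    #     meters: ['!disk.*', '!cpu']   -> everything except disk.* and cpu
    #     meters: ['cpu', 'memory']     -> only the listed meters
    #     meters: ['cpu', '!disk.*']    -> rejected (included mixed with excluded)
    #     meters: ['*', 'cpu']          -> rejected (wildcard mixed with included)
    # Variable meters such as 'instance:m1.tiny' are matched by listing
    # 'instance:*', which _variable_meter_name() below normalises to.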
@staticmethod def _variable_meter_name(name): m = name.partition(':') if m[1] == ':': return m[1].join((m[0], '*')) else: return name def support_meter(self, meter_name): meter_name = self._variable_meter_name(meter_name) # Special case: if we only have negation, we suppose the default is # allow default = all(meter.startswith('!') for meter in self.meters) # Support wildcard like storage.* and !disk.* # Start with negation, we consider that the order is deny, allow if any(fnmatch.fnmatch(meter_name, meter[1:]) for meter in self.meters if meter[0] == '!'): return False if any(fnmatch.fnmatch(meter_name, meter) for meter in self.meters if meter[0] != '!'): return True return default def check_sinks(self, sinks): if not self.sinks: raise PipelineException( "No sink defined in source %s" % self, self.cfg) for sink in self.sinks: if sink not in sinks: raise PipelineException( "Dangling sink %s from source %s" % (sink, self), self.cfg) class Sink(object): """Represents a sink for the transformation and publication of samples emitted from a related source. Each sink config is concerned *only* with the transformation rules and publication conduits for samples. In effect, a sink describes a chain of handlers. The chain starts with zero or more transformers and ends with one or more publishers. The first transformer in the chain is passed samples from the corresponding source, takes some action such as deriving rate of change, performing unit conversion, or aggregating, before passing the modified sample to next step. The subsequent transformers, if any, handle the data similarly. At the end of the chain, publishers publish the data. The exact publishing method depends on publisher type, for example, pushing into data storage via the message bus providing guaranteed delivery, or for loss-tolerant samples UDP may be used. If no transformers are included in the chain, the publishers are passed samples directly from the sink which are published unchanged. 
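    A decoupled-format sink entry therefore looks roughly like this (names and
    parameters are illustrative only; see the PipelineManager docstring below
    for the full configuration schema)::

        {"name": "meter_sink",
         "transformers": [{"name": "rate_of_change",
                           "parameters": {"target": {"name": "cpu_util"}}}],
         "publishers": ["rpc://"]}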
""" def __init__(self, cfg, transformer_manager): self.cfg = cfg try: self.name = cfg['name'] # It's legal to have no transformer specified self.transformer_cfg = cfg['transformers'] or [] except KeyError as err: raise PipelineException( "Required field %s not specified" % err.args[0], cfg) if not cfg.get('publishers'): raise PipelineException("No publisher specified", cfg) self.publishers = [] for p in cfg['publishers']: if '://' not in p: # Support old format without URL p = p + "://" try: self.publishers.append(publisher.get_publisher(p)) except Exception: LOG.exception(_("Unable to load publisher %s"), p) self.transformers = self._setup_transformers(cfg, transformer_manager) def __str__(self): return self.name def _setup_transformers(self, cfg, transformer_manager): transformer_cfg = cfg['transformers'] or [] transformers = [] for transformer in transformer_cfg: parameter = transformer['parameters'] or {} try: ext = transformer_manager.get_ext(transformer['name']) except KeyError: raise PipelineException( "No transformer named %s loaded" % transformer['name'], cfg) transformers.append(ext.plugin(**parameter)) LOG.info(_( "Pipeline %(pipeline)s: Setup transformer instance %(name)s " "with parameter %(param)s") % ({'pipeline': self, 'name': transformer['name'], 'param': parameter})) return transformers def _transform_sample(self, start, ctxt, sample): try: for transformer in self.transformers[start:]: sample = transformer.handle_sample(ctxt, sample) if not sample: LOG.debug(_( "Pipeline %(pipeline)s: Sample dropped by " "transformer %(trans)s") % ({'pipeline': self, 'trans': transformer})) return return sample except Exception as err: LOG.warning(_("Pipeline %(pipeline)s: " "Exit after error from transformer " "%(trans)s for %(smp)s") % ({'pipeline': self, 'trans': transformer, 'smp': sample})) LOG.exception(err) def _publish_samples(self, start, ctxt, samples): """Push samples into pipeline for publishing. :param start: The first transformer that the sample will be injected. This is mainly for flush() invocation that transformer may emit samples. :param ctxt: Execution context from the manager or service. :param samples: Sample list. """ transformed_samples = [] for sample in samples: LOG.debug(_( "Pipeline %(pipeline)s: Transform sample " "%(smp)s from %(trans)s transformer") % ({'pipeline': self, 'smp': sample, 'trans': start})) sample = self._transform_sample(start, ctxt, sample) if sample: transformed_samples.append(sample) if transformed_samples: for p in self.publishers: try: p.publish_samples(ctxt, transformed_samples) except Exception: LOG.exception(_( "Pipeline %(pipeline)s: Continue after error " "from publisher %(pub)s") % ({'pipeline': self, 'pub': p})) def publish_samples(self, ctxt, samples): for meter_name, samples in itertools.groupby( sorted(samples, key=operator.attrgetter('name')), operator.attrgetter('name')): self._publish_samples(0, ctxt, samples) def flush(self, ctxt): """Flush data after all samples have been injected to pipeline.""" for (i, transformer) in enumerate(self.transformers): try: self._publish_samples(i + 1, ctxt, list(transformer.flush(ctxt))) except Exception as err: LOG.warning(_( "Pipeline %(pipeline)s: Error flushing " "transformer %(trans)s") % ({'pipeline': self, 'trans': transformer})) LOG.exception(err) class Pipeline(object): """Represents a coupling between a sink and a corresponding source. 
""" def __init__(self, source, sink): self.source = source self.sink = sink self.name = str(self) def __str__(self): return (self.source.name if self.source.name == self.sink.name else '%s:%s' % (self.source.name, self.sink.name)) def get_interval(self): return self.source.interval @property def resources(self): return self.source.resources @property def discovery(self): return self.source.discovery def support_meter(self, meter_name): return self.source.support_meter(meter_name) @property def publishers(self): return self.sink.publishers def publish_sample(self, ctxt, sample): self.publish_samples(ctxt, [sample]) def publish_samples(self, ctxt, samples): supported = [s for s in samples if self.source.support_meter(s.name)] self.sink.publish_samples(ctxt, supported) def flush(self, ctxt): self.sink.flush(ctxt) class PipelineManager(object): """Pipeline Manager Pipeline manager sets up pipelines according to config file Usually only one pipeline manager exists in the system. """ def __init__(self, cfg, transformer_manager): """Setup the pipelines according to config. The configuration is supported in one of two forms: 1. Deprecated: the source and sink configuration are conflated as a list of consolidated pipelines. The pipelines are defined as a list of dictionaries each specifying the target samples, the transformers involved, and the target publishers, for example: [{"name": pipeline_1, "interval": interval_time, "meters" : ["meter_1", "meter_2"], "resources": ["resource_uri1", "resource_uri2"], "transformers": [ {"name": "Transformer_1", "parameters": {"p1": "value"}}, {"name": "Transformer_2", "parameters": {"p1": "value"}}, ], "publishers": ["publisher_1", "publisher_2"] }, {"name": pipeline_2, "interval": interval_time, "meters" : ["meter_3"], "publishers": ["publisher_3"] }, ] 2. Decoupled: the source and sink configuration are separately specified before being linked together. This allows source- specific configuration, such as resource discovery, to be kept focused only on the fine-grained source while avoiding the necessity for wide duplication of sink-related config. The configuration is provided in the form of separate lists of dictionaries defining sources and sinks, for example: {"sources": [{"name": source_1, "interval": interval_time, "meters" : ["meter_1", "meter_2"], "resources": ["resource_uri1", "resource_uri2"], "sinks" : ["sink_1", "sink_2"] }, {"name": source_2, "interval": interval_time, "meters" : ["meter_3"], "sinks" : ["sink_2"] }, ], "sinks": [{"name": sink_1, "transformers": [ {"name": "Transformer_1", "parameters": {"p1": "value"}}, {"name": "Transformer_2", "parameters": {"p1": "value"}}, ], "publishers": ["publisher_1", "publisher_2"] }, {"name": sink_2, "publishers": ["publisher_3"] }, ] } The semantics of the common individual configuration elements are identical in the deprecated and decoupled version. The interval determines the cadence of sample injection into the pipeline where samples are produced under the direct control of an agent, i.e. via a polling cycle as opposed to incoming notifications. Valid meter format is '*', '!meter_name', or 'meter_name'. '*' is wildcard symbol means any meters; '!meter_name' means "meter_name" will be excluded; 'meter_name' means 'meter_name' will be included. The 'meter_name" is Sample name field. For meter names with variable like "instance:m1.tiny", it's "instance:*". Valid meters definition is all "included meter names", all "excluded meter names", wildcard and "excluded meter names", or only wildcard. 
The resources is list of URI indicating the resources from where the meters should be polled. It's optional and it's up to the specific pollster to decide how to use it. Transformer's name is plugin name in setup.cfg. Publisher's name is plugin name in setup.cfg """ self.pipelines = [] if 'sources' in cfg or 'sinks' in cfg: if not ('sources' in cfg and 'sinks' in cfg): raise PipelineException("Both sources & sinks are required", cfg) LOG.info(_('detected decoupled pipeline config format')) sources = [Source(s) for s in cfg.get('sources', [])] sinks = dict((s['name'], Sink(s, transformer_manager)) for s in cfg.get('sinks', [])) for source in sources: source.check_sinks(sinks) for target in source.sinks: self.pipelines.append(Pipeline(source, sinks[target])) else: LOG.warning(_('detected deprecated pipeline config format')) for pipedef in cfg: source = Source(pipedef) sink = Sink(pipedef, transformer_manager) self.pipelines.append(Pipeline(source, sink)) def publisher(self, context): """Build a new Publisher for these manager pipelines. :param context: The context. """ return PublishContext(context, self.pipelines) def setup_pipeline(transformer_manager=None): """Setup pipeline manager according to yaml config file.""" cfg_file = cfg.CONF.pipeline_cfg_file if not os.path.exists(cfg_file): cfg_file = cfg.CONF.find_file(cfg_file) LOG.debug(_("Pipeline config file: %s"), cfg_file) with open(cfg_file) as fap: data = fap.read() pipeline_cfg = yaml.safe_load(data) LOG.info(_("Pipeline config: %s"), pipeline_cfg) return PipelineManager(pipeline_cfg, transformer_manager or xformer.TransformerExtensionManager( 'ceilometer.transformer', )) ceilometer-2014.1.5/ceilometer/cli.py0000664000567000056700000001414412540642615020516 0ustar jenkinsjenkins00000000000000#!/usr/bin/env python # -*- encoding: utf-8 -*- # # Copyright © 2012-2014 Julien Danjou # # Author: Julien Danjou # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Command line tool for creating meter for Ceilometer. 
""" import logging import sys import eventlet # NOTE(jd) We need to monkey patch the socket and select module for, # at least, oslo.rpc, otherwise everything's blocked on its first read() # or select() eventlet.monkey_patch(socket=True, select=True, thread=True) from oslo.config import cfg from ceilometer.alarm import service as alarm_service from ceilometer.api import app from ceilometer.central import manager as central_manager from ceilometer import collector from ceilometer.compute import manager as compute_manager from ceilometer import notification from ceilometer.openstack.common import context from ceilometer.openstack.common import importutils from ceilometer.openstack.common import service as os_service from ceilometer.openstack.common import timeutils from ceilometer import pipeline from ceilometer import sample from ceilometer import service from ceilometer import storage from ceilometer import transformer OPTS = [ cfg.StrOpt('evaluation_service', default='ceilometer.alarm.service.SingletonAlarmService', help='Class to launch as alarm evaluation service.'), ] cfg.CONF.register_opts(OPTS, group='alarm') cfg.CONF.import_opt('time_to_live', 'ceilometer.storage', group='database') LOG = logging.getLogger(__name__) def alarm_notifier(): service.prepare_service() os_service.launch(alarm_service.AlarmNotifierService( cfg.CONF.host, 'ceilometer.alarm')).wait() def alarm_evaluator(): service.prepare_service() eval_service = importutils.import_object(cfg.CONF.alarm.evaluation_service) os_service.launch(eval_service).wait() def agent_central(): service.prepare_service() os_service.launch(central_manager.AgentManager()).wait() def agent_compute(): service.prepare_service() os_service.launch(compute_manager.AgentManager()).wait() def agent_notification(): service.prepare_service() launcher = os_service.ProcessLauncher() launcher.launch_service( notification.NotificationService(cfg.CONF.host, 'ceilometer.agent.notification'), workers=service.get_workers('notification')) launcher.wait() def api(): service.prepare_service() srv = app.build_server() srv.serve_forever() def collector_service(): service.prepare_service() launcher = os_service.ProcessLauncher() launcher.launch_service( collector.CollectorService(cfg.CONF.host, 'ceilometer.collector'), workers=service.get_workers('collector')) launcher.wait() def storage_dbsync(): service.prepare_service() storage.get_connection(cfg.CONF).upgrade() def storage_expirer(): service.prepare_service() if cfg.CONF.database.time_to_live > 0: LOG.debug(_("Clearing expired metering data")) storage_conn = storage.get_connection(cfg.CONF) storage_conn.clear_expired_metering_data( cfg.CONF.database.time_to_live) else: LOG.info(_("Nothing to clean, database time to live is disabled")) def send_sample(): cfg.CONF.register_cli_opts([ cfg.StrOpt('sample-name', short='n', help='Meter name.', required=True), cfg.StrOpt('sample-type', short='y', help='Meter type (gauge, delta, cumulative).', default='gauge', required=True), cfg.StrOpt('sample-unit', short='U', help='Meter unit.', default=None), cfg.IntOpt('sample-volume', short='l', help='Meter volume value.', default=1), cfg.StrOpt('sample-resource', short='r', help='Meter resource id.', required=True), cfg.StrOpt('sample-user', short='u', help='Meter user id.'), cfg.StrOpt('sample-project', short='p', help='Meter project id.'), cfg.StrOpt('sample-timestamp', short='i', help='Meter timestamp.', default=timeutils.utcnow().isoformat()), cfg.StrOpt('sample-metadata', short='m', help='Meter metadata.'), ]) 
service.prepare_service() # Set up logging to use the console console = logging.StreamHandler(sys.stderr) console.setLevel(logging.DEBUG) formatter = logging.Formatter('%(message)s') console.setFormatter(formatter) root_logger = logging.getLogger('') root_logger.addHandler(console) root_logger.setLevel(logging.DEBUG) pipeline_manager = pipeline.setup_pipeline( transformer.TransformerExtensionManager( 'ceilometer.transformer', ), ) with pipeline_manager.publisher(context.get_admin_context()) as p: p([sample.Sample( name=cfg.CONF.sample_name, type=cfg.CONF.sample_type, unit=cfg.CONF.sample_unit, volume=cfg.CONF.sample_volume, user_id=cfg.CONF.sample_user, project_id=cfg.CONF.sample_project, resource_id=cfg.CONF.sample_resource, timestamp=cfg.CONF.sample_timestamp, resource_metadata=cfg.CONF.sample_metadata and eval( cfg.CONF.sample_metadata))]) ceilometer-2014.1.5/ceilometer/image/0000775000567000056700000000000012540643223020447 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/image/notifications.py0000664000567000056700000000760412540642615023705 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012 Red Hat, Inc # # Author: Eoghan Glynn # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Handler for producing image metering messages from glance notification events. """ from oslo.config import cfg from ceilometer import plugin from ceilometer import sample OPTS = [ cfg.StrOpt('glance_control_exchange', default='glance', help="Exchange name for Glance notifications."), ] cfg.CONF.register_opts(OPTS) class ImageBase(plugin.NotificationBase): """Base class for image counting.""" @staticmethod def get_exchange_topics(conf): """Return a sequence of ExchangeTopics defining the exchange and topics to be connected for this plugin. 
""" return [ plugin.ExchangeTopics( exchange=conf.glance_control_exchange, topics=set(topic + ".info" for topic in conf.notification_topics)), ] class ImageCRUDBase(ImageBase): event_types = [ 'image.update', 'image.upload', 'image.delete', ] class ImageCRUD(ImageCRUDBase): def process_notification(self, message): yield sample.Sample.from_notification( name=message['event_type'], type=sample.TYPE_DELTA, unit='image', volume=1, resource_id=message['payload']['id'], user_id=None, project_id=message['payload']['owner'], message=message) class Image(ImageCRUDBase): def process_notification(self, message): yield sample.Sample.from_notification( name='image', type=sample.TYPE_GAUGE, unit='image', volume=1, resource_id=message['payload']['id'], user_id=None, project_id=message['payload']['owner'], message=message) class ImageSize(ImageCRUDBase): def process_notification(self, message): yield sample.Sample.from_notification( name='image.size', type=sample.TYPE_GAUGE, unit='B', volume=message['payload']['size'], resource_id=message['payload']['id'], user_id=None, project_id=message['payload']['owner'], message=message) class ImageDownload(ImageBase): """Emit image_download sample when an image is downloaded.""" event_types = ['image.send'] def process_notification(self, message): yield sample.Sample.from_notification( name='image.download', type=sample.TYPE_DELTA, unit='B', volume=message['payload']['bytes_sent'], resource_id=message['payload']['image_id'], user_id=message['payload']['receiver_user_id'], project_id=message['payload']['receiver_tenant_id'], message=message) class ImageServe(ImageBase): """Emit image_serve sample when an image is served out.""" event_types = ['image.send'] def process_notification(self, message): yield sample.Sample.from_notification( name='image.serve', type=sample.TYPE_DELTA, unit='B', volume=message['payload']['bytes_sent'], resource_id=message['payload']['image_id'], user_id=None, project_id=message['payload']['owner_id'], message=message) ceilometer-2014.1.5/ceilometer/image/__init__.py0000664000567000056700000000000012540642613022550 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/image/glance.py0000664000567000056700000001172212540642615022261 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012 New Dream Network, LLC (DreamHost) # # Author: Julien Danjou # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Common code for working with images """ from __future__ import absolute_import import itertools import glanceclient from oslo.config import cfg from ceilometer.openstack.common import timeutils from ceilometer import plugin from ceilometer import sample class _Base(plugin.PollsterBase): @staticmethod def get_glance_client(ksclient): endpoint = ksclient.service_catalog.url_for( service_type='image', endpoint_type=cfg.CONF.service_credentials.os_endpoint_type) # hard-code v1 glance API version selection while v2 API matures service_credentials = cfg.CONF.service_credentials return glanceclient.Client('1', endpoint, token=ksclient.auth_token, cacert=service_credentials.os_cacert, insecure=service_credentials.insecure) def _get_images(self, ksclient): client = self.get_glance_client(ksclient) #TODO(eglynn): use pagination to protect against unbounded # memory usage rawImageList = list(itertools.chain( client.images.list(filters={"is_public": True}), #TODO(eglynn): extend glance API with all_tenants logic to # avoid second call to retrieve private images client.images.list(filters={"is_public": False}))) # When retrieving images from glance, glance will check # whether the user is of 'admin_role' which is # configured in glance-api.conf. If the user is of # admin_role, and is querying public images(which means # that the 'is_public' param is set to be True), # glance will ignore 'is_public' parameter and returns # all the public images together with private images. # As a result, if the user/tenant has an admin role # for ceilometer to collect image list, # the _Base.iter_images method will return a image list # which contains duplicate images. Add the following # code to avoid recording down duplicate image events. imageIdSet = set(image.id for image in rawImageList) for image in rawImageList: if image.id in imageIdSet: imageIdSet -= set([image.id]) yield image def _iter_images(self, ksclient, cache): """Iterate over all images.""" if 'images' not in cache: cache['images'] = list(self._get_images(ksclient)) return iter(cache['images']) @staticmethod def extract_image_metadata(image): return dict((k, getattr(image, k)) for k in [ "status", "is_public", "name", "deleted", "container_format", "created_at", "disk_format", "updated_at", "properties", "min_disk", "protected", "checksum", "deleted_at", "min_ram", "size", ]) class ImagePollster(_Base): def get_samples(self, manager, cache, resources=[]): for image in self._iter_images(manager.keystone, cache): yield sample.Sample( name='image', type=sample.TYPE_GAUGE, unit='image', volume=1, user_id=None, project_id=image.owner, resource_id=image.id, timestamp=timeutils.isotime(), resource_metadata=self.extract_image_metadata(image), ) class ImageSizePollster(_Base): def get_samples(self, manager, cache, resources=[]): for image in self._iter_images(manager.keystone, cache): yield sample.Sample( name='image.size', type=sample.TYPE_GAUGE, unit='B', volume=image.size, user_id=None, project_id=image.owner, resource_id=image.id, timestamp=timeutils.isotime(), resource_metadata=self.extract_image_metadata(image), ) ceilometer-2014.1.5/ceilometer/openstack/0000775000567000056700000000000012540643223021354 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/0000775000567000056700000000000012540643223022644 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/lockutils.py0000664000567000056700000002234512540642615025241 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. 
# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import contextlib import errno import functools import os import shutil import subprocess import sys import tempfile import threading import time import weakref from oslo.config import cfg from ceilometer.openstack.common import fileutils from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import log as logging LOG = logging.getLogger(__name__) util_opts = [ cfg.BoolOpt('disable_process_locking', default=False, help='Whether to disable inter-process locks.'), cfg.StrOpt('lock_path', default=os.environ.get("CEILOMETER_LOCK_PATH"), help=('Directory to use for lock files.')) ] CONF = cfg.CONF CONF.register_opts(util_opts) def set_defaults(lock_path): cfg.set_defaults(util_opts, lock_path=lock_path) class _InterProcessLock(object): """Lock implementation which allows multiple locks, working around issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does not require any cleanup. Since the lock is always held on a file descriptor rather than outside of the process, the lock gets dropped automatically if the process crashes, even if __exit__ is not executed. There are no guarantees regarding usage by multiple green threads in a single process here. This lock works only between processes. Exclusive access between local threads should be achieved using the semaphores in the @synchronized decorator. Note these locks are released when the descriptor is closed, so it's not safe to close the file descriptor while another green thread holds the lock. Just opening and closing the lock file can break synchronisation, so lock files must be accessed only using this abstraction. """ def __init__(self, name): self.lockfile = None self.fname = name def acquire(self): basedir = os.path.dirname(self.fname) if not os.path.exists(basedir): fileutils.ensure_tree(basedir) LOG.info(_('Created lock path: %s'), basedir) self.lockfile = open(self.fname, 'w') while True: try: # Using non-blocking locks since green threads are not # patched to deal with blocking locking calls. # Also upon reading the MSDN docs for locking(), it seems # to have a laughable 10 attempts "blocking" mechanism. 
self.trylock() LOG.debug(_('Got file lock "%s"'), self.fname) return True except IOError as e: if e.errno in (errno.EACCES, errno.EAGAIN): # external locks synchronise things like iptables # updates - give it some time to prevent busy spinning time.sleep(0.01) else: raise threading.ThreadError(_("Unable to acquire lock on" " `%(filename)s` due to" " %(exception)s") % { 'filename': self.fname, 'exception': e, }) def __enter__(self): self.acquire() return self def release(self): try: self.unlock() self.lockfile.close() LOG.debug(_('Released file lock "%s"'), self.fname) except IOError: LOG.exception(_("Could not release the acquired lock `%s`"), self.fname) def __exit__(self, exc_type, exc_val, exc_tb): self.release() def trylock(self): raise NotImplementedError() def unlock(self): raise NotImplementedError() class _WindowsLock(_InterProcessLock): def trylock(self): msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1) def unlock(self): msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1) class _PosixLock(_InterProcessLock): def trylock(self): fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB) def unlock(self): fcntl.lockf(self.lockfile, fcntl.LOCK_UN) if os.name == 'nt': import msvcrt InterProcessLock = _WindowsLock else: import fcntl InterProcessLock = _PosixLock _semaphores = weakref.WeakValueDictionary() _semaphores_lock = threading.Lock() def external_lock(name, lock_file_prefix=None): with internal_lock(name): LOG.debug(_('Attempting to grab external lock "%(lock)s"'), {'lock': name}) # NOTE(mikal): the lock name cannot contain directory # separators name = name.replace(os.sep, '_') if lock_file_prefix: sep = '' if lock_file_prefix.endswith('-') else '-' name = '%s%s%s' % (lock_file_prefix, sep, name) if not CONF.lock_path: raise cfg.RequiredOptError('lock_path') lock_file_path = os.path.join(CONF.lock_path, name) return InterProcessLock(lock_file_path) def internal_lock(name): with _semaphores_lock: try: sem = _semaphores[name] except KeyError: sem = threading.Semaphore() _semaphores[name] = sem LOG.debug(_('Got semaphore "%(lock)s"'), {'lock': name}) return sem @contextlib.contextmanager def lock(name, lock_file_prefix=None, external=False): """Context based lock This function yields a `threading.Semaphore` instance (if we don't use eventlet.monkey_patch(), else `semaphore.Semaphore`) unless external is True, in which case, it'll yield an InterProcessLock instance. :param lock_file_prefix: The lock_file_prefix argument is used to provide lock files on disk with a meaningful prefix. :param external: The external keyword argument denotes whether this lock should work across multiple processes. This means that if two different workers both run a a method decorated with @synchronized('mylock', external=True), only one of them will execute at a time. """ if external and not CONF.disable_process_locking: lock = external_lock(name, lock_file_prefix) else: lock = internal_lock(name) with lock: yield lock def synchronized(name, lock_file_prefix=None, external=False): """Synchronization decorator. Decorating a method like so:: @synchronized('mylock') def foo(self, *args): ... ensures that only one thread will execute the foo method at a time. Different methods can share the same lock:: @synchronized('mylock') def foo(self, *args): ... @synchronized('mylock') def bar(self, *args): ... This way only one of either foo or bar can be executing at a time. 
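    A hypothetical cross-process variant (illustrative only; the method
    name and prefix are made up)::

        @synchronized('mylock', lock_file_prefix='ceilometer-', external=True)
        def baz(self, *args):
            ...

    With external=True (and CONF.lock_path configured), the lock is
    backed by an InterProcessLock file on disk, so the exclusion also
    holds between separate worker processes, unless
    disable_process_locking is set.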
""" def wrap(f): @functools.wraps(f) def inner(*args, **kwargs): try: with lock(name, lock_file_prefix, external): LOG.debug(_('Got semaphore / lock "%(function)s"'), {'function': f.__name__}) return f(*args, **kwargs) finally: LOG.debug(_('Semaphore / lock released "%(function)s"'), {'function': f.__name__}) return inner return wrap def synchronized_with_prefix(lock_file_prefix): """Partial object generator for the synchronization decorator. Redefine @synchronized in each project like so:: (in nova/utils.py) from nova.openstack.common import lockutils synchronized = lockutils.synchronized_with_prefix('nova-') (in nova/foo.py) from nova import utils @utils.synchronized('mylock') def bar(self, *args): ... The lock_file_prefix argument is used to provide lock files on disk with a meaningful prefix. """ return functools.partial(synchronized, lock_file_prefix=lock_file_prefix) def main(argv): """Create a dir for locks and pass it to command from arguments If you run this: python -m openstack.common.lockutils python setup.py testr a temporary directory will be created for all your locks and passed to all your tests in an environment variable. The temporary dir will be deleted afterwards and the return value will be preserved. """ lock_dir = tempfile.mkdtemp() os.environ["CEILOMETER_LOCK_PATH"] = lock_dir try: ret_val = subprocess.call(argv[1:]) finally: shutil.rmtree(lock_dir, ignore_errors=True) return ret_val if __name__ == '__main__': sys.exit(main(sys.argv)) ceilometer-2014.1.5/ceilometer/openstack/common/db/0000775000567000056700000000000012540643223023231 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/db/exception.py0000664000567000056700000000350312540642615025606 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""DB related custom exceptions.""" from ceilometer.openstack.common.gettextutils import _ class DBError(Exception): """Wraps an implementation specific exception.""" def __init__(self, inner_exception=None): self.inner_exception = inner_exception super(DBError, self).__init__(str(inner_exception)) class DBDuplicateEntry(DBError): """Wraps an implementation specific exception.""" def __init__(self, columns=[], inner_exception=None): self.columns = columns super(DBDuplicateEntry, self).__init__(inner_exception) class DBDeadlock(DBError): def __init__(self, inner_exception=None): super(DBDeadlock, self).__init__(inner_exception) class DBInvalidUnicodeParameter(Exception): message = _("Invalid Parameter: " "Unicode is not supported by the current database.") class DbMigrationError(DBError): """Wraps migration specific exception.""" def __init__(self, message=None): super(DbMigrationError, self).__init__(str(message)) class DBConnectionError(DBError): """Wraps connection specific exception.""" pass ceilometer-2014.1.5/ceilometer/openstack/common/db/sqlalchemy/0000775000567000056700000000000012540643223025373 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/db/sqlalchemy/provision.py0000664000567000056700000001312112540642615027777 0ustar jenkinsjenkins00000000000000# Copyright 2013 Mirantis.inc # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Provision test environment for specific DB backends""" import argparse import os import random import string from six import moves import sqlalchemy from ceilometer.openstack.common.db import exception as exc SQL_CONNECTION = os.getenv('OS_TEST_DBAPI_ADMIN_CONNECTION', 'sqlite://') def _gen_credentials(*names): """Generate credentials.""" auth_dict = {} for name in names: val = ''.join(random.choice(string.ascii_lowercase) for i in moves.range(10)) auth_dict[name] = val return auth_dict def _get_engine(uri=SQL_CONNECTION): """Engine creation By default the uri is SQL_CONNECTION which is admin credentials. Call the function without arguments to get admin connection. Admin connection required to create temporary user and database for each particular test. Otherwise use existing connection to recreate connection to the temporary database. 
""" return sqlalchemy.create_engine(uri, poolclass=sqlalchemy.pool.NullPool) def _execute_sql(engine, sql, driver): """Initialize connection, execute sql query and close it.""" try: with engine.connect() as conn: if driver == 'postgresql': conn.connection.set_isolation_level(0) for s in sql: conn.execute(s) except sqlalchemy.exc.OperationalError: msg = ('%s does not match database admin ' 'credentials or database does not exist.') raise exc.DBConnectionError(msg % SQL_CONNECTION) def create_database(engine): """Provide temporary user and database for each particular test.""" driver = engine.name auth = _gen_credentials('database', 'user', 'passwd') sqls = { 'mysql': [ "drop database if exists %(database)s;", "grant all on %(database)s.* to '%(user)s'@'localhost'" " identified by '%(passwd)s';", "create database %(database)s;", ], 'postgresql': [ "drop database if exists %(database)s;", "drop user if exists %(user)s;", "create user %(user)s with password '%(passwd)s';", "create database %(database)s owner %(user)s;", ] } if driver == 'sqlite': return 'sqlite:////tmp/%s' % auth['database'] try: sql_rows = sqls[driver] except KeyError: raise ValueError('Unsupported RDBMS %s' % driver) sql_query = map(lambda x: x % auth, sql_rows) _execute_sql(engine, sql_query, driver) params = auth.copy() params['backend'] = driver return "%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s" % params def drop_database(engine, current_uri): """Drop temporary database and user after each particular test.""" engine = _get_engine(current_uri) admin_engine = _get_engine() driver = engine.name auth = {'database': engine.url.database, 'user': engine.url.username} if driver == 'sqlite': try: os.remove(auth['database']) except OSError: pass return sqls = { 'mysql': [ "drop database if exists %(database)s;", "drop user '%(user)s'@'localhost';", ], 'postgresql': [ "drop database if exists %(database)s;", "drop user if exists %(user)s;", ] } try: sql_rows = sqls[driver] except KeyError: raise ValueError('Unsupported RDBMS %s' % driver) sql_query = map(lambda x: x % auth, sql_rows) _execute_sql(admin_engine, sql_query, driver) def main(): """Controller to handle commands ::create: Create test user and database with random names. ::drop: Drop user and database created by previous command. """ parser = argparse.ArgumentParser( description='Controller to handle database creation and dropping' ' commands.', epilog='Under normal circumstances is not used directly.' 
' Used in .testr.conf to automate test database creation' ' and dropping processes.') subparsers = parser.add_subparsers( help='Subcommands to manipulate temporary test databases.') create = subparsers.add_parser( 'create', help='Create temporary test ' 'databases and users.') create.set_defaults(which='create') create.add_argument( 'instances_count', type=int, help='Number of databases to create.') drop = subparsers.add_parser( 'drop', help='Drop temporary test databases and users.') drop.set_defaults(which='drop') drop.add_argument( 'instances', nargs='+', help='List of databases uri to be dropped.') args = parser.parse_args() engine = _get_engine() which = args.which if which == "create": for i in range(int(args.instances_count)): print(create_database(engine)) elif which == "drop": for db in args.instances: drop_database(engine, db) if __name__ == "__main__": main() ceilometer-2014.1.5/ceilometer/openstack/common/db/sqlalchemy/test_base.py0000664000567000056700000001147512540642615027732 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import functools import os import fixtures from oslo.config import cfg import six from ceilometer.openstack.common.db.sqlalchemy import session from ceilometer.openstack.common.db.sqlalchemy import utils from ceilometer.openstack.common import test class DbFixture(fixtures.Fixture): """Basic database fixture. Allows to run tests on various db backends, such as SQLite, MySQL and PostgreSQL. By default use sqlite backend. To override default backend uri set env variable OS_TEST_DBAPI_CONNECTION with database admin credentials for specific backend. """ def _get_uri(self): return os.getenv('OS_TEST_DBAPI_CONNECTION', 'sqlite://') def __init__(self): super(DbFixture, self).__init__() self.conf = cfg.CONF self.conf.import_opt('connection', 'ceilometer.openstack.common.db.sqlalchemy.session', group='database') def setUp(self): super(DbFixture, self).setUp() self.conf.set_default('connection', self._get_uri(), group='database') self.addCleanup(self.conf.reset) class DbTestCase(test.BaseTestCase): """Base class for testing of DB code. Using `DbFixture`. Intended to be the main database test case to use all the tests on a given backend with user defined uri. Backend specific tests should be decorated with `backend_specific` decorator. """ FIXTURE = DbFixture def setUp(self): super(DbTestCase, self).setUp() self.useFixture(self.FIXTURE()) self.addCleanup(session.cleanup) ALLOWED_DIALECTS = ['sqlite', 'mysql', 'postgresql'] def backend_specific(*dialects): """Decorator to skip backend specific tests on inappropriate engines. ::dialects: list of dialects names under which the test will be launched. 
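    Illustrative usage (the test name is hypothetical)::

        class MyDbTestCase(DbTestCase):

            @backend_specific('mysql', 'postgresql')
            def test_mysql_pg_only_behaviour(self):
                ...

    The decorated test runs only when the current engine dialect is in
    the given list; on any other backend it is skipped.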
""" def wrap(f): @functools.wraps(f) def ins_wrap(self): if not set(dialects).issubset(ALLOWED_DIALECTS): raise ValueError( "Please use allowed dialects: %s" % ALLOWED_DIALECTS) engine = session.get_engine() if engine.name not in dialects: msg = ('The test "%s" can be run ' 'only on %s. Current engine is %s.') args = (f.__name__, ' '.join(dialects), engine.name) self.skip(msg % args) else: return f(self) return ins_wrap return wrap @six.add_metaclass(abc.ABCMeta) class OpportunisticFixture(DbFixture): """Base fixture to use default CI databases. The databases exist in OpenStack CI infrastructure. But for the correct functioning in local environment the databases must be created manually. """ DRIVER = abc.abstractproperty(lambda: None) DBNAME = PASSWORD = USERNAME = 'openstack_citest' def _get_uri(self): return utils.get_connect_string(backend=self.DRIVER, user=self.USERNAME, passwd=self.PASSWORD, database=self.DBNAME) @six.add_metaclass(abc.ABCMeta) class OpportunisticTestCase(DbTestCase): """Base test case to use default CI databases. The subclasses of the test case are running only when openstack_citest database is available otherwise a tests will be skipped. """ FIXTURE = abc.abstractproperty(lambda: None) def setUp(self): credentials = { 'backend': self.FIXTURE.DRIVER, 'user': self.FIXTURE.USERNAME, 'passwd': self.FIXTURE.PASSWORD, 'database': self.FIXTURE.DBNAME} if self.FIXTURE.DRIVER and not utils.is_backend_avail(**credentials): msg = '%s backend is not available.' % self.FIXTURE.DRIVER return self.skip(msg) super(OpportunisticTestCase, self).setUp() class MySQLOpportunisticFixture(OpportunisticFixture): DRIVER = 'mysql' class PostgreSQLOpportunisticFixture(OpportunisticFixture): DRIVER = 'postgresql' class MySQLOpportunisticTestCase(OpportunisticTestCase): FIXTURE = MySQLOpportunisticFixture class PostgreSQLOpportunisticTestCase(OpportunisticTestCase): FIXTURE = PostgreSQLOpportunisticFixture ceilometer-2014.1.5/ceilometer/openstack/common/db/sqlalchemy/models.py0000664000567000056700000000761312540642615027243 0ustar jenkinsjenkins00000000000000# Copyright (c) 2011 X.commerce, a business unit of eBay Inc. # Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Piston Cloud Computing, Inc. # Copyright 2012 Cloudscaling Group, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ SQLAlchemy models. 
""" import six from sqlalchemy import Column, Integer from sqlalchemy import DateTime from sqlalchemy.orm import object_mapper from ceilometer.openstack.common.db.sqlalchemy import session as sa from ceilometer.openstack.common import timeutils class ModelBase(object): """Base class for models.""" __table_initialized__ = False def save(self, session=None): """Save this object.""" if not session: session = sa.get_session() # NOTE(boris-42): This part of code should be look like: # session.add(self) # session.flush() # But there is a bug in sqlalchemy and eventlet that # raises NoneType exception if there is no running # transaction and rollback is called. As long as # sqlalchemy has this bug we have to create transaction # explicitly. with session.begin(subtransactions=True): session.add(self) session.flush() def __setitem__(self, key, value): setattr(self, key, value) def __getitem__(self, key): return getattr(self, key) def get(self, key, default=None): return getattr(self, key, default) @property def _extra_keys(self): """Specifies custom fields Subclasses can override this property to return a list of custom fields that should be included in their dict representation. For reference check tests/db/sqlalchemy/test_models.py """ return [] def __iter__(self): columns = dict(object_mapper(self).columns).keys() # NOTE(russellb): Allow models to specify other keys that can be looked # up, beyond the actual db columns. An example would be the 'name' # property for an Instance. columns.extend(self._extra_keys) self._i = iter(columns) return self def next(self): n = six.advance_iterator(self._i) return n, getattr(self, n) def update(self, values): """Make the model object behave like a dict.""" for k, v in six.iteritems(values): setattr(self, k, v) def iteritems(self): """Make the model object behave like a dict. Includes attributes from joins. """ local = dict(self) joined = dict([(k, v) for k, v in six.iteritems(self.__dict__) if not k[0] == '_']) local.update(joined) return six.iteritems(local) class TimestampMixin(object): created_at = Column(DateTime, default=lambda: timeutils.utcnow()) updated_at = Column(DateTime, onupdate=lambda: timeutils.utcnow()) class SoftDeleteMixin(object): deleted_at = Column(DateTime) deleted = Column(Integer, default=0) def soft_delete(self, session=None): """Mark this object as deleted.""" self.deleted = self.id self.deleted_at = timeutils.utcnow() self.save(session=session) ceilometer-2014.1.5/ceilometer/openstack/common/db/sqlalchemy/session.py0000664000567000056700000010207112540642615027435 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Session Handling for SQLAlchemy backend. 
Initializing: * Call set_defaults with the minimal of the following kwargs: sql_connection, sqlite_db Example:: session.set_defaults( sql_connection="sqlite:///var/lib/ceilometer/sqlite.db", sqlite_db="/var/lib/ceilometer/sqlite.db") Recommended ways to use sessions within this framework: * Don't use them explicitly; this is like running with AUTOCOMMIT=1. model_query() will implicitly use a session when called without one supplied. This is the ideal situation because it will allow queries to be automatically retried if the database connection is interrupted. Note: Automatic retry will be enabled in a future patch. It is generally fine to issue several queries in a row like this. Even though they may be run in separate transactions and/or separate sessions, each one will see the data from the prior calls. If needed, undo- or rollback-like functionality should be handled at a logical level. For an example, look at the code around quotas and reservation_rollback(). Examples:: def get_foo(context, foo): return (model_query(context, models.Foo). filter_by(foo=foo). first()) def update_foo(context, id, newfoo): (model_query(context, models.Foo). filter_by(id=id). update({'foo': newfoo})) def create_foo(context, values): foo_ref = models.Foo() foo_ref.update(values) foo_ref.save() return foo_ref * Within the scope of a single method, keeping all the reads and writes within the context managed by a single session. In this way, the session's __exit__ handler will take care of calling flush() and commit() for you. If using this approach, you should not explicitly call flush() or commit(). Any error within the context of the session will cause the session to emit a ROLLBACK. Database Errors like IntegrityError will be raised in session's __exit__ handler, and any try/except within the context managed by session will not be triggered. And catching other non-database errors in the session will not trigger the ROLLBACK, so exception handlers should always be outside the session, unless the developer wants to do a partial commit on purpose. If the connection is dropped before this is possible, the database will implicitly roll back the transaction. Note: statements in the session scope will not be automatically retried. If you create models within the session, they need to be added, but you do not need to call model.save() :: def create_many_foo(context, foos): session = get_session() with session.begin(): for foo in foos: foo_ref = models.Foo() foo_ref.update(foo) session.add(foo_ref) def update_bar(context, foo_id, newbar): session = get_session() with session.begin(): foo_ref = (model_query(context, models.Foo, session). filter_by(id=foo_id). first()) (model_query(context, models.Bar, session). filter_by(id=foo_ref['bar_id']). update({'bar': newbar})) Note: update_bar is a trivially simple example of using "with session.begin". Whereas create_many_foo is a good example of when a transaction is needed, it is always best to use as few queries as possible. The two queries in update_bar can be better expressed using a single query which avoids the need for an explicit transaction. It can be expressed like so:: def update_bar(context, foo_id, newbar): subq = (model_query(context, models.Foo.id). filter_by(id=foo_id). limit(1). subquery()) (model_query(context, models.Bar). filter_by(id=subq.as_scalar()). 
update({'bar': newbar})) For reference, this emits approximately the following SQL statement:: UPDATE bar SET bar = ${newbar} WHERE id=(SELECT bar_id FROM foo WHERE id = ${foo_id} LIMIT 1); Note: create_duplicate_foo is a trivially simple example of catching an exception while using "with session.begin". Here create two duplicate instances with same primary key, must catch the exception out of context managed by a single session: def create_duplicate_foo(context): foo1 = models.Foo() foo2 = models.Foo() foo1.id = foo2.id = 1 session = get_session() try: with session.begin(): session.add(foo1) session.add(foo2) except exception.DBDuplicateEntry as e: handle_error(e) * Passing an active session between methods. Sessions should only be passed to private methods. The private method must use a subtransaction; otherwise SQLAlchemy will throw an error when you call session.begin() on an existing transaction. Public methods should not accept a session parameter and should not be involved in sessions within the caller's scope. Note that this incurs more overhead in SQLAlchemy than the above means due to nesting transactions, and it is not possible to implicitly retry failed database operations when using this approach. This also makes code somewhat more difficult to read and debug, because a single database transaction spans more than one method. Error handling becomes less clear in this situation. When this is needed for code clarity, it should be clearly documented. :: def myfunc(foo): session = get_session() with session.begin(): # do some database things bar = _private_func(foo, session) return bar def _private_func(foo, session=None): if not session: session = get_session() with session.begin(subtransaction=True): # do some other database things return bar There are some things which it is best to avoid: * Don't keep a transaction open any longer than necessary. This means that your "with session.begin()" block should be as short as possible, while still containing all the related calls for that transaction. * Avoid "with_lockmode('UPDATE')" when possible. In MySQL/InnoDB, when a "SELECT ... FOR UPDATE" query does not match any rows, it will take a gap-lock. This is a form of write-lock on the "gap" where no rows exist, and prevents any other writes to that space. This can effectively prevent any INSERT into a table by locking the gap at the end of the index. Similar problems will occur if the SELECT FOR UPDATE has an overly broad WHERE clause, or doesn't properly use an index. One idea proposed at ODS Fall '12 was to use a normal SELECT to test the number of rows matching a query, and if only one row is returned, then issue the SELECT FOR UPDATE. The better long-term solution is to use INSERT .. ON DUPLICATE KEY UPDATE. However, this can not be done until the "deleted" columns are removed and proper UNIQUE constraints are added to the tables. Enabling soft deletes: * To use/enable soft-deletes, the SoftDeleteMixin must be added to your model class. For example:: class NovaBase(models.SoftDeleteMixin, models.ModelBase): pass Efficient use of soft deletes: * There are two possible ways to mark a record as deleted:: model.soft_delete() and query.soft_delete(). model.soft_delete() method works with single already fetched entry. query.soft_delete() makes only one db request for all entries that correspond to query. * In almost all cases you should use query.soft_delete(). 
Some examples:: def soft_delete_bar(): count = model_query(BarModel).find(some_condition).soft_delete() if count == 0: raise Exception("0 entries were soft deleted") def complex_soft_delete_with_synchronization_bar(session=None): if session is None: session = get_session() with session.begin(subtransactions=True): count = (model_query(BarModel). find(some_condition). soft_delete(synchronize_session=True)) # Here synchronize_session is required, because we # don't know what is going on in outer session. if count == 0: raise Exception("0 entries were soft deleted") * There is only one situation where model.soft_delete() is appropriate: when you fetch a single record, work with it, and mark it as deleted in the same transaction. :: def soft_delete_bar_model(): session = get_session() with session.begin(): bar_ref = model_query(BarModel).find(some_condition).first() # Work with bar_ref bar_ref.soft_delete(session=session) However, if you need to work with all entries that correspond to query and then soft delete them you should use query.soft_delete() method:: def soft_delete_multi_models(): session = get_session() with session.begin(): query = (model_query(BarModel, session=session). find(some_condition)) model_refs = query.all() # Work with model_refs query.soft_delete(synchronize_session=False) # synchronize_session=False should be set if there is no outer # session and these entries are not used after this. When working with many rows, it is very important to use query.soft_delete, which issues a single query. Using model.soft_delete(), as in the following example, is very inefficient. :: for bar_ref in bar_refs: bar_ref.soft_delete(session=session) # This will produce count(bar_refs) db requests. """ import functools import logging import os.path import re import time from oslo.config import cfg import six from sqlalchemy import exc as sqla_exc from sqlalchemy.interfaces import PoolListener import sqlalchemy.orm from sqlalchemy.pool import NullPool, StaticPool from sqlalchemy.sql.expression import literal_column from ceilometer.openstack.common.db import exception from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import timeutils sqlite_db_opts = [ cfg.StrOpt('sqlite_db', default='ceilometer.sqlite', help='The file name to use with SQLite'), cfg.BoolOpt('sqlite_synchronous', default=True, help='If True, SQLite uses synchronous mode'), ] database_opts = [ cfg.StrOpt('connection', default='sqlite:///' + os.path.abspath(os.path.join(os.path.dirname(__file__), '../', '$sqlite_db')), help='The SQLAlchemy connection string used to connect to the ' 'database', secret=True, deprecated_opts=[cfg.DeprecatedOpt('sql_connection', group='DEFAULT'), cfg.DeprecatedOpt('sql_connection', group='DATABASE'), cfg.DeprecatedOpt('connection', group='sql'), ]), cfg.StrOpt('slave_connection', default='', secret=True, help='The SQLAlchemy connection string used to connect to the ' 'slave database'), cfg.IntOpt('idle_timeout', default=3600, deprecated_opts=[cfg.DeprecatedOpt('sql_idle_timeout', group='DEFAULT'), cfg.DeprecatedOpt('sql_idle_timeout', group='DATABASE'), cfg.DeprecatedOpt('idle_timeout', group='sql')], help='Timeout before idle sql connections are reaped'), cfg.IntOpt('min_pool_size', default=1, deprecated_opts=[cfg.DeprecatedOpt('sql_min_pool_size', group='DEFAULT'), cfg.DeprecatedOpt('sql_min_pool_size', group='DATABASE')], help='Minimum number of SQL connections to keep open in a ' 'pool'), cfg.IntOpt('max_pool_size', default=None, 
deprecated_opts=[cfg.DeprecatedOpt('sql_max_pool_size', group='DEFAULT'), cfg.DeprecatedOpt('sql_max_pool_size', group='DATABASE')], help='Maximum number of SQL connections to keep open in a ' 'pool'), cfg.IntOpt('max_retries', default=10, deprecated_opts=[cfg.DeprecatedOpt('sql_max_retries', group='DEFAULT'), cfg.DeprecatedOpt('sql_max_retries', group='DATABASE')], help='Maximum db connection retries during startup. ' '(setting -1 implies an infinite retry count)'), cfg.IntOpt('retry_interval', default=10, deprecated_opts=[cfg.DeprecatedOpt('sql_retry_interval', group='DEFAULT'), cfg.DeprecatedOpt('reconnect_interval', group='DATABASE')], help='Interval between retries of opening a sql connection'), cfg.IntOpt('max_overflow', default=None, deprecated_opts=[cfg.DeprecatedOpt('sql_max_overflow', group='DEFAULT'), cfg.DeprecatedOpt('sqlalchemy_max_overflow', group='DATABASE')], help='If set, use this value for max_overflow with sqlalchemy'), cfg.IntOpt('connection_debug', default=0, deprecated_opts=[cfg.DeprecatedOpt('sql_connection_debug', group='DEFAULT')], help='Verbosity of SQL debugging information. 0=None, ' '100=Everything'), cfg.BoolOpt('connection_trace', default=False, deprecated_opts=[cfg.DeprecatedOpt('sql_connection_trace', group='DEFAULT')], help='Add python stack traces to SQL as comment strings'), cfg.IntOpt('pool_timeout', default=None, deprecated_opts=[cfg.DeprecatedOpt('sqlalchemy_pool_timeout', group='DATABASE')], help='If set, use this value for pool_timeout with sqlalchemy'), ] CONF = cfg.CONF CONF.register_opts(sqlite_db_opts) CONF.register_opts(database_opts, 'database') LOG = logging.getLogger(__name__) _ENGINE = None _MAKER = None _SLAVE_ENGINE = None _SLAVE_MAKER = None def set_defaults(sql_connection, sqlite_db, max_pool_size=None, max_overflow=None, pool_timeout=None): """Set defaults for configuration variables.""" cfg.set_defaults(database_opts, connection=sql_connection) cfg.set_defaults(sqlite_db_opts, sqlite_db=sqlite_db) # Update the QueuePool defaults if max_pool_size is not None: cfg.set_defaults(database_opts, max_pool_size=max_pool_size) if max_overflow is not None: cfg.set_defaults(database_opts, max_overflow=max_overflow) if pool_timeout is not None: cfg.set_defaults(database_opts, pool_timeout=pool_timeout) def cleanup(): global _ENGINE, _MAKER global _SLAVE_ENGINE, _SLAVE_MAKER if _MAKER: _MAKER.close_all() _MAKER = None if _ENGINE: _ENGINE.dispose() _ENGINE = None if _SLAVE_MAKER: _SLAVE_MAKER.close_all() _SLAVE_MAKER = None if _SLAVE_ENGINE: _SLAVE_ENGINE.dispose() _SLAVE_ENGINE = None class SqliteForeignKeysListener(PoolListener): """Ensures that the foreign key constraints are enforced in SQLite. 
The foreign key constraints are disabled by default in SQLite, so the foreign key constraints will be enabled here for every database connection """ def connect(self, dbapi_con, con_record): dbapi_con.execute('pragma foreign_keys=ON') def get_session(autocommit=True, expire_on_commit=False, sqlite_fk=False, slave_session=False, mysql_traditional_mode=False): """Return a SQLAlchemy session.""" global _MAKER global _SLAVE_MAKER maker = _MAKER if slave_session: maker = _SLAVE_MAKER if maker is None: engine = get_engine(sqlite_fk=sqlite_fk, slave_engine=slave_session, mysql_traditional_mode=mysql_traditional_mode) maker = get_maker(engine, autocommit, expire_on_commit) if slave_session: _SLAVE_MAKER = maker else: _MAKER = maker session = maker() return session # note(boris-42): In current versions of DB backends unique constraint # violation messages follow the structure: # # sqlite: # 1 column - (IntegrityError) column c1 is not unique # N columns - (IntegrityError) column c1, c2, ..., N are not unique # # sqlite since 3.7.16: # 1 column - (IntegrityError) UNIQUE constraint failed: k1 # # N columns - (IntegrityError) UNIQUE constraint failed: k1, k2 # # postgres: # 1 column - (IntegrityError) duplicate key value violates unique # constraint "users_c1_key" # N columns - (IntegrityError) duplicate key value violates unique # constraint "name_of_our_constraint" # # mysql: # 1 column - (IntegrityError) (1062, "Duplicate entry 'value_of_c1' for key # 'c1'") # N columns - (IntegrityError) (1062, "Duplicate entry 'values joined # with -' for key 'name_of_our_constraint'") _DUP_KEY_RE_DB = { "sqlite": (re.compile(r"^.*columns?([^)]+)(is|are)\s+not\s+unique$"), re.compile(r"^.*UNIQUE\s+constraint\s+failed:\s+(.+)$")), "postgresql": (re.compile(r"^.*duplicate\s+key.*\"([^\"]+)\"\s*\n.*$"),), "mysql": (re.compile(r"^.*\(1062,.*'([^\']+)'\"\)$"),) } def _raise_if_duplicate_entry_error(integrity_error, engine_name): """Raise exception if two entries are duplicated. In this function will be raised DBDuplicateEntry exception if integrity error wrap unique constraint violation. """ def get_columns_from_uniq_cons_or_name(columns): # note(vsergeyev): UniqueConstraint name convention: "uniq_t0c10c2" # where `t` it is table name and columns `c1`, `c2` # are in UniqueConstraint. uniqbase = "uniq_" if not columns.startswith(uniqbase): if engine_name == "postgresql": return [columns[columns.index("_") + 1:columns.rindex("_")]] return [columns] return columns[len(uniqbase):].split("0")[1:] if engine_name not in ["mysql", "sqlite", "postgresql"]: return # FIXME(johannes): The usage of the .message attribute has been # deprecated since Python 2.6. However, the exceptions raised by # SQLAlchemy can differ when using unicode() and accessing .message. # An audit across all three supported engines will be necessary to # ensure there are no regressions. 
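    # Illustrative walk-through (not executed): for a sqlite message such
    # as "columns name, surname are not unique", the first sqlite pattern
    # above captures " name, surname ", which is stripped and split on
    # ", " below into ['name', 'surname'] before DBDuplicateEntry is
    # raised with those column names.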
for pattern in _DUP_KEY_RE_DB[engine_name]: match = pattern.match(integrity_error.message) if match: break else: return columns = match.group(1) if engine_name == "sqlite": columns = columns.strip().split(", ") else: columns = get_columns_from_uniq_cons_or_name(columns) raise exception.DBDuplicateEntry(columns, integrity_error) # NOTE(comstud): In current versions of DB backends, Deadlock violation # messages follow the structure: # # mysql: # (OperationalError) (1213, 'Deadlock found when trying to get lock; try ' # 'restarting transaction') _DEADLOCK_RE_DB = { "mysql": re.compile(r"^.*\(1213, 'Deadlock.*") } def _raise_if_deadlock_error(operational_error, engine_name): """Raise exception on deadlock condition. Raise DBDeadlock exception if OperationalError contains a Deadlock condition. """ re = _DEADLOCK_RE_DB.get(engine_name) if re is None: return # FIXME(johannes): The usage of the .message attribute has been # deprecated since Python 2.6. However, the exceptions raised by # SQLAlchemy can differ when using unicode() and accessing .message. # An audit across all three supported engines will be necessary to # ensure there are no regressions. m = re.match(operational_error.message) if not m: return raise exception.DBDeadlock(operational_error) def _wrap_db_error(f): @functools.wraps(f) def _wrap(*args, **kwargs): try: return f(*args, **kwargs) except UnicodeEncodeError: raise exception.DBInvalidUnicodeParameter() # note(boris-42): We should catch unique constraint violation and # wrap it by our own DBDuplicateEntry exception. Unique constraint # violation is wrapped by IntegrityError. except sqla_exc.OperationalError as e: _raise_if_deadlock_error(e, get_engine().name) # NOTE(comstud): A lot of code is checking for OperationalError # so let's not wrap it for now. raise except sqla_exc.IntegrityError as e: # note(boris-42): SqlAlchemy doesn't unify errors from different # DBs so we must do this. Also in some tables (for example # instance_types) there are more than one unique constraint. This # means we should get names of columns, which values violate # unique constraint, from error message. _raise_if_duplicate_entry_error(e, get_engine().name) raise exception.DBError(e) except Exception as e: LOG.exception(_('DB exception wrapped.')) raise exception.DBError(e) return _wrap def get_engine(sqlite_fk=False, slave_engine=False, mysql_traditional_mode=False): """Return a SQLAlchemy engine.""" global _ENGINE global _SLAVE_ENGINE engine = _ENGINE db_uri = CONF.database.connection if slave_engine: engine = _SLAVE_ENGINE db_uri = CONF.database.slave_connection if engine is None: engine = create_engine(db_uri, sqlite_fk=sqlite_fk, mysql_traditional_mode=mysql_traditional_mode) if slave_engine: _SLAVE_ENGINE = engine else: _ENGINE = engine return engine def _synchronous_switch_listener(dbapi_conn, connection_rec): """Switch sqlite connections to non-synchronous mode.""" dbapi_conn.execute("PRAGMA synchronous = OFF") def _add_regexp_listener(dbapi_con, con_record): """Add REGEXP function to sqlite connections.""" def regexp(expr, item): reg = re.compile(expr) return reg.search(six.text_type(item)) is not None dbapi_con.create_function('regexp', 2, regexp) def _thread_yield(dbapi_con, con_record): """Ensure other greenthreads get a chance to be executed. If we use eventlet.monkey_patch(), eventlet.greenthread.sleep(0) will execute instead of time.sleep(0). Force a context switch. 
With common database backends (eg MySQLdb and sqlite), there is no implicit yield caused by network I/O since they are implemented by C libraries that eventlet cannot monkey patch. """ time.sleep(0) def _ping_listener(engine, dbapi_conn, connection_rec, connection_proxy): """Ensures that MySQL and DB2 connections are alive. Borrowed from: http://groups.google.com/group/sqlalchemy/msg/a4ce563d802c929f """ cursor = dbapi_conn.cursor() try: ping_sql = 'select 1' if engine.name == 'ibm_db_sa': # DB2 requires a table expression ping_sql = 'select 1 from (values (1)) AS t1' cursor.execute(ping_sql) except Exception as ex: if engine.dialect.is_disconnect(ex, dbapi_conn, cursor): msg = _('Database server has gone away: %s') % ex LOG.warning(msg) raise sqla_exc.DisconnectionError(msg) else: raise def _set_mode_traditional(dbapi_con, connection_rec, connection_proxy): """Set engine mode to 'traditional'. Required to prevent silent truncates at insert or update operations under MySQL. By default MySQL truncates inserted string if it longer than a declared field just with warning. That is fraught with data corruption. """ dbapi_con.cursor().execute("SET SESSION sql_mode = TRADITIONAL;") def _is_db_connection_error(args): """Return True if error in connecting to db.""" # NOTE(adam_g): This is currently MySQL specific and needs to be extended # to support Postgres and others. # For the db2, the error code is -30081 since the db2 is still not ready conn_err_codes = ('2002', '2003', '2006', '-30081') for err_code in conn_err_codes: if args.find(err_code) != -1: return True return False def create_engine(sql_connection, sqlite_fk=False, mysql_traditional_mode=False): """Return a new SQLAlchemy engine.""" # NOTE(geekinutah): At this point we could be connecting to the normal # db handle or the slave db handle. Things like # _wrap_db_error aren't going to work well if their # backends don't match. Let's check. _assert_matching_drivers() connection_dict = sqlalchemy.engine.url.make_url(sql_connection) engine_args = { "pool_recycle": CONF.database.idle_timeout, "echo": False, 'convert_unicode': True, } # Map our SQL debug level to SQLAlchemy's options if CONF.database.connection_debug >= 100: engine_args['echo'] = 'debug' elif CONF.database.connection_debug >= 50: engine_args['echo'] = True if "sqlite" in connection_dict.drivername: if sqlite_fk: engine_args["listeners"] = [SqliteForeignKeysListener()] engine_args["poolclass"] = NullPool if CONF.database.connection == "sqlite://": engine_args["poolclass"] = StaticPool engine_args["connect_args"] = {'check_same_thread': False} else: if CONF.database.max_pool_size is not None: engine_args['pool_size'] = CONF.database.max_pool_size if CONF.database.max_overflow is not None: engine_args['max_overflow'] = CONF.database.max_overflow if CONF.database.pool_timeout is not None: engine_args['pool_timeout'] = CONF.database.pool_timeout engine = sqlalchemy.create_engine(sql_connection, **engine_args) sqlalchemy.event.listen(engine, 'checkin', _thread_yield) if engine.name in ['mysql', 'ibm_db_sa']: callback = functools.partial(_ping_listener, engine) sqlalchemy.event.listen(engine, 'checkout', callback) if engine.name == 'mysql': if mysql_traditional_mode: sqlalchemy.event.listen(engine, 'checkout', _set_mode_traditional) else: LOG.warning(_("This application has not enabled MySQL " "traditional mode, which means silent " "data corruption may occur. 
" "Please encourage the application " "developers to enable this mode.")) elif 'sqlite' in connection_dict.drivername: if not CONF.sqlite_synchronous: sqlalchemy.event.listen(engine, 'connect', _synchronous_switch_listener) sqlalchemy.event.listen(engine, 'connect', _add_regexp_listener) if (CONF.database.connection_trace and engine.dialect.dbapi.__name__ == 'MySQLdb'): _patch_mysqldb_with_stacktrace_comments() try: engine.connect() except sqla_exc.OperationalError as e: if not _is_db_connection_error(e.args[0]): raise remaining = CONF.database.max_retries if remaining == -1: remaining = 'infinite' while True: msg = _('SQL connection failed. %s attempts left.') LOG.warning(msg % remaining) if remaining != 'infinite': remaining -= 1 time.sleep(CONF.database.retry_interval) try: engine.connect() break except sqla_exc.OperationalError as e: if (remaining != 'infinite' and remaining == 0) or \ not _is_db_connection_error(e.args[0]): raise return engine class Query(sqlalchemy.orm.query.Query): """Subclass of sqlalchemy.query with soft_delete() method.""" def soft_delete(self, synchronize_session='evaluate'): return self.update({'deleted': literal_column('id'), 'updated_at': literal_column('updated_at'), 'deleted_at': timeutils.utcnow()}, synchronize_session=synchronize_session) class Session(sqlalchemy.orm.session.Session): """Custom Session class to avoid SqlAlchemy Session monkey patching.""" @_wrap_db_error def query(self, *args, **kwargs): return super(Session, self).query(*args, **kwargs) @_wrap_db_error def flush(self, *args, **kwargs): return super(Session, self).flush(*args, **kwargs) @_wrap_db_error def execute(self, *args, **kwargs): return super(Session, self).execute(*args, **kwargs) def get_maker(engine, autocommit=True, expire_on_commit=False): """Return a SQLAlchemy sessionmaker using the given engine.""" return sqlalchemy.orm.sessionmaker(bind=engine, class_=Session, autocommit=autocommit, expire_on_commit=expire_on_commit, query_cls=Query) def _patch_mysqldb_with_stacktrace_comments(): """Adds current stack trace as a comment in queries. Patches MySQLdb.cursors.BaseCursor._do_query. """ import MySQLdb.cursors import traceback old_mysql_do_query = MySQLdb.cursors.BaseCursor._do_query def _do_query(self, q): stack = '' for filename, line, method, function in traceback.extract_stack(): # exclude various common things from trace if filename.endswith('session.py') and method == '_do_query': continue if filename.endswith('api.py') and method == 'wrapper': continue if filename.endswith('utils.py') and method == '_inner': continue if filename.endswith('exception.py') and method == '_wrap': continue # db/api is just a wrapper around db/sqlalchemy/api if filename.endswith('db/api.py'): continue # only trace inside ceilometer index = filename.rfind('ceilometer') if index == -1: continue stack += "File:%s:%s Method:%s() Line:%s | " \ % (filename[index:], line, method, function) # strip trailing " | " from stack if stack: stack = stack[:-3] qq = "%s /* %s */" % (q, stack) else: qq = q old_mysql_do_query(self, qq) setattr(MySQLdb.cursors.BaseCursor, '_do_query', _do_query) def _assert_matching_drivers(): """Make sure slave handle and normal handle have the same driver.""" # NOTE(geekinutah): There's no use case for writing to one backend and # reading from another. Who knows what the future holds? 
if CONF.database.slave_connection == '': return normal = sqlalchemy.engine.url.make_url(CONF.database.connection) slave = sqlalchemy.engine.url.make_url(CONF.database.slave_connection) assert normal.drivername == slave.drivername ceilometer-2014.1.5/ceilometer/openstack/common/db/sqlalchemy/__init__.py0000664000567000056700000000000012540642615027476 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/db/sqlalchemy/test_migrations.py0000664000567000056700000002555612540642615031201 0ustar jenkinsjenkins00000000000000# Copyright 2010-2011 OpenStack Foundation # Copyright 2012-2013 IBM Corp. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools import logging import os import subprocess import lockfile from six import moves import sqlalchemy import sqlalchemy.exc from ceilometer.openstack.common.db.sqlalchemy import utils from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common.py3kcompat import urlutils from ceilometer.openstack.common import test LOG = logging.getLogger(__name__) def _have_mysql(user, passwd, database): present = os.environ.get('TEST_MYSQL_PRESENT') if present is None: return utils.is_backend_avail(backend='mysql', user=user, passwd=passwd, database=database) return present.lower() in ('', 'true') def _have_postgresql(user, passwd, database): present = os.environ.get('TEST_POSTGRESQL_PRESENT') if present is None: return utils.is_backend_avail(backend='postgres', user=user, passwd=passwd, database=database) return present.lower() in ('', 'true') def _set_db_lock(lock_path=None, lock_prefix=None): def decorator(f): @functools.wraps(f) def wrapper(*args, **kwargs): try: path = lock_path or os.environ.get("CEILOMETER_LOCK_PATH") lock = lockfile.FileLock(os.path.join(path, lock_prefix)) with lock: LOG.debug(_('Got lock "%s"') % f.__name__) return f(*args, **kwargs) finally: LOG.debug(_('Lock released "%s"') % f.__name__) return wrapper return decorator class BaseMigrationTestCase(test.BaseTestCase): """Base class fort testing of migration utils.""" def __init__(self, *args, **kwargs): super(BaseMigrationTestCase, self).__init__(*args, **kwargs) self.DEFAULT_CONFIG_FILE = os.path.join(os.path.dirname(__file__), 'test_migrations.conf') # Test machines can set the TEST_MIGRATIONS_CONF variable # to override the location of the config file for migration testing self.CONFIG_FILE_PATH = os.environ.get('TEST_MIGRATIONS_CONF', self.DEFAULT_CONFIG_FILE) self.test_databases = {} self.migration_api = None def setUp(self): super(BaseMigrationTestCase, self).setUp() # Load test databases from the config file. Only do this # once. No need to re-run this on each test... 
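        # Illustrative layout of test_migrations.conf (hypothetical
        # values): connection strings live in the [DEFAULT] section,
        # keyed by backend name, e.g.
        #     [DEFAULT]
        #     sqlite = sqlite:///test_migrations.db
        #     mysql = mysql://openstack_citest:openstack_citest@localhost/openstack_citest
        # Each value below is handed to sqlalchemy.create_engine().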
LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH) if os.path.exists(self.CONFIG_FILE_PATH): cp = moves.configparser.RawConfigParser() try: cp.read(self.CONFIG_FILE_PATH) defaults = cp.defaults() for key, value in defaults.items(): self.test_databases[key] = value except moves.configparser.ParsingError as e: self.fail("Failed to read test_migrations.conf config " "file. Got error: %s" % e) else: self.fail("Failed to find test_migrations.conf config " "file.") self.engines = {} for key, value in self.test_databases.items(): self.engines[key] = sqlalchemy.create_engine(value) # We start each test case with a completely blank slate. self._reset_databases() def tearDown(self): # We destroy the test data store between each test case, # and recreate it, which ensures that we have no side-effects # from the tests self._reset_databases() super(BaseMigrationTestCase, self).tearDown() def execute_cmd(self, cmd=None): process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) output = process.communicate()[0] LOG.debug(output) self.assertEqual(0, process.returncode, "Failed to run: %s\n%s" % (cmd, output)) def _reset_pg(self, conn_pieces): (user, password, database, host) = utils.get_db_connection_info(conn_pieces) os.environ['PGPASSWORD'] = password os.environ['PGUSER'] = user # note(boris-42): We must create and drop database, we can't # drop database which we have connected to, so for such # operations there is a special database template1. sqlcmd = ("psql -w -U %(user)s -h %(host)s -c" " '%(sql)s' -d template1") sql = ("drop database if exists %s;") % database droptable = sqlcmd % {'user': user, 'host': host, 'sql': sql} self.execute_cmd(droptable) sql = ("create database %s;") % database createtable = sqlcmd % {'user': user, 'host': host, 'sql': sql} self.execute_cmd(createtable) os.unsetenv('PGPASSWORD') os.unsetenv('PGUSER') @_set_db_lock(lock_prefix='migration_tests-') def _reset_databases(self): for key, engine in self.engines.items(): conn_string = self.test_databases[key] conn_pieces = urlutils.urlparse(conn_string) engine.dispose() if conn_string.startswith('sqlite'): # We can just delete the SQLite database, which is # the easiest and cleanest solution db_path = conn_pieces.path.strip('/') if os.path.exists(db_path): os.unlink(db_path) # No need to recreate the SQLite DB. SQLite will # create it for us if it's not there... elif conn_string.startswith('mysql'): # We can execute the MySQL client to destroy and re-create # the MYSQL database, which is easier and less error-prone # than using SQLAlchemy to do this via MetaData...trust me. (user, password, database, host) = \ utils.get_db_connection_info(conn_pieces) sql = ("drop database if exists %(db)s; " "create database %(db)s;") % {'db': database} cmd = ("mysql -u \"%(user)s\" -p\"%(password)s\" -h %(host)s " "-e \"%(sql)s\"") % {'user': user, 'password': password, 'host': host, 'sql': sql} self.execute_cmd(cmd) elif conn_string.startswith('postgresql'): self._reset_pg(conn_pieces) class WalkVersionsMixin(object): def _walk_versions(self, engine=None, snake_walk=False, downgrade=True): # Determine latest version script from the repo, then # upgrade from 1 through to the latest, with no data # in the databases. This just checks that the schema itself # upgrades successfully. 
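        # Illustrative order with snake_walk=True, downgrade=True and two
        # versions (1, 2): up 1, down 0, up 1, up 2, down 1, up 2, then
        # walking back down: down 1, up 2, down 1, down 0, up 1, down 0.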
# Place the database under version control self.migration_api.version_control(engine, self.REPOSITORY, self.INIT_VERSION) self.assertEqual(self.INIT_VERSION, self.migration_api.db_version(engine, self.REPOSITORY)) LOG.debug('latest version is %s' % self.REPOSITORY.latest) versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1) for version in versions: # upgrade -> downgrade -> upgrade self._migrate_up(engine, version, with_data=True) if snake_walk: downgraded = self._migrate_down( engine, version - 1, with_data=True) if downgraded: self._migrate_up(engine, version) if downgrade: # Now walk it back down to 0 from the latest, testing # the downgrade paths. for version in reversed(versions): # downgrade -> upgrade -> downgrade downgraded = self._migrate_down(engine, version - 1) if snake_walk and downgraded: self._migrate_up(engine, version) self._migrate_down(engine, version - 1) def _migrate_down(self, engine, version, with_data=False): try: self.migration_api.downgrade(engine, self.REPOSITORY, version) except NotImplementedError: # NOTE(sirp): some migrations, namely release-level # migrations, don't support a downgrade. return False self.assertEqual( version, self.migration_api.db_version(engine, self.REPOSITORY)) # NOTE(sirp): `version` is what we're downgrading to (i.e. the 'target' # version). So if we have any downgrade checks, they need to be run for # the previous (higher numbered) migration. if with_data: post_downgrade = getattr( self, "_post_downgrade_%03d" % (version + 1), None) if post_downgrade: post_downgrade(engine) return True def _migrate_up(self, engine, version, with_data=False): """migrate up to a new version of the db. We allow for data insertion and post checks at every migration version with special _pre_upgrade_### and _check_### functions in the main test. """ # NOTE(sdague): try block is here because it's impossible to debug # where a failed data migration happens otherwise try: if with_data: data = None pre_upgrade = getattr( self, "_pre_upgrade_%03d" % version, None) if pre_upgrade: data = pre_upgrade(engine) self.migration_api.upgrade(engine, self.REPOSITORY, version) self.assertEqual(version, self.migration_api.db_version(engine, self.REPOSITORY)) if with_data: check = getattr(self, "_check_%03d" % version, None) if check: check(engine, data) except Exception: LOG.error("Failed to migrate to version %s on engine %s" % (version, engine)) raise ceilometer-2014.1.5/ceilometer/openstack/common/db/sqlalchemy/migration.py0000664000567000056700000002323412540642615027746 0ustar jenkinsjenkins00000000000000# coding: utf-8 # # Copyright (c) 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
# # Base on code in migrate/changeset/databases/sqlite.py which is under # the following license: # # The MIT License # # Copyright (c) 2009 Evan Rosson, Jan Dittberner, Domen Kožar # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. import os import re from migrate.changeset import ansisql from migrate.changeset.databases import sqlite from migrate import exceptions as versioning_exceptions from migrate.versioning import api as versioning_api from migrate.versioning.repository import Repository import sqlalchemy from sqlalchemy.schema import UniqueConstraint from ceilometer.openstack.common.db import exception from ceilometer.openstack.common.db.sqlalchemy import session as db_session from ceilometer.openstack.common.gettextutils import _ get_engine = db_session.get_engine def _get_unique_constraints(self, table): """Retrieve information about existing unique constraints of the table This feature is needed for _recreate_table() to work properly. Unfortunately, it's not available in sqlalchemy 0.7.x/0.8.x. """ data = table.metadata.bind.execute( """SELECT sql FROM sqlite_master WHERE type='table' AND name=:table_name""", table_name=table.name ).fetchone()[0] UNIQUE_PATTERN = "CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)" return [ UniqueConstraint( *[getattr(table.columns, c.strip(' "')) for c in cols.split(",")], name=name ) for name, cols in re.findall(UNIQUE_PATTERN, data) ] def _recreate_table(self, table, column=None, delta=None, omit_uniques=None): """Recreate the table properly Unlike the corresponding original method of sqlalchemy-migrate this one doesn't drop existing unique constraints when creating a new one. 
""" table_name = self.preparer.format_table(table) # we remove all indexes so as not to have # problems during copy and re-create for index in table.indexes: index.drop() # reflect existing unique constraints for uc in self._get_unique_constraints(table): table.append_constraint(uc) # omit given unique constraints when creating a new table if required table.constraints = set([ cons for cons in table.constraints if omit_uniques is None or cons.name not in omit_uniques ]) self.append('ALTER TABLE %s RENAME TO migration_tmp' % table_name) self.execute() insertion_string = self._modify_table(table, column, delta) table.create(bind=self.connection) self.append(insertion_string % {'table_name': table_name}) self.execute() self.append('DROP TABLE migration_tmp') self.execute() def _visit_migrate_unique_constraint(self, *p, **k): """Drop the given unique constraint The corresponding original method of sqlalchemy-migrate just raises NotImplemented error """ self.recreate_table(p[0].table, omit_uniques=[p[0].name]) def patch_migrate(): """A workaround for SQLite's inability to alter things SQLite abilities to alter tables are very limited (please read http://www.sqlite.org/lang_altertable.html for more details). E. g. one can't drop a column or a constraint in SQLite. The workaround for this is to recreate the original table omitting the corresponding constraint (or column). sqlalchemy-migrate library has recreate_table() method that implements this workaround, but it does it wrong: - information about unique constraints of a table is not retrieved. So if you have a table with one unique constraint and a migration adding another one you will end up with a table that has only the latter unique constraint, and the former will be lost - dropping of unique constraints is not supported at all The proper way to fix this is to provide a pull-request to sqlalchemy-migrate, but the project seems to be dead. So we can go on with monkey-patching of the lib at least for now. """ # this patch is needed to ensure that recreate_table() doesn't drop # existing unique constraints of the table when creating a new one helper_cls = sqlite.SQLiteHelper helper_cls.recreate_table = _recreate_table helper_cls._get_unique_constraints = _get_unique_constraints # this patch is needed to be able to drop existing unique constraints constraint_cls = sqlite.SQLiteConstraintDropper constraint_cls.visit_migrate_unique_constraint = \ _visit_migrate_unique_constraint constraint_cls.__bases__ = (ansisql.ANSIColumnDropper, sqlite.SQLiteConstraintGenerator) def db_sync(abs_path, version=None, init_version=0): """Upgrade or downgrade a database. Function runs the upgrade() or downgrade() functions in change scripts. :param abs_path: Absolute path to migrate repository. :param version: Database will upgrade/downgrade until this version. If None - database will update to the latest available version. 
:param init_version: Initial database version """ if version is not None: try: version = int(version) except ValueError: raise exception.DbMigrationError( message=_("version should be an integer")) current_version = db_version(abs_path, init_version) repository = _find_migrate_repo(abs_path) _db_schema_sanity_check() if version is None or version > current_version: return versioning_api.upgrade(get_engine(), repository, version) else: return versioning_api.downgrade(get_engine(), repository, version) def _db_schema_sanity_check(): engine = get_engine() if engine.name == 'mysql': onlyutf8_sql = ('SELECT TABLE_NAME,TABLE_COLLATION ' 'from information_schema.TABLES ' 'where TABLE_SCHEMA=%s and ' 'TABLE_COLLATION NOT LIKE "%%utf8%%"') table_names = [res[0] for res in engine.execute(onlyutf8_sql, engine.url.database)] if len(table_names) > 0: raise ValueError(_('Tables "%s" have non utf8 collation, ' 'please make sure all tables are CHARSET=utf8' ) % ','.join(table_names)) def db_version(abs_path, init_version): """Show the current version of the repository. :param abs_path: Absolute path to migrate repository :param version: Initial database version """ repository = _find_migrate_repo(abs_path) try: return versioning_api.db_version(get_engine(), repository) except versioning_exceptions.DatabaseNotControlledError: meta = sqlalchemy.MetaData() engine = get_engine() meta.reflect(bind=engine) tables = meta.tables if len(tables) == 0 or 'alembic_version' in tables: db_version_control(abs_path, init_version) return versioning_api.db_version(get_engine(), repository) else: raise exception.DbMigrationError( message=_( "The database is not under version control, but has " "tables. Please stamp the current version of the schema " "manually.")) def db_version_control(abs_path, version=None): """Mark a database as under this repository's version control. Once a database is under version control, schema changes should only be done via change scripts in this repository. :param abs_path: Absolute path to migrate repository :param version: Initial database version """ repository = _find_migrate_repo(abs_path) versioning_api.version_control(get_engine(), repository, version) return version def _find_migrate_repo(abs_path): """Get the project's change script repository :param abs_path: Absolute path to migrate repository """ if not os.path.exists(abs_path): raise exception.DbMigrationError("Path %s not found" % abs_path) return Repository(abs_path) ceilometer-2014.1.5/ceilometer/openstack/common/db/sqlalchemy/utils.py0000664000567000056700000005001312540642615027110 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2010-2011 OpenStack Foundation. # Copyright 2012 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
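# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): driving the
# db_version()/db_sync() helpers defined in migration.py above from a
# project-level "db sync" command.  The repository path and the init
# version are hypothetical values.
#
#     import os
#
#     from ceilometer.openstack.common.db.sqlalchemy import migration
#
#     def sync_to_latest(base_dir):
#         repo_path = os.path.join(base_dir, 'migrate_repo')  # hypothetical
#         init_version = 0
#         current = migration.db_version(repo_path, init_version)
#         print('current schema version: %s' % current)
#         # version=None upgrades all the way to the newest change script;
#         # passing an older integer would downgrade instead.
#         return migration.db_sync(repo_path, version=None,
#                                  init_version=init_version)
# ---------------------------------------------------------------------------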
import logging import re from migrate.changeset import UniqueConstraint import sqlalchemy from sqlalchemy import Boolean from sqlalchemy import CheckConstraint from sqlalchemy import Column from sqlalchemy.engine import reflection from sqlalchemy.ext.compiler import compiles from sqlalchemy import func from sqlalchemy import Index from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy.sql.expression import literal_column from sqlalchemy.sql.expression import UpdateBase from sqlalchemy.sql import select from sqlalchemy import String from sqlalchemy import Table from sqlalchemy.types import NullType from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import timeutils LOG = logging.getLogger(__name__) _DBURL_REGEX = re.compile(r"[^:]+://([^:]+):([^@]+)@.+") def sanitize_db_url(url): match = _DBURL_REGEX.match(url) if match: return '%s****:****%s' % (url[:match.start(1)], url[match.end(2):]) return url class InvalidSortKey(Exception): message = _("Sort key supplied was not valid.") # copy from glance/db/sqlalchemy/api.py def paginate_query(query, model, limit, sort_keys, marker=None, sort_dir=None, sort_dirs=None): """Returns a query with sorting / pagination criteria added. Pagination works by requiring a unique sort_key, specified by sort_keys. (If sort_keys is not unique, then we risk looping through values.) We use the last row in the previous page as the 'marker' for pagination. So we must return values that follow the passed marker in the order. With a single-valued sort_key, this would be easy: sort_key > X. With a compound-values sort_key, (k1, k2, k3) we must do this to repeat the lexicographical ordering: (k1 > X1) or (k1 == X1 && k2 > X2) or (k1 == X1 && k2 == X2 && k3 > X3) We also have to cope with different sort_directions. Typically, the id of the last row is used as the client-facing pagination marker, then the actual marker object must be fetched from the db and passed in to us as marker. :param query: the query object to which we should add paging/sorting :param model: the ORM model class :param limit: maximum number of items to return :param sort_keys: array of attributes by which results should be sorted :param marker: the last item of the previous page; we returns the next results after this value. :param sort_dir: direction in which results should be sorted (asc, desc) :param sort_dirs: per-column array of sort_dirs, corresponding to sort_keys :rtype: sqlalchemy.orm.query.Query :return: The query with sorting/pagination added. 
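    Illustrative example (not part of the original docstring; ``Instance``,
    ``session`` and the marker row are hypothetical)::

        query = session.query(Instance)
        query = paginate_query(query, Instance, limit=20,
                               sort_keys=['created_at', 'id'],
                               marker=last_row_of_previous_page,
                               sort_dir='desc')
        page = query.all()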
""" if 'id' not in sort_keys: # TODO(justinsb): If this ever gives a false-positive, check # the actual primary key, rather than assuming its id LOG.warning(_('Id not in sort_keys; is sort_keys unique?')) assert(not (sort_dir and sort_dirs)) # Default the sort direction to ascending if sort_dirs is None and sort_dir is None: sort_dir = 'asc' # Ensure a per-column sort direction if sort_dirs is None: sort_dirs = [sort_dir for _sort_key in sort_keys] assert(len(sort_dirs) == len(sort_keys)) # Add sorting for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs): try: sort_dir_func = { 'asc': sqlalchemy.asc, 'desc': sqlalchemy.desc, }[current_sort_dir] except KeyError: raise ValueError(_("Unknown sort direction, " "must be 'desc' or 'asc'")) try: sort_key_attr = getattr(model, current_sort_key) except AttributeError: raise InvalidSortKey() query = query.order_by(sort_dir_func(sort_key_attr)) # Add pagination if marker is not None: marker_values = [] for sort_key in sort_keys: v = getattr(marker, sort_key) marker_values.append(v) # Build up an array of sort criteria as in the docstring criteria_list = [] for i in range(len(sort_keys)): crit_attrs = [] for j in range(i): model_attr = getattr(model, sort_keys[j]) crit_attrs.append((model_attr == marker_values[j])) model_attr = getattr(model, sort_keys[i]) if sort_dirs[i] == 'desc': crit_attrs.append((model_attr < marker_values[i])) else: crit_attrs.append((model_attr > marker_values[i])) criteria = sqlalchemy.sql.and_(*crit_attrs) criteria_list.append(criteria) f = sqlalchemy.sql.or_(*criteria_list) query = query.filter(f) if limit is not None: query = query.limit(limit) return query def get_table(engine, name): """Returns an sqlalchemy table dynamically from db. Needed because the models don't work for us in migrations as models will be far out of sync with the current data. """ metadata = MetaData() metadata.bind = engine return Table(name, metadata, autoload=True) class InsertFromSelect(UpdateBase): """Form the base for `INSERT INTO table (SELECT ... )` statement.""" def __init__(self, table, select): self.table = table self.select = select @compiles(InsertFromSelect) def visit_insert_from_select(element, compiler, **kw): """Form the `INSERT INTO table (SELECT ... )` statement.""" return "INSERT INTO %s %s" % ( compiler.process(element.table, asfrom=True), compiler.process(element.select)) class ColumnError(Exception): """Error raised when no column or an invalid column is found.""" def _get_not_supported_column(col_name_col_instance, column_name): try: column = col_name_col_instance[column_name] except KeyError: msg = _("Please specify column %s in col_name_col_instance " "param. It is required because column has unsupported " "type by sqlite).") raise ColumnError(msg % column_name) if not isinstance(column, Column): msg = _("col_name_col_instance param has wrong type of " "column instance for column %s It should be instance " "of sqlalchemy.Column.") raise ColumnError(msg % column_name) return column def drop_unique_constraint(migrate_engine, table_name, uc_name, *columns, **col_name_col_instance): """Drop unique constraint from table. This method drops UC from table and works for mysql, postgresql and sqlite. In mysql and postgresql we are able to use "alter table" construction. Sqlalchemy doesn't support some sqlite column types and replaces their type with NullType in metadata. We process these columns and replace NullType with the correct column type. 
:param migrate_engine: sqlalchemy engine :param table_name: name of table that contains uniq constraint. :param uc_name: name of uniq constraint that will be dropped. :param columns: columns that are in uniq constraint. :param col_name_col_instance: contains pair column_name=column_instance. column_instance is instance of Column. These params are required only for columns that have unsupported types by sqlite. For example BigInteger. """ meta = MetaData() meta.bind = migrate_engine t = Table(table_name, meta, autoload=True) if migrate_engine.name == "sqlite": override_cols = [ _get_not_supported_column(col_name_col_instance, col.name) for col in t.columns if isinstance(col.type, NullType) ] for col in override_cols: t.columns.replace(col) uc = UniqueConstraint(*columns, table=t, name=uc_name) uc.drop() def drop_old_duplicate_entries_from_table(migrate_engine, table_name, use_soft_delete, *uc_column_names): """Drop all old rows having the same values for columns in uc_columns. This method drop (or mark ad `deleted` if use_soft_delete is True) old duplicate rows form table with name `table_name`. :param migrate_engine: Sqlalchemy engine :param table_name: Table with duplicates :param use_soft_delete: If True - values will be marked as `deleted`, if False - values will be removed from table :param uc_column_names: Unique constraint columns """ meta = MetaData() meta.bind = migrate_engine table = Table(table_name, meta, autoload=True) columns_for_group_by = [table.c[name] for name in uc_column_names] columns_for_select = [func.max(table.c.id)] columns_for_select.extend(columns_for_group_by) duplicated_rows_select = select(columns_for_select, group_by=columns_for_group_by, having=func.count(table.c.id) > 1) for row in migrate_engine.execute(duplicated_rows_select): # NOTE(boris-42): Do not remove row that has the biggest ID. 
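        # Illustrative note (not in the original module): each `row` here is
        # (max_id, uc_col_1, ..., uc_col_n) for one group of duplicates, so
        # the conditions built below match every *older* row in that group:
        # same unique-constraint values, a smaller id and, because of the
        # deleted_at IS NULL check, not already soft-deleted.  E.g. with
        # hypothetical uc_column_names ('user_id', 'resource'), three rows
        # sharing the same (user_id, resource) pair keep only the newest one.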
delete_condition = table.c.id != row[0] is_none = None # workaround for pyflakes delete_condition &= table.c.deleted_at == is_none for name in uc_column_names: delete_condition &= table.c[name] == row[name] rows_to_delete_select = select([table.c.id]).where(delete_condition) for row in migrate_engine.execute(rows_to_delete_select).fetchall(): LOG.info(_("Deleting duplicated row with id: %(id)s from table: " "%(table)s") % dict(id=row[0], table=table_name)) if use_soft_delete: delete_statement = table.update().\ where(delete_condition).\ values({ 'deleted': literal_column('id'), 'updated_at': literal_column('updated_at'), 'deleted_at': timeutils.utcnow() }) else: delete_statement = table.delete().where(delete_condition) migrate_engine.execute(delete_statement) def _get_default_deleted_value(table): if isinstance(table.c.id.type, Integer): return 0 if isinstance(table.c.id.type, String): return "" raise ColumnError(_("Unsupported id columns type")) def _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes): table = get_table(migrate_engine, table_name) insp = reflection.Inspector.from_engine(migrate_engine) real_indexes = insp.get_indexes(table_name) existing_index_names = dict( [(index['name'], index['column_names']) for index in real_indexes]) # NOTE(boris-42): Restore indexes on `deleted` column for index in indexes: if 'deleted' not in index['column_names']: continue name = index['name'] if name in existing_index_names: column_names = [table.c[c] for c in existing_index_names[name]] old_index = Index(name, *column_names, unique=index["unique"]) old_index.drop(migrate_engine) column_names = [table.c[c] for c in index['column_names']] new_index = Index(index["name"], *column_names, unique=index["unique"]) new_index.create(migrate_engine) def change_deleted_column_type_to_boolean(migrate_engine, table_name, **col_name_col_instance): if migrate_engine.name == "sqlite": return _change_deleted_column_type_to_boolean_sqlite( migrate_engine, table_name, **col_name_col_instance) insp = reflection.Inspector.from_engine(migrate_engine) indexes = insp.get_indexes(table_name) table = get_table(migrate_engine, table_name) old_deleted = Column('old_deleted', Boolean, default=False) old_deleted.create(table, populate_default=False) table.update().\ where(table.c.deleted == table.c.id).\ values(old_deleted=True).\ execute() table.c.deleted.drop() table.c.old_deleted.alter(name="deleted") _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes) def _change_deleted_column_type_to_boolean_sqlite(migrate_engine, table_name, **col_name_col_instance): insp = reflection.Inspector.from_engine(migrate_engine) table = get_table(migrate_engine, table_name) columns = [] for column in table.columns: column_copy = None if column.name != "deleted": if isinstance(column.type, NullType): column_copy = _get_not_supported_column(col_name_col_instance, column.name) else: column_copy = column.copy() else: column_copy = Column('deleted', Boolean, default=0) columns.append(column_copy) constraints = [constraint.copy() for constraint in table.constraints] meta = table.metadata new_table = Table(table_name + "__tmp__", meta, *(columns + constraints)) new_table.create() indexes = [] for index in insp.get_indexes(table_name): column_names = [new_table.c[c] for c in index['column_names']] indexes.append(Index(index["name"], *column_names, unique=index["unique"])) c_select = [] for c in table.c: if c.name != "deleted": c_select.append(c) else: c_select.append(table.c.deleted == table.c.id) ins = 
InsertFromSelect(new_table, select(c_select)) migrate_engine.execute(ins) table.drop() [index.create(migrate_engine) for index in indexes] new_table.rename(table_name) new_table.update().\ where(new_table.c.deleted == new_table.c.id).\ values(deleted=True).\ execute() def change_deleted_column_type_to_id_type(migrate_engine, table_name, **col_name_col_instance): if migrate_engine.name == "sqlite": return _change_deleted_column_type_to_id_type_sqlite( migrate_engine, table_name, **col_name_col_instance) insp = reflection.Inspector.from_engine(migrate_engine) indexes = insp.get_indexes(table_name) table = get_table(migrate_engine, table_name) new_deleted = Column('new_deleted', table.c.id.type, default=_get_default_deleted_value(table)) new_deleted.create(table, populate_default=True) deleted = True # workaround for pyflakes table.update().\ where(table.c.deleted == deleted).\ values(new_deleted=table.c.id).\ execute() table.c.deleted.drop() table.c.new_deleted.alter(name="deleted") _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes) def _change_deleted_column_type_to_id_type_sqlite(migrate_engine, table_name, **col_name_col_instance): # NOTE(boris-42): sqlaclhemy-migrate can't drop column with check # constraints in sqlite DB and our `deleted` column has # 2 check constraints. So there is only one way to remove # these constraints: # 1) Create new table with the same columns, constraints # and indexes. (except deleted column). # 2) Copy all data from old to new table. # 3) Drop old table. # 4) Rename new table to old table name. insp = reflection.Inspector.from_engine(migrate_engine) meta = MetaData(bind=migrate_engine) table = Table(table_name, meta, autoload=True) default_deleted_value = _get_default_deleted_value(table) columns = [] for column in table.columns: column_copy = None if column.name != "deleted": if isinstance(column.type, NullType): column_copy = _get_not_supported_column(col_name_col_instance, column.name) else: column_copy = column.copy() else: column_copy = Column('deleted', table.c.id.type, default=default_deleted_value) columns.append(column_copy) def is_deleted_column_constraint(constraint): # NOTE(boris-42): There is no other way to check is CheckConstraint # associated with deleted column. if not isinstance(constraint, CheckConstraint): return False sqltext = str(constraint.sqltext) return (sqltext.endswith("deleted in (0, 1)") or sqltext.endswith("deleted IN (:deleted_1, :deleted_2)")) constraints = [] for constraint in table.constraints: if not is_deleted_column_constraint(constraint): constraints.append(constraint.copy()) new_table = Table(table_name + "__tmp__", meta, *(columns + constraints)) new_table.create() indexes = [] for index in insp.get_indexes(table_name): column_names = [new_table.c[c] for c in index['column_names']] indexes.append(Index(index["name"], *column_names, unique=index["unique"])) ins = InsertFromSelect(new_table, table.select()) migrate_engine.execute(ins) table.drop() [index.create(migrate_engine) for index in indexes] new_table.rename(table_name) deleted = True # workaround for pyflakes new_table.update().\ where(new_table.c.deleted == deleted).\ values(deleted=new_table.c.id).\ execute() # NOTE(boris-42): Fix value of deleted column: False -> "" or 0. 
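    # Illustrative note (not in the original module): after the UPDATE above,
    # soft-deleted rows already carry their own id in the new id-typed
    # `deleted` column, but live rows still hold the copied boolean False.
    # The UPDATE below resets those live rows to the "not deleted" marker
    # returned by _get_default_deleted_value(): 0 for Integer ids, "" for
    # String ids.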
deleted = False # workaround for pyflakes new_table.update().\ where(new_table.c.deleted == deleted).\ values(deleted=default_deleted_value).\ execute() def get_connect_string(backend, database, user=None, passwd=None): """Get database connection Try to get a connection with a very specific set of values, if we get these then we'll run the tests, otherwise they are skipped """ args = {'backend': backend, 'user': user, 'passwd': passwd, 'database': database} if backend == 'sqlite': template = '%(backend)s:///%(database)s' else: template = "%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s" return template % args def is_backend_avail(backend, database, user=None, passwd=None): try: connect_uri = get_connect_string(backend=backend, database=database, user=user, passwd=passwd) engine = sqlalchemy.create_engine(connect_uri) connection = engine.connect() except Exception: # intentionally catch all to handle exceptions even if we don't # have any backend code loaded. return False else: connection.close() engine.dispose() return True def get_db_connection_info(conn_pieces): database = conn_pieces.path.strip('/') loc_pieces = conn_pieces.netloc.split('@') host = loc_pieces[1] auth_pieces = loc_pieces[0].split(':') user = auth_pieces[0] password = "" if len(auth_pieces) > 1: password = auth_pieces[1].strip() return (user, password, database, host) ceilometer-2014.1.5/ceilometer/openstack/common/db/__init__.py0000664000567000056700000000000012540642615025334 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/db/api.py0000664000567000056700000000352312540642615024363 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013 Rackspace Hosting # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Multiple DB API backend support. Supported configuration options: The following two parameters are in the 'database' group: `backend`: DB backend name or full module path to DB backend module. A DB backend module should implement a method named 'get_backend' which takes no arguments. The method can return any object that implements DB API methods. """ from oslo.config import cfg from ceilometer.openstack.common import importutils db_opts = [ cfg.StrOpt('backend', default='sqlalchemy', deprecated_name='db_backend', deprecated_group='DEFAULT', help='The backend to use for db'), ] CONF = cfg.CONF CONF.register_opts(db_opts, 'database') class DBAPI(object): def __init__(self, backend_mapping=None): if backend_mapping is None: backend_mapping = {} backend_name = CONF.database.backend # Import the untranslated name if we don't have a # mapping. backend_path = backend_mapping.get(backend_name, backend_name) backend_mod = importutils.import_module(backend_path) self.__backend = backend_mod.get_backend() def __getattr__(self, key): return getattr(self.__backend, key) ceilometer-2014.1.5/ceilometer/openstack/common/eventlet_backdoor.py0000664000567000056700000001127512540642615026722 0ustar jenkinsjenkins00000000000000# Copyright (c) 2012 OpenStack Foundation. 
# Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from __future__ import print_function import errno import gc import os import pprint import socket import sys import traceback import eventlet import eventlet.backdoor import greenlet from oslo.config import cfg from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import log as logging help_for_backdoor_port = ( "Acceptable values are 0, , and :, where 0 results " "in listening on a random tcp port number; results in listening " "on the specified port number (and not enabling backdoor if that port " "is in use); and : results in listening on the smallest " "unused port number within the specified range of port numbers. The " "chosen port is displayed in the service's log file.") eventlet_backdoor_opts = [ cfg.StrOpt('backdoor_port', default=None, help="Enable eventlet backdoor. %s" % help_for_backdoor_port) ] CONF = cfg.CONF CONF.register_opts(eventlet_backdoor_opts) LOG = logging.getLogger(__name__) class EventletBackdoorConfigValueError(Exception): def __init__(self, port_range, help_msg, ex): msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. ' '%(help)s' % {'range': port_range, 'ex': ex, 'help': help_msg}) super(EventletBackdoorConfigValueError, self).__init__(msg) self.port_range = port_range def _dont_use_this(): print("Don't use this, just disconnect instead") def _find_objects(t): return [o for o in gc.get_objects() if isinstance(o, t)] def _print_greenthreads(): for i, gt in enumerate(_find_objects(greenlet.greenlet)): print(i, gt) traceback.print_stack(gt.gr_frame) print() def _print_nativethreads(): for threadId, stack in sys._current_frames().items(): print(threadId) traceback.print_stack(stack) print() def _parse_port_range(port_range): if ':' not in port_range: start, end = port_range, port_range else: start, end = port_range.split(':', 1) try: start, end = int(start), int(end) if end < start: raise ValueError return start, end except ValueError as ex: raise EventletBackdoorConfigValueError(port_range, ex, help_for_backdoor_port) def _listen(host, start_port, end_port, listen_func): try_port = start_port while True: try: return listen_func((host, try_port)) except socket.error as exc: if (exc.errno != errno.EADDRINUSE or try_port >= end_port): raise try_port += 1 def initialize_if_enabled(): backdoor_locals = { 'exit': _dont_use_this, # So we don't exit the entire process 'quit': _dont_use_this, # So we don't exit the entire process 'fo': _find_objects, 'pgt': _print_greenthreads, 'pnt': _print_nativethreads, } if CONF.backdoor_port is None: return None start_port, end_port = _parse_port_range(str(CONF.backdoor_port)) # NOTE(johannes): The standard sys.displayhook will print the value of # the last expression and set it to __builtin__._, which overwrites # the __builtin__._ that gettext sets. 
Let's switch to using pprint # since it won't interact poorly with gettext, and it's easier to # read the output too. def displayhook(val): if val is not None: pprint.pprint(val) sys.displayhook = displayhook sock = _listen('localhost', start_port, end_port, eventlet.listen) # In the case of backdoor port being zero, a port number is assigned by # listen(). In any case, pull the port number out here. port = sock.getsockname()[1] LOG.info(_('Eventlet backdoor listening on %(port)s for process %(pid)d') % {'port': port, 'pid': os.getpid()}) eventlet.spawn_n(eventlet.backdoor.backdoor_server, sock, locals=backdoor_locals) return port ceilometer-2014.1.5/ceilometer/openstack/common/context.py0000664000567000056700000000701512540642615024711 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Simple class that stores security context information in the web request. Projects should subclass this class if they wish to enhance the request context or provide additional information in their specific WSGI pipeline. """ import itertools import uuid def generate_request_id(): return 'req-%s' % str(uuid.uuid4()) class RequestContext(object): """Helper class to represent useful information about a request context. Stores information about the security context under which the user accesses the system, as well as additional request information. """ user_idt_format = '{user} {tenant} {domain} {user_domain} {p_domain}' def __init__(self, auth_token=None, user=None, tenant=None, domain=None, user_domain=None, project_domain=None, is_admin=False, read_only=False, show_deleted=False, request_id=None, instance_uuid=None): self.auth_token = auth_token self.user = user self.tenant = tenant self.domain = domain self.user_domain = user_domain self.project_domain = project_domain self.is_admin = is_admin self.read_only = read_only self.show_deleted = show_deleted self.instance_uuid = instance_uuid if not request_id: request_id = generate_request_id() self.request_id = request_id def to_dict(self): user_idt = ( self.user_idt_format.format(user=self.user or '-', tenant=self.tenant or '-', domain=self.domain or '-', user_domain=self.user_domain or '-', p_domain=self.project_domain or '-')) return {'user': self.user, 'tenant': self.tenant, 'domain': self.domain, 'user_domain': self.user_domain, 'project_domain': self.project_domain, 'is_admin': self.is_admin, 'read_only': self.read_only, 'show_deleted': self.show_deleted, 'auth_token': self.auth_token, 'request_id': self.request_id, 'instance_uuid': self.instance_uuid, 'user_identity': user_idt} def get_admin_context(show_deleted=False): context = RequestContext(None, tenant=None, is_admin=True, show_deleted=show_deleted) return context def get_context_from_function_and_args(function, args, kwargs): """Find an arg of type RequestContext and return it. This is useful in a couple of decorators where we don't know much about the function we're wrapping. 
""" for arg in itertools.chain(kwargs.values(), args): if isinstance(arg, RequestContext): return arg return None ceilometer-2014.1.5/ceilometer/openstack/common/middleware/0000775000567000056700000000000012540643223024761 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/middleware/request_id.py0000664000567000056700000000247212540642615027510 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013 NEC Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Middleware that ensures request ID. It ensures to assign request ID for each API request and set it to request environment. The request ID is also added to API response. """ from ceilometer.openstack.common import context from ceilometer.openstack.common.middleware import base ENV_REQUEST_ID = 'openstack.request_id' HTTP_RESP_HEADER_REQUEST_ID = 'x-openstack-request-id' class RequestIdMiddleware(base.Middleware): def process_request(self, req): self.req_id = context.generate_request_id() req.environ[ENV_REQUEST_ID] = self.req_id def process_response(self, response): response.headers.add(HTTP_RESP_HEADER_REQUEST_ID, self.req_id) return response ceilometer-2014.1.5/ceilometer/openstack/common/middleware/catch_errors.py0000664000567000056700000000265312540642615030023 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013 NEC Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Middleware that provides high-level error handling. It catches all exceptions from subsequent applications in WSGI pipeline to hide internal errors from API response. """ import webob.dec import webob.exc from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log as logging from ceilometer.openstack.common.middleware import base LOG = logging.getLogger(__name__) class CatchErrorsMiddleware(base.Middleware): @webob.dec.wsgify def __call__(self, req): try: response = req.get_response(self.application) except Exception: LOG.exception(_('An error occurred during ' 'processing the request: %s')) response = webob.exc.HTTPInternalServerError() return response ceilometer-2014.1.5/ceilometer/openstack/common/middleware/notifier.py0000664000567000056700000000767012540642615027170 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013 eNovance # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Send notifications on request """ import os.path import sys import traceback as tb import six import webob.dec from ceilometer.openstack.common import context from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import log as logging from ceilometer.openstack.common.middleware import base from ceilometer.openstack.common.notifier import api LOG = logging.getLogger(__name__) def log_and_ignore_error(fn): def wrapped(*args, **kwargs): try: return fn(*args, **kwargs) except Exception as e: LOG.exception(_('An exception occurred processing ' 'the API call: %s ') % e) return wrapped class RequestNotifier(base.Middleware): """Send notification on request.""" @classmethod def factory(cls, global_conf, **local_conf): """Factory method for paste.deploy.""" conf = global_conf.copy() conf.update(local_conf) def _factory(app): return cls(app, **conf) return _factory def __init__(self, app, **conf): self.service_name = conf.get('service_name', None) self.ignore_req_list = [x.upper().strip() for x in conf.get('ignore_req_list', '').split(',')] super(RequestNotifier, self).__init__(app) @staticmethod def environ_to_dict(environ): """Following PEP 333, server variables are lower case, so don't include them. """ return dict((k, v) for k, v in six.iteritems(environ) if k.isupper() and k != 'HTTP_X_AUTH_TOKEN') @log_and_ignore_error def process_request(self, request): request.environ['HTTP_X_SERVICE_NAME'] = \ self.service_name or request.host payload = { 'request': self.environ_to_dict(request.environ), } api.notify(context.get_admin_context(), api.publisher_id(os.path.basename(sys.argv[0])), 'http.request', api.INFO, payload) @log_and_ignore_error def process_response(self, request, response, exception=None, traceback=None): payload = { 'request': self.environ_to_dict(request.environ), } if response: payload['response'] = { 'status': response.status, 'headers': response.headers, } if exception: payload['exception'] = { 'value': repr(exception), 'traceback': tb.format_tb(traceback) } api.notify(context.get_admin_context(), api.publisher_id(os.path.basename(sys.argv[0])), 'http.response', api.INFO, payload) @webob.dec.wsgify def __call__(self, req): if req.method in self.ignore_req_list: return req.get_response(self.application) else: self.process_request(req) try: response = req.get_response(self.application) except Exception: exc_type, value, traceback = sys.exc_info() self.process_response(req, None, value, traceback) raise else: self.process_response(req, response) return response ceilometer-2014.1.5/ceilometer/openstack/common/middleware/debug.py0000664000567000056700000000337212540642615026432 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Debug middleware""" from __future__ import print_function import sys import six import webob.dec from ceilometer.openstack.common.middleware import base class Debug(base.Middleware): """Helper class that returns debug information. Can be inserted into any WSGI application chain to get information about the request and response. """ @webob.dec.wsgify def __call__(self, req): print(("*" * 40) + " REQUEST ENVIRON") for key, value in req.environ.items(): print(key, "=", value) print() resp = req.get_response(self.application) print(("*" * 40) + " RESPONSE HEADERS") for (key, value) in six.iteritems(resp.headers): print(key, "=", value) print() resp.app_iter = self.print_generator(resp.app_iter) return resp @staticmethod def print_generator(app_iter): """Prints the contents of a wrapper string iterator when iterated.""" print(("*" * 40) + " BODY") for part in app_iter: sys.stdout.write(part) sys.stdout.flush() yield part print() ceilometer-2014.1.5/ceilometer/openstack/common/middleware/base.py0000664000567000056700000000347712540642615026264 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Base class(es) for WSGI Middleware.""" import webob.dec class Middleware(object): """Base WSGI middleware wrapper. These classes require an application to be initialized that will be called next. By default the middleware will simply call its wrapped app, or you can override __call__ to customize its behavior. """ @classmethod def factory(cls, global_conf, **local_conf): """Factory method for paste.deploy.""" return cls def __init__(self, application): self.application = application def process_request(self, req): """Called on each request. If this returns None, the next application down the stack will be executed. If it returns a response then that response will be returned and execution will stop here. """ return None def process_response(self, response): """Do whatever you'd like to the response.""" return response @webob.dec.wsgify def __call__(self, req): response = self.process_request(req) if response: return response response = req.get_response(self.application) return self.process_response(response) ceilometer-2014.1.5/ceilometer/openstack/common/middleware/audit.py0000664000567000056700000000325312540642615026450 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Attach open standard audit information to request.environ AuditMiddleware filter should be place after Keystone's auth_token middleware in the pipeline so that it can utilise the information Keystone provides. """ from pycadf.audit import api as cadf_api from ceilometer.openstack.common.middleware import notifier class AuditMiddleware(notifier.RequestNotifier): def __init__(self, app, **conf): super(AuditMiddleware, self).__init__(app, **conf) self.cadf_audit = cadf_api.OpenStackAuditApi() @notifier.log_and_ignore_error def process_request(self, request): self.cadf_audit.append_audit_event(request) super(AuditMiddleware, self).process_request(request) @notifier.log_and_ignore_error def process_response(self, request, response, exception=None, traceback=None): self.cadf_audit.mod_audit_event(request, response) super(AuditMiddleware, self).process_response(request, response, exception, traceback) ceilometer-2014.1.5/ceilometer/openstack/common/middleware/__init__.py0000664000567000056700000000000012540642615027064 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/middleware/correlation_id.py0000664000567000056700000000200112540642615030325 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013 Rackspace Hosting # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Middleware that attaches a correlation id to WSGI request""" import uuid from ceilometer.openstack.common.middleware import base class CorrelationIdMiddleware(base.Middleware): def process_request(self, req): correlation_id = (req.headers.get("X_CORRELATION_ID") or str(uuid.uuid4())) req.headers['X_CORRELATION_ID'] = correlation_id ceilometer-2014.1.5/ceilometer/openstack/common/middleware/sizelimit.py0000664000567000056700000000531512540642615027354 0ustar jenkinsjenkins00000000000000# Copyright (c) 2012 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Request Body limiting middleware. 
""" from oslo.config import cfg import webob.dec import webob.exc from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common.middleware import base #default request size is 112k max_req_body_size = cfg.IntOpt('max_request_body_size', deprecated_name='osapi_max_request_body_size', default=114688, help='The maximum body size ' 'per request, in bytes') CONF = cfg.CONF CONF.register_opt(max_req_body_size) class LimitingReader(object): """Reader to limit the size of an incoming request.""" def __init__(self, data, limit): """Initiates LimitingReader object. :param data: Underlying data object :param limit: maximum number of bytes the reader should allow """ self.data = data self.limit = limit self.bytes_read = 0 def __iter__(self): for chunk in self.data: self.bytes_read += len(chunk) if self.bytes_read > self.limit: msg = _("Request is too large.") raise webob.exc.HTTPRequestEntityTooLarge(explanation=msg) else: yield chunk def read(self, i=None): result = self.data.read(i) self.bytes_read += len(result) if self.bytes_read > self.limit: msg = _("Request is too large.") raise webob.exc.HTTPRequestEntityTooLarge(explanation=msg) return result class RequestBodySizeLimiter(base.Middleware): """Limit the size of incoming requests.""" @webob.dec.wsgify def __call__(self, req): if req.content_length > CONF.max_request_body_size: msg = _("Request is too large.") raise webob.exc.HTTPRequestEntityTooLarge(explanation=msg) if req.content_length is None and req.is_body_readable: limiter = LimitingReader(req.body_file, CONF.max_request_body_size) req.body_file = limiter return self.application ceilometer-2014.1.5/ceilometer/openstack/common/gettextutils.py0000664000567000056700000004220512540642615025772 0ustar jenkinsjenkins00000000000000# Copyright 2012 Red Hat, Inc. # Copyright 2013 IBM Corp. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ gettext for openstack-common modules. Usual usage in an openstack.common module: from ceilometer.openstack.common.gettextutils import _ """ import copy import gettext import locale from logging import handlers import os import re from babel import localedata import six _localedir = os.environ.get('ceilometer'.upper() + '_LOCALEDIR') _t = gettext.translation('ceilometer', localedir=_localedir, fallback=True) _AVAILABLE_LANGUAGES = {} USE_LAZY = False def enable_lazy(): """Convenience function for configuring _() to use lazy gettext Call this at the start of execution to enable the gettextutils._ function to use lazy gettext functionality. This is useful if your project is importing _ directly instead of using the gettextutils.install() way of importing the _ function. """ global USE_LAZY USE_LAZY = True def _(msg): if USE_LAZY: return Message(msg, domain='ceilometer') else: if six.PY3: return _t.gettext(msg) return _t.ugettext(msg) def install(domain, lazy=False): """Install a _() function using the given translation domain. Given a translation domain, install a _() function using gettext's install() function. 
The main difference from gettext.install() is that we allow overriding the default localedir (e.g. /usr/share/locale) using a translation-domain-specific environment variable (e.g. NOVA_LOCALEDIR). :param domain: the translation domain :param lazy: indicates whether or not to install the lazy _() function. The lazy _() introduces a way to do deferred translation of messages by installing a _ that builds Message objects, instead of strings, which can then be lazily translated into any available locale. """ if lazy: # NOTE(mrodden): Lazy gettext functionality. # # The following introduces a deferred way to do translations on # messages in OpenStack. We override the standard _() function # and % (format string) operation to build Message objects that can # later be translated when we have more information. def _lazy_gettext(msg): """Create and return a Message object. Lazy gettext function for a given domain, it is a factory method for a project/module to get a lazy gettext function for its own translation domain (i.e. nova, glance, cinder, etc.) Message encapsulates a string so that we can translate it later when needed. """ return Message(msg, domain=domain) from six import moves moves.builtins.__dict__['_'] = _lazy_gettext else: localedir = '%s_LOCALEDIR' % domain.upper() if six.PY3: gettext.install(domain, localedir=os.environ.get(localedir)) else: gettext.install(domain, localedir=os.environ.get(localedir), unicode=True) class Message(six.text_type): """A Message object is a unicode object that can be translated. Translation of Message is done explicitly using the translate() method. For all non-translation intents and purposes, a Message is simply unicode, and can be treated as such. """ def __new__(cls, msgid, msgtext=None, params=None, domain='ceilometer', *args): """Create a new Message object. In order for translation to work gettext requires a message ID, this msgid will be used as the base unicode text. It is also possible for the msgid and the base unicode text to be different by passing the msgtext parameter. """ # If the base msgtext is not given, we use the default translation # of the msgid (which is in English) just in case the system locale is # not English, so that the base text will be in that locale by default. if not msgtext: msgtext = Message._translate_msgid(msgid, domain) # We want to initialize the parent unicode with the actual object that # would have been plain unicode if 'Message' was not enabled. msg = super(Message, cls).__new__(cls, msgtext) msg.msgid = msgid msg.domain = domain msg.params = params return msg def translate(self, desired_locale=None): """Translate this message to the desired locale. :param desired_locale: The desired locale to translate the message to, if no locale is provided the message will be translated to the system's default locale. :returns: the translated message in unicode """ translated_message = Message._translate_msgid(self.msgid, self.domain, desired_locale) if self.params is None: # No need for more translation return translated_message # This Message object may have been formatted with one or more # Message objects as substitution arguments, given either as a single # argument, part of a tuple, or as one or more values in a dictionary. 
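        # (illustrative example, not in the original comment:
        #  _('Failed to load %s') % _('the default backend') -- here the
        #  substituted value is itself a Message and is translated as well)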
# When translating this Message we need to translate those Messages too translated_params = _translate_args(self.params, desired_locale) translated_message = translated_message % translated_params return translated_message @staticmethod def _translate_msgid(msgid, domain, desired_locale=None): if not desired_locale: system_locale = locale.getdefaultlocale() # If the system locale is not available to the runtime use English if not system_locale[0]: desired_locale = 'en_US' else: desired_locale = system_locale[0] locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR') lang = gettext.translation(domain, localedir=locale_dir, languages=[desired_locale], fallback=True) if six.PY3: translator = lang.gettext else: translator = lang.ugettext translated_message = translator(msgid) return translated_message def __mod__(self, other): # When we mod a Message we want the actual operation to be performed # by the parent class (i.e. unicode()), the only thing we do here is # save the original msgid and the parameters in case of a translation params = self._sanitize_mod_params(other) unicode_mod = super(Message, self).__mod__(params) modded = Message(self.msgid, msgtext=unicode_mod, params=params, domain=self.domain) return modded def _sanitize_mod_params(self, other): """Sanitize the object being modded with this Message. - Add support for modding 'None' so translation supports it - Trim the modded object, which can be a large dictionary, to only those keys that would actually be used in a translation - Snapshot the object being modded, in case the message is translated, it will be used as it was when the Message was created """ if other is None: params = (other,) elif isinstance(other, dict): params = self._trim_dictionary_parameters(other) else: params = self._copy_param(other) return params def _trim_dictionary_parameters(self, dict_param): """Return a dict that only has matching entries in the msgid.""" # NOTE(luisg): Here we trim down the dictionary passed as parameters # to avoid carrying a lot of unnecessary weight around in the message # object, for example if someone passes in Message() % locals() but # only some params are used, and additionally we prevent errors for # non-deepcopyable objects by unicoding() them. # Look for %(param) keys in msgid; # Skip %% and deal with the case where % is first character on the line keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', self.msgid) # If we don't find any %(param) keys but have a %s if not keys and re.findall('(?:[^%]|^)%[a-z]', self.msgid): # Apparently the full dictionary is the parameter params = self._copy_param(dict_param) else: params = {} # Save our existing parameters as defaults to protect # ourselves from losing values if we are called through an # (erroneous) chain that builds a valid Message with # arguments, and then does something like "msg % kwds" # where kwds is an empty dictionary. 
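            # Illustrative example (not in the original module):
            #   msg = _('resource %(id)s failed') % {'id': 42}
            #   msg = msg % {}     # erroneous second mod with empty kwds
            #   msg.translate()    # still works: %(id)s is resolved from the
            #                      # params saved during the first mod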
src = {} if isinstance(self.params, dict): src.update(self.params) src.update(dict_param) for key in keys: params[key] = self._copy_param(src[key]) return params def _copy_param(self, param): try: return copy.deepcopy(param) except TypeError: # Fallback to casting to unicode this will handle the # python code-like objects that can't be deep-copied return six.text_type(param) def __add__(self, other): msg = _('Message objects do not support addition.') raise TypeError(msg) def __radd__(self, other): return self.__add__(other) def __str__(self): # NOTE(luisg): Logging in python 2.6 tries to str() log records, # and it expects specifically a UnicodeError in order to proceed. msg = _('Message objects do not support str() because they may ' 'contain non-ascii characters. ' 'Please use unicode() or translate() instead.') raise UnicodeError(msg) def get_available_languages(domain): """Lists the available languages for the given translation domain. :param domain: the domain to get languages for """ if domain in _AVAILABLE_LANGUAGES: return copy.copy(_AVAILABLE_LANGUAGES[domain]) localedir = '%s_LOCALEDIR' % domain.upper() find = lambda x: gettext.find(domain, localedir=os.environ.get(localedir), languages=[x]) # NOTE(mrodden): en_US should always be available (and first in case # order matters) since our in-line message strings are en_US language_list = ['en_US'] # NOTE(luisg): Babel <1.0 used a function called list(), which was # renamed to locale_identifiers() in >=1.0, the requirements master list # requires >=0.9.6, uncapped, so defensively work with both. We can remove # this check when the master list updates to >=1.0, and update all projects list_identifiers = (getattr(localedata, 'list', None) or getattr(localedata, 'locale_identifiers')) locale_identifiers = list_identifiers() for i in locale_identifiers: if find(i) is not None: language_list.append(i) # NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported # locales (e.g. 'zh_CN', and 'zh_TW') aren't supported even though they # are perfectly legitimate locales: # https://github.com/mitsuhiko/babel/issues/37 # In Babel 1.3 they fixed the bug and they support these locales, but # they are still not explicitly "listed" by locale_identifiers(). # That is why we add the locales here explicitly if necessary so that # they are listed as supported. aliases = {'zh': 'zh_CN', 'zh_Hant_HK': 'zh_HK', 'zh_Hant': 'zh_TW', 'fil': 'tl_PH'} for (locale, alias) in six.iteritems(aliases): if locale in language_list and alias not in language_list: language_list.append(alias) _AVAILABLE_LANGUAGES[domain] = language_list return copy.copy(language_list) def translate(obj, desired_locale=None): """Gets the translated unicode representation of the given object. If the object is not translatable it is returned as-is. If the locale is None the object is translated to the system locale. 
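    Illustrative example (not part of the original docstring)::

        msg = _('Unable to find %(name)s') % {'name': 'foo'}
        translate(msg, desired_locale='de_DE')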
:param obj: the object to translate :param desired_locale: the locale to translate the message to, if None the default system locale will be used :returns: the translated object in unicode, or the original object if it could not be translated """ message = obj if not isinstance(message, Message): # If the object to translate is not already translatable, # let's first get its unicode representation message = six.text_type(obj) if isinstance(message, Message): # Even after unicoding() we still need to check if we are # running with translatable unicode before translating return message.translate(desired_locale) return obj def _translate_args(args, desired_locale=None): """Translates all the translatable elements of the given arguments object. This method is used for translating the translatable values in method arguments which include values of tuples or dictionaries. If the object is not a tuple or a dictionary the object itself is translated if it is translatable. If the locale is None the object is translated to the system locale. :param args: the args to translate :param desired_locale: the locale to translate the args to, if None the default system locale will be used :returns: a new args object with the translated contents of the original """ if isinstance(args, tuple): return tuple(translate(v, desired_locale) for v in args) if isinstance(args, dict): translated_dict = {} for (k, v) in six.iteritems(args): translated_v = translate(v, desired_locale) translated_dict[k] = translated_v return translated_dict return translate(args, desired_locale) class TranslationHandler(handlers.MemoryHandler): """Handler that translates records before logging them. The TranslationHandler takes a locale and a target logging.Handler object to forward LogRecord objects to after translating them. This handler depends on Message objects being logged, instead of regular strings. The handler can be configured declaratively in the logging.conf as follows: [handlers] keys = translatedlog, translator [handler_translatedlog] class = handlers.WatchedFileHandler args = ('/var/log/api-localized.log',) formatter = context [handler_translator] class = openstack.common.log.TranslationHandler target = translatedlog args = ('zh_CN',) If the specified locale is not available in the system, the handler will log in the default locale. """ def __init__(self, locale=None, target=None): """Initialize a TranslationHandler :param locale: locale to use for translating messages :param target: logging.Handler object to forward LogRecord objects to after translation """ # NOTE(luisg): In order to allow this handler to be a wrapper for # other handlers, such as a FileHandler, and still be able to # configure it using logging.conf, this handler has to extend # MemoryHandler because only the MemoryHandlers' logging.conf # parsing is implemented such that it accepts a target handler. 
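        # A minimal programmatic sketch of wiring this handler up (the
        # logger name, locale and target handler are hypothetical):
        #
        #     import logging
        #     handler = TranslationHandler(locale='zh_CN',
        #                                  target=logging.StreamHandler())
        #     logging.getLogger('myapp').addHandler(handler)
        #
        # Records logged with Message instances are then translated to
        # zh_CN before being forwarded to the target stream handler.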
handlers.MemoryHandler.__init__(self, capacity=0, target=target) self.locale = locale def setFormatter(self, fmt): self.target.setFormatter(fmt) def emit(self, record): # We save the message from the original record to restore it # after translation, so other handlers are not affected by this original_msg = record.msg original_args = record.args try: self._translate_and_log_record(record) finally: record.msg = original_msg record.args = original_args def _translate_and_log_record(self, record): record.msg = translate(record.msg, self.locale) # In addition to translating the message, we also need to translate # arguments that were passed to the log method that were not part # of the main message e.g., log.info(_('Some message %s'), this_one)) record.args = _translate_args(record.args, self.locale) self.target.emit(record) ceilometer-2014.1.5/ceilometer/openstack/common/sslutils.py0000664000567000056700000000543512540642615025113 0ustar jenkinsjenkins00000000000000# Copyright 2013 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import ssl from oslo.config import cfg from ceilometer.openstack.common.gettextutils import _ ssl_opts = [ cfg.StrOpt('ca_file', default=None, help="CA certificate file to use to verify " "connecting clients"), cfg.StrOpt('cert_file', default=None, help="Certificate file to use when starting " "the server securely"), cfg.StrOpt('key_file', default=None, help="Private key file to use when starting " "the server securely"), ] CONF = cfg.CONF CONF.register_opts(ssl_opts, "ssl") def is_enabled(): cert_file = CONF.ssl.cert_file key_file = CONF.ssl.key_file ca_file = CONF.ssl.ca_file use_ssl = cert_file or key_file if cert_file and not os.path.exists(cert_file): raise RuntimeError(_("Unable to find cert_file : %s") % cert_file) if ca_file and not os.path.exists(ca_file): raise RuntimeError(_("Unable to find ca_file : %s") % ca_file) if key_file and not os.path.exists(key_file): raise RuntimeError(_("Unable to find key_file : %s") % key_file) if use_ssl and (not cert_file or not key_file): raise RuntimeError(_("When running server in SSL mode, you must " "specify both a cert_file and key_file " "option value in your configuration file")) return use_ssl def wrap(sock): ssl_kwargs = { 'server_side': True, 'certfile': CONF.ssl.cert_file, 'keyfile': CONF.ssl.key_file, 'cert_reqs': ssl.CERT_NONE, } if CONF.ssl.ca_file: ssl_kwargs['ca_certs'] = CONF.ssl.ca_file ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED return ssl.wrap_socket(sock, **ssl_kwargs) _SSL_PROTOCOLS = { "tlsv1": ssl.PROTOCOL_TLSv1, "sslv23": ssl.PROTOCOL_SSLv23, "sslv3": ssl.PROTOCOL_SSLv3 } try: _SSL_PROTOCOLS["sslv2"] = ssl.PROTOCOL_SSLv2 except AttributeError: pass def validate_ssl_version(version): key = version.lower() try: return _SSL_PROTOCOLS[key] except KeyError: raise RuntimeError(_("Invalid SSL version : %s") % version) ceilometer-2014.1.5/ceilometer/openstack/common/jsonutils.py0000664000567000056700000001510312540642615025254 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government 
as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. ''' JSON related utilities. This module provides a few things: 1) A handy function for getting an object down to something that can be JSON serialized. See to_primitive(). 2) Wrappers around loads() and dumps(). The dumps() wrapper will automatically use to_primitive() for you if needed. 3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson is available. ''' import datetime import functools import inspect import itertools import json try: import xmlrpclib except ImportError: # NOTE(jaypipes): xmlrpclib was renamed to xmlrpc.client in Python3 # however the function and object call signatures # remained the same. This whole try/except block should # be removed and replaced with a call to six.moves once # six 1.4.2 is released. See http://bit.ly/1bqrVzu import xmlrpc.client as xmlrpclib import six from ceilometer.openstack.common import gettextutils from ceilometer.openstack.common import importutils from ceilometer.openstack.common import timeutils netaddr = importutils.try_import("netaddr") _nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod, inspect.isfunction, inspect.isgeneratorfunction, inspect.isgenerator, inspect.istraceback, inspect.isframe, inspect.iscode, inspect.isbuiltin, inspect.isroutine, inspect.isabstract] _simple_types = (six.string_types + six.integer_types + (type(None), bool, float)) def to_primitive(value, convert_instances=False, convert_datetime=True, level=0, max_depth=3): """Convert a complex object into primitives. Handy for JSON serialization. We can optionally handle instances, but since this is a recursive function, we could have cyclical data structures. To handle cyclical data structures we could track the actual objects visited in a set, but not all objects are hashable. Instead we just track the depth of the object inspections and don't go too deep. Therefore, convert_instances=True is lossy ... be aware. """ # handle obvious types first - order of basic types determined by running # full tests on nova project, resulting in the following counts: # 572754 # 460353 # 379632 # 274610 # 199918 # 114200 # 51817 # 26164 # 6491 # 283 # 19 if isinstance(value, _simple_types): return value if isinstance(value, datetime.datetime): if convert_datetime: return timeutils.strtime(value) else: return value # value of itertools.count doesn't get caught by nasty_type_tests # and results in infinite loop when list(value) is called. if type(value) == itertools.count: return six.text_type(value) # FIXME(vish): Workaround for LP bug 852095. Without this workaround, # tests that raise an exception in a mocked method that # has a @wrap_exception with a notifier will fail. If # we up the dependency to 0.5.4 (when it is released) we # can remove this workaround. if getattr(value, '__module__', None) == 'mox': return 'mock' if level > max_depth: return '?' 
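    # A rough sketch of the conversions below (input values hypothetical):
    #
    #     >>> to_primitive({'when': datetime.datetime(2014, 1, 1),
    #     ...               'tags': ('a', 'b')})
    #     {'when': '2014-01-01T00:00:00.000000', 'tags': ['a', 'b']}
    #
    # datetimes become strtime() strings, tuples become lists, and anything
    # nested deeper than max_depth collapses to '?' as returned just above.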
# The try block may not be necessary after the class check above, # but just in case ... try: recursive = functools.partial(to_primitive, convert_instances=convert_instances, convert_datetime=convert_datetime, level=level, max_depth=max_depth) if isinstance(value, dict): return dict((k, recursive(v)) for k, v in six.iteritems(value)) elif isinstance(value, (list, tuple)): return [recursive(lv) for lv in value] # It's not clear why xmlrpclib created their own DateTime type, but # for our purposes, make it a datetime type which is explicitly # handled if isinstance(value, xmlrpclib.DateTime): value = datetime.datetime(*tuple(value.timetuple())[:6]) if convert_datetime and isinstance(value, datetime.datetime): return timeutils.strtime(value) elif isinstance(value, gettextutils.Message): return value.data elif hasattr(value, 'iteritems'): return recursive(dict(value.iteritems()), level=level + 1) elif hasattr(value, '__iter__'): return recursive(list(value)) elif convert_instances and hasattr(value, '__dict__'): # Likely an instance of something. Watch for cycles. # Ignore class member vars. return recursive(value.__dict__, level=level + 1) elif netaddr and isinstance(value, netaddr.IPAddress): return six.text_type(value) else: if any(test(value) for test in _nasty_type_tests): return six.text_type(value) return value except TypeError: # Class objects are tricky since they may define something like # __iter__ defined but it isn't callable as list(). return six.text_type(value) def dumps(value, default=to_primitive, **kwargs): return json.dumps(value, default=default, **kwargs) def loads(s): return json.loads(s) def load(s): return json.load(s) try: import anyjson except ImportError: pass else: anyjson._modules.append((__name__, 'dumps', TypeError, 'loads', ValueError, 'load')) anyjson.force_implementation(__name__) ceilometer-2014.1.5/ceilometer/openstack/common/timeutils.py0000664000567000056700000001424112540642615025243 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Time related utilities and helper functions. 
""" import calendar import datetime import time import iso8601 import six # ISO 8601 extended time format with microseconds _ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f' _ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND def isotime(at=None, subsecond=False): """Stringify time in ISO 8601 format.""" if not at: at = utcnow() st = at.strftime(_ISO8601_TIME_FORMAT if not subsecond else _ISO8601_TIME_FORMAT_SUBSECOND) tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' st += ('Z' if tz == 'UTC' else tz) return st def parse_isotime(timestr): """Parse time from ISO 8601 format.""" try: return iso8601.parse_date(timestr) except iso8601.ParseError as e: raise ValueError(six.text_type(e)) except TypeError as e: raise ValueError(six.text_type(e)) def strtime(at=None, fmt=PERFECT_TIME_FORMAT): """Returns formatted utcnow.""" if not at: at = utcnow() return at.strftime(fmt) def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT): """Turn a formatted time back into a datetime.""" return datetime.datetime.strptime(timestr, fmt) def normalize_time(timestamp): """Normalize time in arbitrary timezone to UTC naive object.""" offset = timestamp.utcoffset() if offset is None: return timestamp return timestamp.replace(tzinfo=None) - offset def is_older_than(before, seconds): """Return True if before is older than seconds.""" if isinstance(before, six.string_types): before = parse_strtime(before).replace(tzinfo=None) else: before = before.replace(tzinfo=None) return utcnow() - before > datetime.timedelta(seconds=seconds) def is_newer_than(after, seconds): """Return True if after is newer than seconds.""" if isinstance(after, six.string_types): after = parse_strtime(after).replace(tzinfo=None) else: after = after.replace(tzinfo=None) return after - utcnow() > datetime.timedelta(seconds=seconds) def utcnow_ts(): """Timestamp version of our utcnow function.""" if utcnow.override_time is None: # NOTE(kgriffs): This is several times faster # than going through calendar.timegm(...) return int(time.time()) return calendar.timegm(utcnow().timetuple()) def utcnow(): """Overridable version of utils.utcnow.""" if utcnow.override_time: try: return utcnow.override_time.pop(0) except AttributeError: return utcnow.override_time return datetime.datetime.utcnow() def iso8601_from_timestamp(timestamp): """Returns a iso8601 formatted date from timestamp.""" return isotime(datetime.datetime.utcfromtimestamp(timestamp)) utcnow.override_time = None def set_time_override(override_time=None): """Overrides utils.utcnow. Make it return a constant time or a list thereof, one at a time. :param override_time: datetime instance or list thereof. If not given, defaults to the current UTC time. """ utcnow.override_time = override_time or datetime.datetime.utcnow() def advance_time_delta(timedelta): """Advance overridden time using a datetime.timedelta.""" assert(not utcnow.override_time is None) try: for dt in utcnow.override_time: dt += timedelta except TypeError: utcnow.override_time += timedelta def advance_time_seconds(seconds): """Advance overridden time by seconds.""" advance_time_delta(datetime.timedelta(0, seconds)) def clear_time_override(): """Remove the overridden time.""" utcnow.override_time = None def marshall_now(now=None): """Make an rpc-safe datetime with microseconds. Note: tzinfo is stripped, but not required for relative times. 
""" if not now: now = utcnow() return dict(day=now.day, month=now.month, year=now.year, hour=now.hour, minute=now.minute, second=now.second, microsecond=now.microsecond) def unmarshall_time(tyme): """Unmarshall a datetime dict.""" return datetime.datetime(day=tyme['day'], month=tyme['month'], year=tyme['year'], hour=tyme['hour'], minute=tyme['minute'], second=tyme['second'], microsecond=tyme['microsecond']) def delta_seconds(before, after): """Return the difference between two timing objects. Compute the difference in seconds between two date, time, or datetime objects (as a float, to microsecond resolution). """ delta = after - before return total_seconds(delta) def total_seconds(delta): """Return the total seconds of datetime.timedelta object. Compute total seconds of datetime.timedelta, datetime.timedelta doesn't have method total_seconds in Python2.6, calculate it manually. """ try: return delta.total_seconds() except AttributeError: return ((delta.days * 24 * 3600) + delta.seconds + float(delta.microseconds) / (10 ** 6)) def is_soon(dt, window): """Determines if time is going to happen in the next window seconds. :param dt: the time :param window: minimum seconds to remain to consider the time not soon :return: True if expiration is within the given duration """ soon = (utcnow() + datetime.timedelta(seconds=window)) return normalize_time(dt) <= soon ceilometer-2014.1.5/ceilometer/openstack/common/excutils.py0000664000567000056700000000717512540642615025074 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # Copyright 2012, Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Exception related utilities. """ import logging import sys import time import traceback import six from ceilometer.openstack.common.gettextutils import _ class save_and_reraise_exception(object): """Save current exception, run some code and then re-raise. In some cases the exception context can be cleared, resulting in None being attempted to be re-raised after an exception handler is run. This can happen when eventlet switches greenthreads or when running an exception handler, code raises and catches an exception. In both cases the exception context will be cleared. To work around this, we save the exception state, run handler code, and then re-raise the original exception. If another exception occurs, the saved exception is logged and the new exception is re-raised. In some cases the caller may not want to re-raise the exception, and for those circumstances this context provides a reraise flag that can be used to suppress the exception. 
For example:: except Exception: with save_and_reraise_exception() as ctxt: decide_if_need_reraise() if not should_be_reraised: ctxt.reraise = False """ def __init__(self): self.reraise = True def __enter__(self): self.type_, self.value, self.tb, = sys.exc_info() return self def __exit__(self, exc_type, exc_val, exc_tb): if exc_type is not None: logging.error(_('Original exception being dropped: %s'), traceback.format_exception(self.type_, self.value, self.tb)) return False if self.reraise: six.reraise(self.type_, self.value, self.tb) def forever_retry_uncaught_exceptions(infunc): def inner_func(*args, **kwargs): last_log_time = 0 last_exc_message = None exc_count = 0 while True: try: return infunc(*args, **kwargs) except Exception as exc: this_exc_message = six.u(str(exc)) if this_exc_message == last_exc_message: exc_count += 1 else: exc_count = 1 # Do not log any more frequently than once a minute unless # the exception message changes cur_time = int(time.time()) if (cur_time - last_log_time > 60 or this_exc_message != last_exc_message): logging.exception( _('Unexpected exception occurred %d time(s)... ' 'retrying.') % exc_count) last_log_time = cur_time last_exc_message = this_exc_message exc_count = 0 # This should be a very rare event. In case it isn't, do # a sleep. time.sleep(1) return inner_func ceilometer-2014.1.5/ceilometer/openstack/common/network_utils.py0000664000567000056700000000520112540642615026131 0ustar jenkinsjenkins00000000000000# Copyright 2012 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Network-related utilities and helper functions. """ from ceilometer.openstack.common.py3kcompat import urlutils def parse_host_port(address, default_port=None): """Interpret a string as a host:port pair. An IPv6 address MUST be escaped if accompanied by a port, because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334 means both [2001:db8:85a3::8a2e:370:7334] and [2001:db8:85a3::8a2e:370]:7334. >>> parse_host_port('server01:80') ('server01', 80) >>> parse_host_port('server01') ('server01', None) >>> parse_host_port('server01', default_port=1234) ('server01', 1234) >>> parse_host_port('[::1]:80') ('::1', 80) >>> parse_host_port('[::1]') ('::1', None) >>> parse_host_port('[::1]', default_port=1234) ('::1', 1234) >>> parse_host_port('2001:db8:85a3::8a2e:370:7334', default_port=1234) ('2001:db8:85a3::8a2e:370:7334', 1234) """ if address[0] == '[': # Escaped ipv6 _host, _port = address[1:].split(']') host = _host if ':' in _port: port = _port.split(':')[1] else: port = default_port else: if address.count(':') == 1: host, port = address.split(':') else: # 0 means ipv4, >1 means ipv6. # We prohibit unescaped ipv6 addresses with port. host = address port = default_port return (host, None if port is None else int(port)) def urlsplit(url, scheme='', allow_fragments=True): """Parse a URL using urlparse.urlsplit(), splitting query and fragments. This function papers over Python issue9374 when needed. The parameters are the same as urlparse.urlsplit. 
""" scheme, netloc, path, query, fragment = urlutils.urlsplit( url, scheme, allow_fragments) if allow_fragments and '#' in path: path, fragment = path.split('#', 1) if '?' in path: path, query = path.split('?', 1) return urlutils.SplitResult(scheme, netloc, path, query, fragment) ceilometer-2014.1.5/ceilometer/openstack/common/test.py0000664000567000056700000000577112540642615024213 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Common utilities used in testing""" import logging import os import tempfile import fixtures import testtools _TRUE_VALUES = ('True', 'true', '1', 'yes') _LOG_FORMAT = "%(levelname)8s [%(name)s] %(message)s" class BaseTestCase(testtools.TestCase): def setUp(self): super(BaseTestCase, self).setUp() self._set_timeout() self._fake_output() self._fake_logs() self.useFixture(fixtures.NestedTempfile()) self.useFixture(fixtures.TempHomeDir()) self.tempdirs = [] def _set_timeout(self): test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0) try: test_timeout = int(test_timeout) except ValueError: # If timeout value is invalid do not set a timeout. test_timeout = 0 if test_timeout > 0: self.useFixture(fixtures.Timeout(test_timeout, gentle=True)) def _fake_output(self): if os.environ.get('OS_STDOUT_CAPTURE') in _TRUE_VALUES: stdout = self.useFixture(fixtures.StringStream('stdout')).stream self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout)) if os.environ.get('OS_STDERR_CAPTURE') in _TRUE_VALUES: stderr = self.useFixture(fixtures.StringStream('stderr')).stream self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) def _fake_logs(self): if os.environ.get('OS_DEBUG') in _TRUE_VALUES: level = logging.DEBUG else: level = logging.INFO capture_logs = os.environ.get('OS_LOG_CAPTURE') in _TRUE_VALUES if capture_logs: self.useFixture( fixtures.FakeLogger( format=_LOG_FORMAT, level=level, nuke_handlers=capture_logs, ) ) else: logging.basicConfig(format=_LOG_FORMAT, level=level) def create_tempfiles(self, files, ext='.conf'): tempfiles = [] for (basename, contents) in files: if not os.path.isabs(basename): (fd, path) = tempfile.mkstemp(prefix=basename, suffix=ext) else: path = basename + ext fd = os.open(path, os.O_CREAT | os.O_WRONLY) tempfiles.append(path) try: os.write(fd, contents) finally: os.close(fd) return tempfiles ceilometer-2014.1.5/ceilometer/openstack/common/loopingcall.py0000664000567000056700000001103012540642615025520 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sys from eventlet import event from eventlet import greenthread from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import log as logging from ceilometer.openstack.common import timeutils LOG = logging.getLogger(__name__) class LoopingCallDone(Exception): """Exception to break out and stop a LoopingCall. The poll-function passed to LoopingCall can raise this exception to break out of the loop normally. This is somewhat analogous to StopIteration. An optional return-value can be included as the argument to the exception; this return-value will be returned by LoopingCall.wait() """ def __init__(self, retvalue=True): """:param retvalue: Value that LoopingCall.wait() should return.""" self.retvalue = retvalue class LoopingCallBase(object): def __init__(self, f=None, *args, **kw): self.args = args self.kw = kw self.f = f self._running = False self.done = None def stop(self): self._running = False def wait(self): return self.done.wait() class FixedIntervalLoopingCall(LoopingCallBase): """A fixed interval looping call.""" def start(self, interval, initial_delay=None): self._running = True done = event.Event() def _inner(): if initial_delay: greenthread.sleep(initial_delay) try: while self._running: start = timeutils.utcnow() self.f(*self.args, **self.kw) end = timeutils.utcnow() if not self._running: break delay = interval - timeutils.delta_seconds(start, end) if delay <= 0: LOG.warn(_('task run outlasted interval by %s sec') % -delay) greenthread.sleep(delay if delay > 0 else 0) except LoopingCallDone as e: self.stop() done.send(e.retvalue) except Exception: LOG.exception(_('in fixed duration looping call')) done.send_exception(*sys.exc_info()) return else: done.send(True) self.done = done greenthread.spawn_n(_inner) return self.done # TODO(mikal): this class name is deprecated in Havana and should be removed # in the I release LoopingCall = FixedIntervalLoopingCall class DynamicLoopingCall(LoopingCallBase): """A looping call which sleeps until the next known event. The function called should return how long to sleep for before being called again. 
""" def start(self, initial_delay=None, periodic_interval_max=None): self._running = True done = event.Event() def _inner(): if initial_delay: greenthread.sleep(initial_delay) try: while self._running: idle = self.f(*self.args, **self.kw) if not self._running: break if periodic_interval_max is not None: idle = min(idle, periodic_interval_max) LOG.debug(_('Dynamic looping call sleeping for %.02f ' 'seconds'), idle) greenthread.sleep(idle) except LoopingCallDone as e: self.stop() done.send(e.retvalue) except Exception: LOG.exception(_('in dynamic looping call')) done.send_exception(*sys.exc_info()) return else: done.send(True) self.done = done greenthread.spawn(_inner) return self.done ceilometer-2014.1.5/ceilometer/openstack/common/notifier/0000775000567000056700000000000012540643223024463 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/notifier/rpc_notifier.py0000664000567000056700000000334412540642615027530 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo.config import cfg from ceilometer.openstack.common import context as req_context from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import log as logging from ceilometer.openstack.common import rpc LOG = logging.getLogger(__name__) notification_topic_opt = cfg.ListOpt( 'notification_topics', default=['notifications', ], help='AMQP topic used for OpenStack notifications') CONF = cfg.CONF CONF.register_opt(notification_topic_opt) def notify(context, message): """Sends a notification via RPC.""" if not context: context = req_context.get_admin_context() priority = message.get('priority', CONF.default_notification_level) priority = priority.lower() for topic in CONF.notification_topics: topic = '%s.%s' % (topic, priority) try: rpc.notify(context, topic, message) except Exception: LOG.exception(_("Could not send notification to %(topic)s. " "Payload=%(message)s"), {"topic": topic, "message": message}) ceilometer-2014.1.5/ceilometer/openstack/common/notifier/test_notifier.py0000664000567000056700000000143212540642615027717 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
NOTIFICATIONS = [] def notify(_context, message): """Test notifier, stores notifications in memory for unittests.""" NOTIFICATIONS.append(message) ceilometer-2014.1.5/ceilometer/openstack/common/notifier/log_notifier.py0000664000567000056700000000236612540642615027530 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo.config import cfg from ceilometer.openstack.common import jsonutils from ceilometer.openstack.common import log as logging CONF = cfg.CONF def notify(_context, message): """Notifies the recipient of the desired event given the model. Log notifications using OpenStack's default logging system. """ priority = message.get('priority', CONF.default_notification_level) priority = priority.lower() logger = logging.getLogger( 'ceilometer.openstack.common.notification.%s' % message['event_type']) getattr(logger, priority)(jsonutils.dumps(message)) ceilometer-2014.1.5/ceilometer/openstack/common/notifier/__init__.py0000664000567000056700000000000012540642615026566 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/notifier/api.py0000664000567000056700000001263312540642615025617 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import socket import uuid from oslo.config import cfg from ceilometer.openstack.common import context from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import importutils from ceilometer.openstack.common import jsonutils from ceilometer.openstack.common import log as logging from ceilometer.openstack.common import timeutils LOG = logging.getLogger(__name__) notifier_opts = [ cfg.MultiStrOpt('notification_driver', default=[], help='Driver or drivers to handle sending notifications'), cfg.StrOpt('default_notification_level', default='INFO', help='Default notification level for outgoing notifications'), cfg.StrOpt('default_publisher_id', default=None, help='Default publisher_id for outgoing notifications'), ] CONF = cfg.CONF CONF.register_opts(notifier_opts) WARN = 'WARN' INFO = 'INFO' ERROR = 'ERROR' CRITICAL = 'CRITICAL' DEBUG = 'DEBUG' log_levels = (DEBUG, WARN, INFO, ERROR, CRITICAL) class BadPriorityException(Exception): pass def notify_decorator(name, fn): """Decorator for notify which is used from utils.monkey_patch(). 
:param name: name of the function :param function: - object of the function :returns: function -- decorated function """ def wrapped_func(*args, **kwarg): body = {} body['args'] = [] body['kwarg'] = {} for arg in args: body['args'].append(arg) for key in kwarg: body['kwarg'][key] = kwarg[key] ctxt = context.get_context_from_function_and_args(fn, args, kwarg) notify(ctxt, CONF.default_publisher_id or socket.gethostname(), name, CONF.default_notification_level, body) return fn(*args, **kwarg) return wrapped_func def publisher_id(service, host=None): if not host: try: host = CONF.host except AttributeError: host = CONF.default_publisher_id or socket.gethostname() return "%s.%s" % (service, host) def notify(context, publisher_id, event_type, priority, payload): """Sends a notification using the specified driver :param publisher_id: the source worker_type.host of the message :param event_type: the literal type of event (ex. Instance Creation) :param priority: patterned after the enumeration of Python logging levels in the set (DEBUG, WARN, INFO, ERROR, CRITICAL) :param payload: A python dictionary of attributes Outgoing message format includes the above parameters, and appends the following: message_id a UUID representing the id for this notification timestamp the GMT timestamp the notification was sent at The composite message will be constructed as a dictionary of the above attributes, which will then be sent via the transport mechanism defined by the driver. Message example:: {'message_id': str(uuid.uuid4()), 'publisher_id': 'compute.host1', 'timestamp': timeutils.utcnow(), 'priority': 'WARN', 'event_type': 'compute.create_instance', 'payload': {'instance_id': 12, ... }} """ if priority not in log_levels: raise BadPriorityException( _('%s not in valid priorities') % priority) # Ensure everything is JSON serializable. payload = jsonutils.to_primitive(payload, convert_instances=True) msg = dict(message_id=str(uuid.uuid4()), publisher_id=publisher_id, event_type=event_type, priority=priority, payload=payload, timestamp=str(timeutils.utcnow())) for driver in _get_drivers(): try: driver.notify(context, msg) except Exception as e: LOG.exception(_("Problem '%(e)s' attempting to " "send to notification system. " "Payload=%(payload)s") % dict(e=e, payload=payload)) _drivers = None def _get_drivers(): """Instantiate, cache, and return drivers based on the CONF.""" global _drivers if _drivers is None: _drivers = {} for notification_driver in CONF.notification_driver: try: driver = importutils.import_module(notification_driver) _drivers[notification_driver] = driver except ImportError: LOG.exception(_("Failed to load notifier %s. " "These notifications will not be sent.") % notification_driver) return _drivers.values() def _reset_drivers(): """Used by unit tests to reset the drivers.""" global _drivers _drivers = None ceilometer-2014.1.5/ceilometer/openstack/common/notifier/rpc_notifier2.py0000664000567000056700000000370312540642615027611 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. '''messaging based notification driver, with message envelopes''' from oslo.config import cfg from ceilometer.openstack.common import context as req_context from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import log as logging from ceilometer.openstack.common import rpc LOG = logging.getLogger(__name__) notification_topic_opt = cfg.ListOpt( 'topics', default=['notifications', ], help='AMQP topic(s) used for OpenStack notifications') opt_group = cfg.OptGroup(name='rpc_notifier2', title='Options for rpc_notifier2') CONF = cfg.CONF CONF.register_group(opt_group) CONF.register_opt(notification_topic_opt, opt_group) def notify(context, message): """Sends a notification via RPC.""" if not context: context = req_context.get_admin_context() priority = message.get('priority', CONF.default_notification_level) priority = priority.lower() for topic in CONF.rpc_notifier2.topics: topic = '%s.%s' % (topic, priority) try: rpc.notify(context, topic, message, envelope=True) except Exception: LOG.exception(_("Could not send notification to %(topic)s. " "Payload=%(message)s"), {"topic": topic, "message": message}) ceilometer-2014.1.5/ceilometer/openstack/common/notifier/no_op_notifier.py0000664000567000056700000000135512540642615030056 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. def notify(_context, message): """Notifies the recipient of the desired event given the model.""" pass ceilometer-2014.1.5/ceilometer/openstack/common/notifier/proxy.py0000664000567000056700000000476612540642615026237 0ustar jenkinsjenkins00000000000000# Copyright 2013 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ A temporary helper which emulates ceilometer.messaging.Notifier. This helper method allows us to do the tedious porting to the new Notifier API as a standalone commit so that the commit which switches us to ceilometer.messaging is smaller and easier to review. This file will be removed as part of that commit. 
""" from oslo.config import cfg from ceilometer.openstack.common.notifier import api as notifier_api CONF = cfg.CONF class Notifier(object): def __init__(self, publisher_id): super(Notifier, self).__init__() self.publisher_id = publisher_id _marker = object() def prepare(self, publisher_id=_marker): ret = self.__class__(self.publisher_id) if publisher_id is not self._marker: ret.publisher_id = publisher_id return ret def _notify(self, ctxt, event_type, payload, priority): notifier_api.notify(ctxt, self.publisher_id, event_type, priority, payload) def audit(self, ctxt, event_type, payload): # No audit in old notifier. self._notify(ctxt, event_type, payload, 'INFO') def debug(self, ctxt, event_type, payload): self._notify(ctxt, event_type, payload, 'DEBUG') def info(self, ctxt, event_type, payload): self._notify(ctxt, event_type, payload, 'INFO') def warn(self, ctxt, event_type, payload): self._notify(ctxt, event_type, payload, 'WARN') warning = warn def error(self, ctxt, event_type, payload): self._notify(ctxt, event_type, payload, 'ERROR') def critical(self, ctxt, event_type, payload): self._notify(ctxt, event_type, payload, 'CRITICAL') def get_notifier(service=None, host=None, publisher_id=None): if not publisher_id: publisher_id = "%s.%s" % (service, host or CONF.host) return Notifier(publisher_id) ceilometer-2014.1.5/ceilometer/openstack/common/policy.py0000664000567000056700000006252512540642615024533 0ustar jenkinsjenkins00000000000000# Copyright (c) 2012 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Common Policy Engine Implementation Policies can be expressed in one of two forms: A list of lists, or a string written in the new policy language. In the list-of-lists representation, each check inside the innermost list is combined as with an "and" conjunction--for that check to pass, all the specified checks must pass. These innermost lists are then combined as with an "or" conjunction. This is the original way of expressing policies, but there now exists a new way: the policy language. In the policy language, each check is specified the same way as in the list-of-lists representation: a simple "a:b" pair that is matched to the correct code to perform that check. However, conjunction operators are available, allowing for more expressiveness in crafting policies. As an example, take the following rule, expressed in the list-of-lists representation:: [["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]] In the policy language, this becomes:: role:admin or (project_id:%(project_id)s and role:projectadmin) The policy language also has the "not" operator, allowing a richer policy rule:: project_id:%(project_id)s and not role:dunce Finally, two special policy checks should be mentioned; the policy check "@" will always accept an access, and the policy check "!" will always reject an access. (Note that if a rule is either the empty list ("[]") or the empty string, this is equivalent to the "@" policy check.) Of these, the "!" 
policy check is probably the most useful, as it allows particular rules to be explicitly disabled. """ import abc import re from oslo.config import cfg import six import six.moves.urllib.parse as urlparse import six.moves.urllib.request as urlrequest from ceilometer.openstack.common import fileutils from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import jsonutils from ceilometer.openstack.common import log as logging policy_opts = [ cfg.StrOpt('policy_file', default='policy.json', help=_('JSON file containing policy')), cfg.StrOpt('policy_default_rule', default='default', help=_('Rule enforced when requested rule is not found')), ] CONF = cfg.CONF CONF.register_opts(policy_opts) LOG = logging.getLogger(__name__) _checks = {} class PolicyNotAuthorized(Exception): def __init__(self, rule): msg = _("Policy doesn't allow %s to be performed.") % rule super(PolicyNotAuthorized, self).__init__(msg) class Rules(dict): """A store for rules. Handles the default_rule setting directly.""" @classmethod def load_json(cls, data, default_rule=None): """Allow loading of JSON rule data.""" # Suck in the JSON data and parse the rules rules = dict((k, parse_rule(v)) for k, v in jsonutils.loads(data).items()) return cls(rules, default_rule) def __init__(self, rules=None, default_rule=None): """Initialize the Rules store.""" super(Rules, self).__init__(rules or {}) self.default_rule = default_rule def __missing__(self, key): """Implements the default rule handling.""" if isinstance(self.default_rule, dict): raise KeyError(key) # If the default rule isn't actually defined, do something # reasonably intelligent if not self.default_rule: raise KeyError(key) if isinstance(self.default_rule, BaseCheck): return self.default_rule # We need to check this or we can get infinite recursion if self.default_rule not in self: raise KeyError(key) elif isinstance(self.default_rule, six.string_types): return self[self.default_rule] def __str__(self): """Dumps a string representation of the rules.""" # Start by building the canonical strings for the rules out_rules = {} for key, value in self.items(): # Use empty string for singleton TrueCheck instances if isinstance(value, TrueCheck): out_rules[key] = '' else: out_rules[key] = str(value) # Dump a pretty-printed JSON representation return jsonutils.dumps(out_rules, indent=4) class Enforcer(object): """Responsible for loading and enforcing rules. :param policy_file: Custom policy file to use, if none is specified, `CONF.policy_file` will be used. :param rules: Default dictionary / Rules to use. It will be considered just in the first instantiation. If `load_rules(True)`, `clear()` or `set_rules(True)` is called this will be overwritten. :param default_rule: Default rule to use, CONF.default_rule will be used if none is specified. """ def __init__(self, policy_file=None, rules=None, default_rule=None): self.rules = Rules(rules, default_rule) self.default_rule = default_rule or CONF.policy_default_rule self.policy_path = None self.policy_file = policy_file or CONF.policy_file def set_rules(self, rules, overwrite=True): """Create a new Rules object based on the provided dict of rules. :param rules: New rules to use. It should be an instance of dict. :param overwrite: Whether to overwrite current rules or update them with the new rules. 
""" if not isinstance(rules, dict): raise TypeError(_("Rules must be an instance of dict or Rules, " "got %s instead") % type(rules)) if overwrite: self.rules = Rules(rules, self.default_rule) else: self.rules.update(rules) def clear(self): """Clears Enforcer rules, policy's cache and policy's path.""" self.set_rules({}) self.default_rule = None self.policy_path = None def load_rules(self, force_reload=False): """Loads policy_path's rules. Policy file is cached and will be reloaded if modified. :param force_reload: Whether to overwrite current rules. """ if not self.policy_path: self.policy_path = self._get_policy_path() reloaded, data = fileutils.read_cached_file(self.policy_path, force_reload=force_reload) if reloaded or not self.rules: rules = Rules.load_json(data, self.default_rule) self.set_rules(rules) LOG.debug(_("Rules successfully reloaded")) def _get_policy_path(self): """Locate the policy json data file. :param policy_file: Custom policy file to locate. :returns: The policy path :raises: ConfigFilesNotFoundError if the file couldn't be located. """ policy_file = CONF.find_file(self.policy_file) if policy_file: return policy_file raise cfg.ConfigFilesNotFoundError((self.policy_file,)) def enforce(self, rule, target, creds, do_raise=False, exc=None, *args, **kwargs): """Checks authorization of a rule against the target and credentials. :param rule: A string or BaseCheck instance specifying the rule to evaluate. :param target: As much information about the object being operated on as possible, as a dictionary. :param creds: As much information about the user performing the action as possible, as a dictionary. :param do_raise: Whether to raise an exception or not if check fails. :param exc: Class of the exception to raise if the check fails. Any remaining arguments passed to check() (both positional and keyword arguments) will be passed to the exception class. If not specified, PolicyNotAuthorized will be used. :return: Returns False if the policy does not allow the action and exc is not provided; otherwise, returns a value that evaluates to True. Note: for rules using the "case" expression, this True value will be the specified string from the expression. """ # NOTE(flaper87): Not logging target or creds to avoid # potential security issues. LOG.debug(_("Rule %s will be now enforced") % rule) self.load_rules() # Allow the rule to be a Check tree if isinstance(rule, BaseCheck): result = rule(target, creds, self) elif not self.rules: # No rules to reference means we're going to fail closed result = False else: try: # Evaluate the rule result = self.rules[rule](target, creds, self) except KeyError: LOG.debug(_("Rule [%s] doesn't exist") % rule) # If the rule doesn't exist, fail closed result = False # If it is False, raise the exception if requested if do_raise and not result: if exc: raise exc(*args, **kwargs) raise PolicyNotAuthorized(rule) return result @six.add_metaclass(abc.ABCMeta) class BaseCheck(object): """Abstract base class for Check classes.""" @abc.abstractmethod def __str__(self): """String representation of the Check tree rooted at this node.""" pass @abc.abstractmethod def __call__(self, target, cred, enforcer): """Triggers if instance of the class is called. Performs the check. Returns False to reject the access or a true value (not necessary True) to accept the access. """ pass class FalseCheck(BaseCheck): """A policy check that always returns False (disallow).""" def __str__(self): """Return a string representation of this check.""" return "!" 
def __call__(self, target, cred, enforcer): """Check the policy.""" return False class TrueCheck(BaseCheck): """A policy check that always returns True (allow).""" def __str__(self): """Return a string representation of this check.""" return "@" def __call__(self, target, cred, enforcer): """Check the policy.""" return True class Check(BaseCheck): """A base class to allow for user-defined policy checks.""" def __init__(self, kind, match): """Initiates Check instance. :param kind: The kind of the check, i.e., the field before the ':'. :param match: The match of the check, i.e., the field after the ':'. """ self.kind = kind self.match = match def __str__(self): """Return a string representation of this check.""" return "%s:%s" % (self.kind, self.match) class NotCheck(BaseCheck): """Implements the "not" logical operator. A policy check that inverts the result of another policy check. """ def __init__(self, rule): """Initialize the 'not' check. :param rule: The rule to negate. Must be a Check. """ self.rule = rule def __str__(self): """Return a string representation of this check.""" return "not %s" % self.rule def __call__(self, target, cred, enforcer): """Check the policy. Returns the logical inverse of the wrapped check. """ return not self.rule(target, cred, enforcer) class AndCheck(BaseCheck): """Implements the "and" logical operator. A policy check that requires that a list of other checks all return True. """ def __init__(self, rules): """Initialize the 'and' check. :param rules: A list of rules that will be tested. """ self.rules = rules def __str__(self): """Return a string representation of this check.""" return "(%s)" % ' and '.join(str(r) for r in self.rules) def __call__(self, target, cred, enforcer): """Check the policy. Requires that all rules accept in order to return True. """ for rule in self.rules: if not rule(target, cred, enforcer): return False return True def add_check(self, rule): """Adds rule to be tested. Allows addition of another rule to the list of rules that will be tested. Returns the AndCheck object for convenience. """ self.rules.append(rule) return self class OrCheck(BaseCheck): """Implements the "or" operator. A policy check that requires that at least one of a list of other checks returns True. """ def __init__(self, rules): """Initialize the 'or' check. :param rules: A list of rules that will be tested. """ self.rules = rules def __str__(self): """Return a string representation of this check.""" return "(%s)" % ' or '.join(str(r) for r in self.rules) def __call__(self, target, cred, enforcer): """Check the policy. Requires that at least one rule accept in order to return True. """ for rule in self.rules: if rule(target, cred, enforcer): return True return False def add_check(self, rule): """Adds rule to be tested. Allows addition of another rule to the list of rules that will be tested. Returns the OrCheck object for convenience. 
""" self.rules.append(rule) return self def _parse_check(rule): """Parse a single base check rule into an appropriate Check object.""" # Handle the special checks if rule == '!': return FalseCheck() elif rule == '@': return TrueCheck() try: kind, match = rule.split(':', 1) except Exception: LOG.exception(_("Failed to understand rule %s") % rule) # If the rule is invalid, we'll fail closed return FalseCheck() # Find what implements the check if kind in _checks: return _checks[kind](kind, match) elif None in _checks: return _checks[None](kind, match) else: LOG.error(_("No handler for matches of kind %s") % kind) return FalseCheck() def _parse_list_rule(rule): """Translates the old list-of-lists syntax into a tree of Check objects. Provided for backwards compatibility. """ # Empty rule defaults to True if not rule: return TrueCheck() # Outer list is joined by "or"; inner list by "and" or_list = [] for inner_rule in rule: # Elide empty inner lists if not inner_rule: continue # Handle bare strings if isinstance(inner_rule, six.string_types): inner_rule = [inner_rule] # Parse the inner rules into Check objects and_list = [_parse_check(r) for r in inner_rule] # Append the appropriate check to the or_list if len(and_list) == 1: or_list.append(and_list[0]) else: or_list.append(AndCheck(and_list)) # If we have only one check, omit the "or" if not or_list: return FalseCheck() elif len(or_list) == 1: return or_list[0] return OrCheck(or_list) # Used for tokenizing the policy language _tokenize_re = re.compile(r'\s+') def _parse_tokenize(rule): """Tokenizer for the policy language. Most of the single-character tokens are specified in the _tokenize_re; however, parentheses need to be handled specially, because they can appear inside a check string. Thankfully, those parentheses that appear inside a check string can never occur at the very beginning or end ("%(variable)s" is the correct syntax). """ for tok in _tokenize_re.split(rule): # Skip empty tokens if not tok or tok.isspace(): continue # Handle leading parens on the token clean = tok.lstrip('(') for i in range(len(tok) - len(clean)): yield '(', '(' # If it was only parentheses, continue if not clean: continue else: tok = clean # Handle trailing parens on the token clean = tok.rstrip(')') trail = len(tok) - len(clean) # Yield the cleaned token lowered = clean.lower() if lowered in ('and', 'or', 'not'): # Special tokens yield lowered, clean elif clean: # Not a special token, but not composed solely of ')' if len(tok) >= 2 and ((tok[0], tok[-1]) in [('"', '"'), ("'", "'")]): # It's a quoted string yield 'string', tok[1:-1] else: yield 'check', _parse_check(clean) # Yield the trailing parens for i in range(trail): yield ')', ')' class ParseStateMeta(type): """Metaclass for the ParseState class. Facilitates identifying reduction methods. """ def __new__(mcs, name, bases, cls_dict): """Create the class. Injects the 'reducers' list, a list of tuples matching token sequences to the names of the corresponding reduction methods. """ reducers = [] for key, value in cls_dict.items(): if not hasattr(value, 'reducers'): continue for reduction in value.reducers: reducers.append((reduction, key)) cls_dict['reducers'] = reducers return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict) def reducer(*tokens): """Decorator for reduction methods. Arguments are a sequence of tokens, in order, which should trigger running this reduction method. 
""" def decorator(func): # Make sure we have a list of reducer sequences if not hasattr(func, 'reducers'): func.reducers = [] # Add the tokens to the list of reducer sequences func.reducers.append(list(tokens)) return func return decorator @six.add_metaclass(ParseStateMeta) class ParseState(object): """Implement the core of parsing the policy language. Uses a greedy reduction algorithm to reduce a sequence of tokens into a single terminal, the value of which will be the root of the Check tree. Note: error reporting is rather lacking. The best we can get with this parser formulation is an overall "parse failed" error. Fortunately, the policy language is simple enough that this shouldn't be that big a problem. """ def __init__(self): """Initialize the ParseState.""" self.tokens = [] self.values = [] def reduce(self): """Perform a greedy reduction of the token stream. If a reducer method matches, it will be executed, then the reduce() method will be called recursively to search for any more possible reductions. """ for reduction, methname in self.reducers: if (len(self.tokens) >= len(reduction) and self.tokens[-len(reduction):] == reduction): # Get the reduction method meth = getattr(self, methname) # Reduce the token stream results = meth(*self.values[-len(reduction):]) # Update the tokens and values self.tokens[-len(reduction):] = [r[0] for r in results] self.values[-len(reduction):] = [r[1] for r in results] # Check for any more reductions return self.reduce() def shift(self, tok, value): """Adds one more token to the state. Calls reduce().""" self.tokens.append(tok) self.values.append(value) # Do a greedy reduce... self.reduce() @property def result(self): """Obtain the final result of the parse. Raises ValueError if the parse failed to reduce to a single result. """ if len(self.values) != 1: raise ValueError("Could not parse rule") return self.values[0] @reducer('(', 'check', ')') @reducer('(', 'and_expr', ')') @reducer('(', 'or_expr', ')') def _wrap_check(self, _p1, check, _p2): """Turn parenthesized expressions into a 'check' token.""" return [('check', check)] @reducer('check', 'and', 'check') def _make_and_expr(self, check1, _and, check2): """Create an 'and_expr'. Join two checks by the 'and' operator. """ return [('and_expr', AndCheck([check1, check2]))] @reducer('and_expr', 'and', 'check') def _extend_and_expr(self, and_expr, _and, check): """Extend an 'and_expr' by adding one more check.""" return [('and_expr', and_expr.add_check(check))] @reducer('check', 'or', 'check') def _make_or_expr(self, check1, _or, check2): """Create an 'or_expr'. Join two checks by the 'or' operator. """ return [('or_expr', OrCheck([check1, check2]))] @reducer('or_expr', 'or', 'check') def _extend_or_expr(self, or_expr, _or, check): """Extend an 'or_expr' by adding one more check.""" return [('or_expr', or_expr.add_check(check))] @reducer('not', 'check') def _make_not_expr(self, _not, check): """Invert the result of another check.""" return [('check', NotCheck(check))] def _parse_text_rule(rule): """Parses policy to the tree. Translates a policy written in the policy language into a tree of Check objects. 
""" # Empty rule means always accept if not rule: return TrueCheck() # Parse the token stream state = ParseState() for tok, value in _parse_tokenize(rule): state.shift(tok, value) try: return state.result except ValueError: # Couldn't parse the rule LOG.exception(_("Failed to understand rule %r") % rule) # Fail closed return FalseCheck() def parse_rule(rule): """Parses a policy rule into a tree of Check objects.""" # If the rule is a string, it's in the policy language if isinstance(rule, six.string_types): return _parse_text_rule(rule) return _parse_list_rule(rule) def register(name, func=None): """Register a function or Check class as a policy check. :param name: Gives the name of the check type, e.g., 'rule', 'role', etc. If name is None, a default check type will be registered. :param func: If given, provides the function or class to register. If not given, returns a function taking one argument to specify the function or class to register, allowing use as a decorator. """ # Perform the actual decoration by registering the function or # class. Returns the function or class for compliance with the # decorator interface. def decorator(func): _checks[name] = func return func # If the function or class is given, do the registration if func: return decorator(func) return decorator @register("rule") class RuleCheck(Check): def __call__(self, target, creds, enforcer): """Recursively checks credentials based on the defined rules.""" try: return enforcer.rules[self.match](target, creds, enforcer) except KeyError: # We don't have any matching rule; fail closed return False @register("role") class RoleCheck(Check): def __call__(self, target, creds, enforcer): """Check that there is a matching role in the cred dict.""" return self.match.lower() in [x.lower() for x in creds['roles']] @register('http') class HttpCheck(Check): def __call__(self, target, creds, enforcer): """Check http: rules by calling to a remote server. This example implementation simply verifies that the response is exactly 'True'. """ url = ('http:' + self.match) % target data = {'target': jsonutils.dumps(target), 'credentials': jsonutils.dumps(creds)} post_data = urlparse.urlencode(data) f = urlrequest.urlopen(url, post_data) return f.read() == "True" @register(None) class GenericCheck(Check): def __call__(self, target, creds, enforcer): """Check an individual match. Matches look like: tenant:%(tenant_id)s role:compute:admin """ # TODO(termie): do dict inspection via dot syntax try: match = self.match % target except KeyError: # While doing GenericCheck if key not # present in Target return false return False if self.kind in creds: return match == six.text_type(creds[self.kind]) return False ceilometer-2014.1.5/ceilometer/openstack/common/log_handler.py0000664000567000056700000000213412540642615025500 0ustar jenkinsjenkins00000000000000# Copyright 2013 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import logging from oslo.config import cfg from ceilometer.openstack.common import notifier class PublishErrorsHandler(logging.Handler): def emit(self, record): if ('ceilometer.openstack.common.notifier.log_notifier' in cfg.CONF.notification_driver): return notifier.api.notify(None, 'error.publisher', 'error_notification', notifier.api.ERROR, dict(error=record.getMessage())) ceilometer-2014.1.5/ceilometer/openstack/common/log.py0000664000567000056700000006064412540642615024005 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Openstack logging handler. This module adds to logging functionality by adding the option to specify a context object when calling the various log methods. If the context object is not specified, default formatting is used. Additionally, an instance uuid may be passed as part of the log message, which is intended to make it easier for admins to find messages related to a specific instance. It also allows setting of formatting information through conf. """ import inspect import itertools import logging import logging.config import logging.handlers import os import re import sys import traceback from oslo.config import cfg import six from six import moves from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import importutils from ceilometer.openstack.common import jsonutils from ceilometer.openstack.common import local _DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S" _SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password'] # NOTE(ldbragst): Let's build a list of regex objects using the list of # _SANITIZE_KEYS we already have. This way, we only have to add the new key # to the list of _SANITIZE_KEYS and we can generate regular expressions # for XML and JSON automatically. _SANITIZE_PATTERNS = [] _FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])', r'(<%(key)s>).*?(</%(key)s>)', r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])', r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])'] for key in _SANITIZE_KEYS: for pattern in _FORMAT_PATTERNS: reg_ex = re.compile(pattern % {'key': key}, re.DOTALL) _SANITIZE_PATTERNS.append(reg_ex) common_cli_opts = [ cfg.BoolOpt('debug', short='d', default=False, help='Print debugging output (set logging level to ' 'DEBUG instead of default WARNING level).'), cfg.BoolOpt('verbose', short='v', default=False, help='Print more verbose output (set logging level to ' 'INFO instead of default WARNING level).'), ] logging_cli_opts = [ cfg.StrOpt('log-config-append', metavar='PATH', deprecated_name='log-config', help='The name of logging configuration file. It does not ' 'disable existing loggers, but just appends specified ' 'logging configuration to any other existing logging ' 'options.
Please see the Python logging module ' 'documentation for details on logging configuration ' 'files.'), cfg.StrOpt('log-format', default=None, metavar='FORMAT', help='DEPRECATED. ' 'A logging.Formatter log message format string which may ' 'use any of the available logging.LogRecord attributes. ' 'This option is deprecated. Please use ' 'logging_context_format_string and ' 'logging_default_format_string instead.'), cfg.StrOpt('log-date-format', default=_DEFAULT_LOG_DATE_FORMAT, metavar='DATE_FORMAT', help='Format string for %%(asctime)s in log records. ' 'Default: %(default)s'), cfg.StrOpt('log-file', metavar='PATH', deprecated_name='logfile', help='(Optional) Name of log file to output to. ' 'If no default is set, logging will go to stdout.'), cfg.StrOpt('log-dir', deprecated_name='logdir', help='(Optional) The base directory used for relative ' '--log-file paths'), cfg.BoolOpt('use-syslog', default=False, help='Use syslog for logging. ' 'Existing syslog format is DEPRECATED during I, ' 'and then will be changed in J to honor RFC5424'), cfg.BoolOpt('use-syslog-rfc-format', # TODO(bogdando) remove or use True after existing # syslog format deprecation in J default=False, help='(Optional) Use syslog rfc5424 format for logging. ' 'If enabled, will add APP-NAME (RFC5424) before the ' 'MSG part of the syslog message. The old format ' 'without APP-NAME is deprecated in I, ' 'and will be removed in J.'), cfg.StrOpt('syslog-log-facility', default='LOG_USER', help='Syslog facility to receive log lines') ] generic_log_opts = [ cfg.BoolOpt('use_stderr', default=True, help='Log output to standard error') ] log_opts = [ cfg.StrOpt('logging_context_format_string', default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' '%(name)s [%(request_id)s %(user_identity)s] ' '%(instance)s%(message)s', help='Format string to use for log messages with context'), cfg.StrOpt('logging_default_format_string', default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' '%(name)s [-] %(instance)s%(message)s', help='Format string to use for log messages without context'), cfg.StrOpt('logging_debug_format_suffix', default='%(funcName)s %(pathname)s:%(lineno)d', help='Data to append to log format when level is DEBUG'), cfg.StrOpt('logging_exception_prefix', default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s ' '%(instance)s', help='Prefix each line of exception output with this format'), cfg.ListOpt('default_log_levels', default=[ 'amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN' ], help='List of logger=LEVEL pairs'), cfg.BoolOpt('publish_errors', default=False, help='Publish error events'), cfg.BoolOpt('fatal_deprecations', default=False, help='Make deprecations fatal'), # NOTE(mikal): there are two options here because sometimes we are handed # a full instance (and could include more information), and other times we # are just handed a UUID for the instance. 
cfg.StrOpt('instance_format', default='[instance: %(uuid)s] ', help='If an instance is passed with the log message, format ' 'it like this'), cfg.StrOpt('instance_uuid_format', default='[instance: %(uuid)s] ', help='If an instance UUID is passed with the log message, ' 'format it like this'), ] CONF = cfg.CONF CONF.register_cli_opts(common_cli_opts) CONF.register_cli_opts(logging_cli_opts) CONF.register_opts(generic_log_opts) CONF.register_opts(log_opts) # our new audit level # NOTE(jkoelker) Since we synthesized an audit level, make the logging # module aware of it so it acts like other levels. logging.AUDIT = logging.INFO + 1 logging.addLevelName(logging.AUDIT, 'AUDIT') try: NullHandler = logging.NullHandler except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7 class NullHandler(logging.Handler): def handle(self, record): pass def emit(self, record): pass def createLock(self): self.lock = None def _dictify_context(context): if context is None: return None if not isinstance(context, dict) and getattr(context, 'to_dict', None): context = context.to_dict() return context def _get_binary_name(): return os.path.basename(inspect.stack()[-1][1]) def _get_log_file_path(binary=None): logfile = CONF.log_file logdir = CONF.log_dir if logfile and not logdir: return logfile if logfile and logdir: return os.path.join(logdir, logfile) if logdir: binary = binary or _get_binary_name() return '%s.log' % (os.path.join(logdir, binary),) return None def mask_password(message, secret="***"): """Replace password with 'secret' in message. :param message: The string which includes security information. :param secret: value with which to replace passwords. :returns: The unicode value of message with the password fields masked. For example: >>> mask_password("'adminPass' : 'aaaaa'") "'adminPass' : '***'" >>> mask_password("'admin_pass' : 'aaaaa'") "'admin_pass' : '***'" >>> mask_password('"password" : "aaaaa"') '"password" : "***"' >>> mask_password("'original_password' : 'aaaaa'") "'original_password' : '***'" >>> mask_password("u'original_password' : u'aaaaa'") "u'original_password' : u'***'" """ message = six.text_type(message) # NOTE(ldbragst): Check to see if anything in message contains any key # specified in _SANITIZE_KEYS, if not then just return the message since # we don't have to mask any passwords. 
if not any(key in message for key in _SANITIZE_KEYS): return message secret = r'\g<1>' + secret + r'\g<2>' for pattern in _SANITIZE_PATTERNS: message = re.sub(pattern, secret, message) return message class BaseLoggerAdapter(logging.LoggerAdapter): def audit(self, msg, *args, **kwargs): self.log(logging.AUDIT, msg, *args, **kwargs) class LazyAdapter(BaseLoggerAdapter): def __init__(self, name='unknown', version='unknown'): self._logger = None self.extra = {} self.name = name self.version = version @property def logger(self): if not self._logger: self._logger = getLogger(self.name, self.version) return self._logger class ContextAdapter(BaseLoggerAdapter): warn = logging.LoggerAdapter.warning def __init__(self, logger, project_name, version_string): self.logger = logger self.project = project_name self.version = version_string @property def handlers(self): return self.logger.handlers def deprecated(self, msg, *args, **kwargs): stdmsg = _("Deprecated: %s") % msg if CONF.fatal_deprecations: self.critical(stdmsg, *args, **kwargs) raise DeprecatedConfig(msg=stdmsg) else: self.warn(stdmsg, *args, **kwargs) def process(self, msg, kwargs): # NOTE(mrodden): catch any Message/other object and # coerce to unicode before they can get # to the python logging and possibly # cause string encoding trouble if not isinstance(msg, six.string_types): msg = six.text_type(msg) if 'extra' not in kwargs: kwargs['extra'] = {} extra = kwargs['extra'] context = kwargs.pop('context', None) if not context: context = getattr(local.store, 'context', None) if context: extra.update(_dictify_context(context)) instance = kwargs.pop('instance', None) instance_uuid = (extra.get('instance_uuid', None) or kwargs.pop('instance_uuid', None)) instance_extra = '' if instance: instance_extra = CONF.instance_format % instance elif instance_uuid: instance_extra = (CONF.instance_uuid_format % {'uuid': instance_uuid}) extra['instance'] = instance_extra extra.setdefault('user_identity', kwargs.pop('user_identity', None)) extra['project'] = self.project extra['version'] = self.version extra['extra'] = extra.copy() return msg, kwargs class JSONFormatter(logging.Formatter): def __init__(self, fmt=None, datefmt=None): # NOTE(jkoelker) we ignore the fmt argument, but its still there # since logging.config.fileConfig passes it. 
self.datefmt = datefmt def formatException(self, ei, strip_newlines=True): lines = traceback.format_exception(*ei) if strip_newlines: lines = [moves.filter( lambda x: x, line.rstrip().splitlines()) for line in lines] lines = list(itertools.chain(*lines)) return lines def format(self, record): message = {'message': record.getMessage(), 'asctime': self.formatTime(record, self.datefmt), 'name': record.name, 'msg': record.msg, 'args': record.args, 'levelname': record.levelname, 'levelno': record.levelno, 'pathname': record.pathname, 'filename': record.filename, 'module': record.module, 'lineno': record.lineno, 'funcname': record.funcName, 'created': record.created, 'msecs': record.msecs, 'relative_created': record.relativeCreated, 'thread': record.thread, 'thread_name': record.threadName, 'process_name': record.processName, 'process': record.process, 'traceback': None} if hasattr(record, 'extra'): message['extra'] = record.extra if record.exc_info: message['traceback'] = self.formatException(record.exc_info) return jsonutils.dumps(message) def _create_logging_excepthook(product_name): def logging_excepthook(exc_type, value, tb): extra = {} if CONF.verbose or CONF.debug: extra['exc_info'] = (exc_type, value, tb) getLogger(product_name).critical( "".join(traceback.format_exception_only(exc_type, value)), **extra) return logging_excepthook class LogConfigError(Exception): message = _('Error loading logging config %(log_config)s: %(err_msg)s') def __init__(self, log_config, err_msg): self.log_config = log_config self.err_msg = err_msg def __str__(self): return self.message % dict(log_config=self.log_config, err_msg=self.err_msg) def _load_log_config(log_config_append): try: logging.config.fileConfig(log_config_append, disable_existing_loggers=False) except moves.configparser.Error as exc: raise LogConfigError(log_config_append, str(exc)) def setup(product_name, version='unknown'): """Setup logging.""" if CONF.log_config_append: _load_log_config(CONF.log_config_append) else: _setup_logging_from_conf(product_name, version) sys.excepthook = _create_logging_excepthook(product_name) def set_defaults(logging_context_format_string): cfg.set_defaults(log_opts, logging_context_format_string= logging_context_format_string) def _find_facility_from_conf(): facility_names = logging.handlers.SysLogHandler.facility_names facility = getattr(logging.handlers.SysLogHandler, CONF.syslog_log_facility, None) if facility is None and CONF.syslog_log_facility in facility_names: facility = facility_names.get(CONF.syslog_log_facility) if facility is None: valid_facilities = facility_names.keys() consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON', 'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS', 'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP', 'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3', 'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7'] valid_facilities.extend(consts) raise TypeError(_('syslog facility must be one of: %s') % ', '.join("'%s'" % fac for fac in valid_facilities)) return facility class RFCSysLogHandler(logging.handlers.SysLogHandler): def __init__(self, *args, **kwargs): self.binary_name = _get_binary_name() super(RFCSysLogHandler, self).__init__(*args, **kwargs) def format(self, record): msg = super(RFCSysLogHandler, self).format(record) msg = self.binary_name + ' ' + msg return msg def _setup_logging_from_conf(project, version): log_root = getLogger(None).logger for handler in log_root.handlers: log_root.removeHandler(handler) if CONF.use_syslog: facility = 
_find_facility_from_conf() # TODO(bogdando) use the format provided by RFCSysLogHandler # after existing syslog format deprecation in J if CONF.use_syslog_rfc_format: syslog = RFCSysLogHandler(address='/dev/log', facility=facility) else: syslog = logging.handlers.SysLogHandler(address='/dev/log', facility=facility) log_root.addHandler(syslog) logpath = _get_log_file_path() if logpath: filelog = logging.handlers.WatchedFileHandler(logpath) log_root.addHandler(filelog) if CONF.use_stderr: streamlog = ColorHandler() log_root.addHandler(streamlog) elif not logpath: # pass sys.stdout as a positional argument # python2.6 calls the argument strm, in 2.7 it's stream streamlog = logging.StreamHandler(sys.stdout) log_root.addHandler(streamlog) if CONF.publish_errors: handler = importutils.import_object( "ceilometer.openstack.common.log_handler.PublishErrorsHandler", logging.ERROR) log_root.addHandler(handler) datefmt = CONF.log_date_format for handler in log_root.handlers: # NOTE(alaski): CONF.log_format overrides everything currently. This # should be deprecated in favor of context aware formatting. if CONF.log_format: handler.setFormatter(logging.Formatter(fmt=CONF.log_format, datefmt=datefmt)) log_root.info('Deprecated: log_format is now deprecated and will ' 'be removed in the next release') else: handler.setFormatter(ContextFormatter(project=project, version=version, datefmt=datefmt)) if CONF.debug: log_root.setLevel(logging.DEBUG) elif CONF.verbose: log_root.setLevel(logging.INFO) else: log_root.setLevel(logging.WARNING) for pair in CONF.default_log_levels: mod, _sep, level_name = pair.partition('=') level = logging.getLevelName(level_name) logger = logging.getLogger(mod) logger.setLevel(level) _loggers = {} def getLogger(name='unknown', version='unknown'): if name not in _loggers: _loggers[name] = ContextAdapter(logging.getLogger(name), name, version) return _loggers[name] def getLazyLogger(name='unknown', version='unknown'): """Returns lazy logger. Creates a pass-through logger that does not create the real logger until it is really needed and delegates all calls to the real logger once it is created. """ return LazyAdapter(name, version) class WritableLogger(object): """A thin wrapper that responds to `write` and logs.""" def __init__(self, logger, level=logging.INFO): self.logger = logger self.level = level def write(self, msg): self.logger.log(self.level, msg.rstrip()) class ContextFormatter(logging.Formatter): """A context.RequestContext aware formatter configured through flags. The flags used to set format strings are: logging_context_format_string and logging_default_format_string. You can also specify logging_debug_format_suffix to append extra formatting if the log level is debug. For information about what variables are available for the formatter see: http://docs.python.org/library/logging.html#formatter If available, uses the context value stored in TLS - local.store.context """ def __init__(self, *args, **kwargs): """Initialize ContextFormatter instance Takes additional keyword arguments which can be used in the message format string. 
:keyword project: project name :type project: string :keyword version: project version :type version: string """ self.project = kwargs.pop('project', 'unknown') self.version = kwargs.pop('version', 'unknown') logging.Formatter.__init__(self, *args, **kwargs) def format(self, record): """Uses contextstring if request_id is set, otherwise default.""" # store project info record.project = self.project record.version = self.version # store request info context = getattr(local.store, 'context', None) if context: d = _dictify_context(context) for k, v in d.items(): setattr(record, k, v) # NOTE(sdague): default the fancier formatting params # to an empty string so we don't throw an exception if # they get used for key in ('instance', 'color'): if key not in record.__dict__: record.__dict__[key] = '' if record.__dict__.get('request_id', None): self._fmt = CONF.logging_context_format_string else: self._fmt = CONF.logging_default_format_string if (record.levelno == logging.DEBUG and CONF.logging_debug_format_suffix): self._fmt += " " + CONF.logging_debug_format_suffix # Cache this on the record, Logger will respect our formatted copy if record.exc_info: record.exc_text = self.formatException(record.exc_info, record) return logging.Formatter.format(self, record) def formatException(self, exc_info, record=None): """Format exception output with CONF.logging_exception_prefix.""" if not record: return logging.Formatter.formatException(self, exc_info) stringbuffer = moves.StringIO() traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], None, stringbuffer) lines = stringbuffer.getvalue().split('\n') stringbuffer.close() if CONF.logging_exception_prefix.find('%(asctime)') != -1: record.asctime = self.formatTime(record, self.datefmt) formatted_lines = [] for line in lines: pl = CONF.logging_exception_prefix % record.__dict__ fl = '%s%s' % (pl, line) formatted_lines.append(fl) return '\n'.join(formatted_lines) class ColorHandler(logging.StreamHandler): LEVEL_COLORS = { logging.DEBUG: '\033[00;32m', # GREEN logging.INFO: '\033[00;36m', # CYAN logging.AUDIT: '\033[01;36m', # BOLD CYAN logging.WARN: '\033[01;33m', # BOLD YELLOW logging.ERROR: '\033[01;31m', # BOLD RED logging.CRITICAL: '\033[01;31m', # BOLD RED } def format(self, record): record.color = self.LEVEL_COLORS[record.levelno] return logging.StreamHandler.format(self, record) class DeprecatedConfig(Exception): message = _("Fatal call to deprecated config: %(msg)s") def __init__(self, msg): super(Exception, self).__init__(self.message % dict(msg=msg)) ceilometer-2014.1.5/ceilometer/openstack/common/__init__.py0000664000567000056700000000000012540642613024745 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/local.py0000664000567000056700000000321512540642615024315 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
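# A minimal sketch of how a service wires up the log module defined above,
# assuming the usual oslo.config command-line entry point; the log messages
# and option values are made-up examples.
import sys

from oslo.config import cfg

from ceilometer.openstack.common import log as logging

cfg.CONF(sys.argv[1:], project='ceilometer')  # pick up --debug, --log-file, ...
logging.setup('ceilometer')                   # install handlers and formatters

LOG = logging.getLogger(__name__)             # a ContextAdapter, not a bare Logger
LOG.info('polling agent started')
LOG.audit('the synthesized AUDIT level works too')   # logging.AUDIT = INFO + 1

# mask_password() is what keeps credentials out of log output:
LOG.debug(logging.mask_password("'adminPass' : 'S3cr3t'"))  # -> 'adminPass' : '***'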
"""Local storage of variables using weak references""" import threading import weakref class WeakLocal(threading.local): def __getattribute__(self, attr): rval = super(WeakLocal, self).__getattribute__(attr) if rval: # NOTE(mikal): this bit is confusing. What is stored is a weak # reference, not the value itself. We therefore need to lookup # the weak reference and return the inner value here. rval = rval() return rval def __setattr__(self, attr, value): value = weakref.ref(value) return super(WeakLocal, self).__setattr__(attr, value) # NOTE(mikal): the name "store" should be deprecated in the future store = WeakLocal() # A "weak" store uses weak references and allows an object to fall out of scope # when it falls out of scope in the code that uses the thread local storage. A # "strong" store will hold a reference to the object so that it never falls out # of scope. weak_store = WeakLocal() strong_store = threading.local() ceilometer-2014.1.5/ceilometer/openstack/common/fixture/0000775000567000056700000000000012540643223024332 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/fixture/lockutils.py0000664000567000056700000000354512540642615026730 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import fixtures from ceilometer.openstack.common import lockutils class LockFixture(fixtures.Fixture): """External locking fixture. This fixture is basically an alternative to the synchronized decorator with the external flag so that tearDowns and addCleanups will be included in the lock context for locking between tests. The fixture is recommended to be the first line in a test method, like so:: def test_method(self): self.useFixture(LockFixture) ... or the first line in setUp if all the test methods in the class are required to be serialized. Something like:: class TestCase(testtools.testcase): def setUp(self): self.useFixture(LockFixture) super(TestCase, self).setUp() ... This is because addCleanups are put on a LIFO queue that gets run after the test method exits. (either by completing or raising an exception) """ def __init__(self, name, lock_file_prefix=None): self.mgr = lockutils.lock(name, lock_file_prefix, True) def setUp(self): super(LockFixture, self).setUp() self.addCleanup(self.mgr.__exit__, None, None, None) self.mgr.__enter__() ceilometer-2014.1.5/ceilometer/openstack/common/fixture/config.py0000664000567000056700000000271112540642615026156 0ustar jenkinsjenkins00000000000000# # Copyright 2013 Mirantis, Inc. # Copyright 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import fixtures from oslo.config import cfg import six class Config(fixtures.Fixture): """Override some configuration values. The keyword arguments are the names of configuration options to override and their values. If a group argument is supplied, the overrides are applied to the specified configuration option group. All overrides are automatically cleared at the end of the current test by the reset() method, which is registered by addCleanup(). """ def __init__(self, conf=cfg.CONF): self.conf = conf def setUp(self): super(Config, self).setUp() self.addCleanup(self.conf.reset) def config(self, **kw): group = kw.pop('group', None) for k, v in six.iteritems(kw): self.conf.set_override(k, v, group) ceilometer-2014.1.5/ceilometer/openstack/common/fixture/__init__.py0000664000567000056700000000000012540642615026435 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/fixture/moxstubout.py0000664000567000056700000000230712540642615027143 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import fixtures import mox class MoxStubout(fixtures.Fixture): """Deal with code around mox and stubout as a fixture.""" def setUp(self): super(MoxStubout, self).setUp() # emulate some of the mox stuff, we can't use the metaclass # because it screws with our generators self.mox = mox.Mox() self.stubs = self.mox.stubs self.addCleanup(self.mox.UnsetStubs) self.addCleanup(self.mox.VerifyAll) ceilometer-2014.1.5/ceilometer/openstack/common/fixture/mockpatch.py0000664000567000056700000000313412540642615026662 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
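# A minimal sketch of the Config fixture defined above inside a test case;
# the option 'fake_flag' is a made-up example registered by the test itself.
import testtools

from oslo.config import cfg

from ceilometer.openstack.common.fixture import config


class ConfigOverrideTest(testtools.TestCase):
    def setUp(self):
        super(ConfigOverrideTest, self).setUp()
        self.cfg_fixture = self.useFixture(config.Config())
        self.CONF = self.cfg_fixture.conf
        self.CONF.register_opt(cfg.BoolOpt('fake_flag', default=False))

    def test_override_is_visible_and_reverted(self):
        self.cfg_fixture.config(fake_flag=True)   # set_override() under the hood
        self.assertTrue(self.CONF.fake_flag)
        # conf.reset() runs automatically in the fixture's cleanup.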
import fixtures import mock class PatchObject(fixtures.Fixture): """Deal with code around mock.""" def __init__(self, obj, attr, new=mock.DEFAULT, **kwargs): self.obj = obj self.attr = attr self.kwargs = kwargs self.new = new def setUp(self): super(PatchObject, self).setUp() _p = mock.patch.object(self.obj, self.attr, self.new, **self.kwargs) self.mock = _p.start() self.addCleanup(_p.stop) class Patch(fixtures.Fixture): """Deal with code around mock.patch.""" def __init__(self, obj, new=mock.DEFAULT, **kwargs): self.obj = obj self.kwargs = kwargs self.new = new def setUp(self): super(Patch, self).setUp() _p = mock.patch(self.obj, self.new, **self.kwargs) self.mock = _p.start() self.addCleanup(_p.stop) ceilometer-2014.1.5/ceilometer/openstack/common/threadgroup.py0000664000567000056700000001012012540642615025540 0ustar jenkinsjenkins00000000000000# Copyright 2012 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import threading import eventlet from eventlet import greenpool from ceilometer.openstack.common import log as logging from ceilometer.openstack.common import loopingcall LOG = logging.getLogger(__name__) def _thread_done(gt, *args, **kwargs): """Callback function to be passed to GreenThread.link() when we spawn() Calls the :class:`ThreadGroup` to notify if. """ kwargs['group'].thread_done(kwargs['thread']) class Thread(object): """Wrapper around a greenthread, that holds a reference to the :class:`ThreadGroup`. The Thread will notify the :class:`ThreadGroup` when it has done so it can be removed from the threads list. """ def __init__(self, thread, group): self.thread = thread self.thread.link(_thread_done, group=group, thread=self) def stop(self): self.thread.kill() def wait(self): return self.thread.wait() def link(self, func, *args, **kwargs): self.thread.link(func, *args, **kwargs) class ThreadGroup(object): """The point of the ThreadGroup class is to: * keep track of timers and greenthreads (making it easier to stop them when need be). * provide an easy API to add timers. 
""" def __init__(self, thread_pool_size=10): self.pool = greenpool.GreenPool(thread_pool_size) self.threads = [] self.timers = [] def add_dynamic_timer(self, callback, initial_delay=None, periodic_interval_max=None, *args, **kwargs): timer = loopingcall.DynamicLoopingCall(callback, *args, **kwargs) timer.start(initial_delay=initial_delay, periodic_interval_max=periodic_interval_max) self.timers.append(timer) def add_timer(self, interval, callback, initial_delay=None, *args, **kwargs): pulse = loopingcall.FixedIntervalLoopingCall(callback, *args, **kwargs) pulse.start(interval=interval, initial_delay=initial_delay) self.timers.append(pulse) def add_thread(self, callback, *args, **kwargs): gt = self.pool.spawn(callback, *args, **kwargs) th = Thread(gt, self) self.threads.append(th) return th def thread_done(self, thread): self.threads.remove(thread) def stop(self): current = threading.current_thread() # Iterate over a copy of self.threads so thread_done doesn't # modify the list while we're iterating for x in self.threads[:]: if x is current: # don't kill the current thread. continue try: x.stop() except Exception as ex: LOG.exception(ex) for x in self.timers: try: x.stop() except Exception as ex: LOG.exception(ex) self.timers = [] def wait(self): for x in self.timers: try: x.wait() except eventlet.greenlet.GreenletExit: pass except Exception as ex: LOG.exception(ex) current = threading.current_thread() # Iterate over a copy of self.threads so thread_done doesn't # modify the list while we're iterating for x in self.threads[:]: if x is current: continue try: x.wait() except eventlet.greenlet.GreenletExit: pass except Exception as ex: LOG.exception(ex) ceilometer-2014.1.5/ceilometer/openstack/common/service.py0000664000567000056700000003535412540642615024674 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Generic Node base class for all workers that run on hosts.""" import errno import logging as std_logging import os import random import signal import sys import time try: # Importing just the symbol here because the io module does not # exist in Python 2.6. 
from io import UnsupportedOperation # noqa except ImportError: # Python 2.6 UnsupportedOperation = None import eventlet from eventlet import event from oslo.config import cfg from ceilometer.openstack.common import eventlet_backdoor from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import importutils from ceilometer.openstack.common import log as logging from ceilometer.openstack.common import threadgroup rpc = importutils.try_import('ceilometer.openstack.common.rpc') CONF = cfg.CONF LOG = logging.getLogger(__name__) def _sighup_supported(): return hasattr(signal, 'SIGHUP') def _is_daemon(): # The process group for a foreground process will match the # process group of the controlling terminal. If those values do # not match, or ioctl() fails on the stdout file handle, we assume # the process is running in the background as a daemon. # http://www.gnu.org/software/bash/manual/bashref.html#Job-Control-Basics try: is_daemon = os.getpgrp() != os.tcgetpgrp(sys.stdout.fileno()) except OSError as err: if err.errno == errno.ENOTTY: # Assume we are a daemon because there is no terminal. is_daemon = True else: raise except UnsupportedOperation: # Could not get the fileno for stdout, so we must be a daemon. is_daemon = True return is_daemon def _is_sighup_and_daemon(signo): if not (_sighup_supported() and signo == signal.SIGHUP): # Avoid checking if we are a daemon, because the signal isn't # SIGHUP. return False return _is_daemon() def _signo_to_signame(signo): signals = {signal.SIGTERM: 'SIGTERM', signal.SIGINT: 'SIGINT'} if _sighup_supported(): signals[signal.SIGHUP] = 'SIGHUP' return signals[signo] def _set_signals_handler(handler): signal.signal(signal.SIGTERM, handler) signal.signal(signal.SIGINT, handler) if _sighup_supported(): signal.signal(signal.SIGHUP, handler) class Launcher(object): """Launch one or more services and wait for them to complete.""" def __init__(self): """Initialize the service launcher. :returns: None """ self.services = Services() self.backdoor_port = eventlet_backdoor.initialize_if_enabled() def launch_service(self, service): """Load and start the given service. :param service: The service you would like to start. :returns: None """ service.backdoor_port = self.backdoor_port self.services.add(service) def stop(self): """Stop all services which are currently running. :returns: None """ self.services.stop() def wait(self): """Waits until all services have been stopped, and then returns. :returns: None """ self.services.wait() def restart(self): """Reload config files and restart service. 
:returns: None """ cfg.CONF.reload_config_files() self.services.restart() class SignalExit(SystemExit): def __init__(self, signo, exccode=1): super(SignalExit, self).__init__(exccode) self.signo = signo class ServiceLauncher(Launcher): def _handle_signal(self, signo, frame): # Allow the process to be killed again and die from natural causes _set_signals_handler(signal.SIG_DFL) raise SignalExit(signo) def handle_signal(self): _set_signals_handler(self._handle_signal) def _wait_for_exit_or_signal(self, ready_callback=None): status = None signo = 0 LOG.debug(_('Full set of CONF:')) CONF.log_opt_values(LOG, std_logging.DEBUG) try: if ready_callback: ready_callback() super(ServiceLauncher, self).wait() except SignalExit as exc: signame = _signo_to_signame(exc.signo) LOG.info(_('Caught %s, exiting'), signame) status = exc.code signo = exc.signo except SystemExit as exc: status = exc.code finally: self.stop() if rpc: try: rpc.cleanup() except Exception: # We're shutting down, so it doesn't matter at this point. LOG.exception(_('Exception during rpc cleanup.')) return status, signo def wait(self, ready_callback=None): while True: self.handle_signal() status, signo = self._wait_for_exit_or_signal(ready_callback) if not _is_sighup_and_daemon(signo): return status self.restart() class ServiceWrapper(object): def __init__(self, service, workers): self.service = service self.workers = workers self.children = set() self.forktimes = [] class ProcessLauncher(object): def __init__(self, wait_interval=0.01): """Constructor. :param wait_interval: The interval to sleep for between checks of child process exit. """ self.children = {} self.sigcaught = None self.running = True self.wait_interval = wait_interval rfd, self.writepipe = os.pipe() self.readpipe = eventlet.greenio.GreenPipe(rfd, 'r') self.handle_signal() def handle_signal(self): _set_signals_handler(self._handle_signal) def _handle_signal(self, signo, frame): self.sigcaught = signo self.running = False # Allow the process to be killed again and die from natural causes _set_signals_handler(signal.SIG_DFL) def _pipe_watcher(self): # This will block until the write end is closed when the parent # dies unexpectedly self.readpipe.read() LOG.info(_('Parent process has died unexpectedly, exiting')) sys.exit(1) def _child_process_handle_signal(self): # Setup child signal handlers differently def _sigterm(*args): signal.signal(signal.SIGTERM, signal.SIG_DFL) raise SignalExit(signal.SIGTERM) def _sighup(*args): signal.signal(signal.SIGHUP, signal.SIG_DFL) raise SignalExit(signal.SIGHUP) signal.signal(signal.SIGTERM, _sigterm) if _sighup_supported(): signal.signal(signal.SIGHUP, _sighup) # Block SIGINT and let the parent send us a SIGTERM signal.signal(signal.SIGINT, signal.SIG_IGN) def _child_wait_for_exit_or_signal(self, launcher): status = 0 signo = 0 # NOTE(johannes): All exceptions are caught to ensure this # doesn't fallback into the loop spawning children. It would # be bad for a child to spawn more children. 
try: launcher.wait() except SignalExit as exc: signame = _signo_to_signame(exc.signo) LOG.info(_('Caught %s, exiting'), signame) status = exc.code signo = exc.signo except SystemExit as exc: status = exc.code except BaseException: LOG.exception(_('Unhandled exception')) status = 2 finally: launcher.stop() return status, signo def _child_process(self, service): self._child_process_handle_signal() # Reopen the eventlet hub to make sure we don't share an epoll # fd with parent and/or siblings, which would be bad eventlet.hubs.use_hub() # Close write to ensure only parent has it open os.close(self.writepipe) # Create greenthread to watch for parent to close pipe eventlet.spawn_n(self._pipe_watcher) # Reseed random number generator random.seed() launcher = Launcher() launcher.launch_service(service) return launcher def _start_child(self, wrap): if len(wrap.forktimes) > wrap.workers: # Limit ourselves to one process a second (over the period of # number of workers * 1 second). This will allow workers to # start up quickly but ensure we don't fork off children that # die instantly too quickly. if time.time() - wrap.forktimes[0] < wrap.workers: LOG.info(_('Forking too fast, sleeping')) time.sleep(1) wrap.forktimes.pop(0) wrap.forktimes.append(time.time()) pid = os.fork() if pid == 0: launcher = self._child_process(wrap.service) while True: self._child_process_handle_signal() status, signo = self._child_wait_for_exit_or_signal(launcher) if not _is_sighup_and_daemon(signo): break launcher.restart() os._exit(status) LOG.info(_('Started child %d'), pid) wrap.children.add(pid) self.children[pid] = wrap return pid def launch_service(self, service, workers=1): wrap = ServiceWrapper(service, workers) LOG.info(_('Starting %d workers'), wrap.workers) while self.running and len(wrap.children) < wrap.workers: self._start_child(wrap) def _wait_child(self): try: # Don't block if no child processes have exited pid, status = os.waitpid(0, os.WNOHANG) if not pid: return None except OSError as exc: if exc.errno not in (errno.EINTR, errno.ECHILD): raise return None if os.WIFSIGNALED(status): sig = os.WTERMSIG(status) LOG.info(_('Child %(pid)d killed by signal %(sig)d'), dict(pid=pid, sig=sig)) else: code = os.WEXITSTATUS(status) LOG.info(_('Child %(pid)s exited with status %(code)d'), dict(pid=pid, code=code)) if pid not in self.children: LOG.warning(_('pid %d not in child list'), pid) return None wrap = self.children.pop(pid) wrap.children.remove(pid) return wrap def _respawn_children(self): while self.running: wrap = self._wait_child() if not wrap: # Yield to other threads if no children have exited # Sleep for a short time to avoid excessive CPU usage # (see bug #1095346) eventlet.greenthread.sleep(self.wait_interval) continue while self.running and len(wrap.children) < wrap.workers: self._start_child(wrap) def wait(self): """Loop waiting on children to die and respawning as necessary.""" LOG.debug(_('Full set of CONF:')) CONF.log_opt_values(LOG, std_logging.DEBUG) while True: self.handle_signal() self._respawn_children() if self.sigcaught: signame = _signo_to_signame(self.sigcaught) LOG.info(_('Caught %s, stopping children'), signame) if not _is_sighup_and_daemon(self.sigcaught): break for pid in self.children: os.kill(pid, signal.SIGHUP) self.running = True self.sigcaught = None for pid in self.children: try: os.kill(pid, signal.SIGTERM) except OSError as exc: if exc.errno != errno.ESRCH: raise # Wait for children to die if self.children: LOG.info(_('Waiting on %d children to exit'), len(self.children)) while 
self.children: self._wait_child() class Service(object): """Service object for binaries running on hosts.""" def __init__(self, threads=1000): self.tg = threadgroup.ThreadGroup(threads) # signal that the service is done shutting itself down: self._done = event.Event() def reset(self): # NOTE(Fengqian): docs for Event.reset() recommend against using it self._done = event.Event() def start(self): pass def stop(self): self.tg.stop() self.tg.wait() # Signal that service cleanup is done: if not self._done.ready(): self._done.send() def wait(self): self._done.wait() class Services(object): def __init__(self): self.services = [] self.tg = threadgroup.ThreadGroup() self.done = event.Event() def add(self, service): self.services.append(service) self.tg.add_thread(self.run_service, service, self.done) def stop(self): # wait for graceful shutdown of services: for service in self.services: service.stop() service.wait() # Each service has performed cleanup, now signal that the run_service # wrapper threads can now die: if not self.done.ready(): self.done.send() # reap threads: self.tg.stop() def wait(self): self.tg.wait() def restart(self): self.stop() self.done = event.Event() for restart_service in self.services: restart_service.reset() self.tg.add_thread(self.run_service, restart_service, self.done) @staticmethod def run_service(service, done): """Service start wrapper. :param service: service to run :param done: event to wait on until a shutdown is triggered :returns: None """ service.start() done.wait() def launch(service, workers=1): if workers is None or workers == 1: launcher = ServiceLauncher() launcher.launch_service(service) else: launcher = ProcessLauncher() launcher.launch_service(service, workers=workers) return launcher ceilometer-2014.1.5/ceilometer/openstack/common/versionutils.py0000664000567000056700000001155012540642615025772 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Helpers for comparing version strings. """ import functools import pkg_resources from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import log as logging LOG = logging.getLogger(__name__) class deprecated(object): """A decorator to mark callables as deprecated. This decorator logs a deprecation message when the callable it decorates is used. The message will include the release where the callable was deprecated, the release where it may be removed and possibly an optional replacement. Examples: 1. Specifying the required deprecated release >>> @deprecated(as_of=deprecated.ICEHOUSE) ... def a(): pass 2. Specifying a replacement: >>> @deprecated(as_of=deprecated.ICEHOUSE, in_favor_of='f()') ... def b(): pass 3. Specifying the release where the functionality may be removed: >>> @deprecated(as_of=deprecated.ICEHOUSE, remove_in=+1) ... 
def c(): pass """ FOLSOM = 'F' GRIZZLY = 'G' HAVANA = 'H' ICEHOUSE = 'I' _RELEASES = { 'F': 'Folsom', 'G': 'Grizzly', 'H': 'Havana', 'I': 'Icehouse', } _deprecated_msg_with_alternative = _( '%(what)s is deprecated as of %(as_of)s in favor of ' '%(in_favor_of)s and may be removed in %(remove_in)s.') _deprecated_msg_no_alternative = _( '%(what)s is deprecated as of %(as_of)s and may be ' 'removed in %(remove_in)s. It will not be superseded.') def __init__(self, as_of, in_favor_of=None, remove_in=2, what=None): """Initialize decorator :param as_of: the release deprecating the callable. Constants are define in this class for convenience. :param in_favor_of: the replacement for the callable (optional) :param remove_in: an integer specifying how many releases to wait before removing (default: 2) :param what: name of the thing being deprecated (default: the callable's name) """ self.as_of = as_of self.in_favor_of = in_favor_of self.remove_in = remove_in self.what = what def __call__(self, func): if not self.what: self.what = func.__name__ + '()' @functools.wraps(func) def wrapped(*args, **kwargs): msg, details = self._build_message() LOG.deprecated(msg, details) return func(*args, **kwargs) return wrapped def _get_safe_to_remove_release(self, release): # TODO(dstanek): this method will have to be reimplemented once # when we get to the X release because once we get to the Y # release, what is Y+2? new_release = chr(ord(release) + self.remove_in) if new_release in self._RELEASES: return self._RELEASES[new_release] else: return new_release def _build_message(self): details = dict(what=self.what, as_of=self._RELEASES[self.as_of], remove_in=self._get_safe_to_remove_release(self.as_of)) if self.in_favor_of: details['in_favor_of'] = self.in_favor_of msg = self._deprecated_msg_with_alternative else: msg = self._deprecated_msg_no_alternative return msg, details def is_compatible(requested_version, current_version, same_major=True): """Determine whether `requested_version` is satisfied by `current_version`; in other words, `current_version` is >= `requested_version`. :param requested_version: version to check for compatibility :param current_version: version to check against :param same_major: if True, the major version must be identical between `requested_version` and `current_version`. This is used when a major-version difference indicates incompatibility between the two versions. Since this is the common-case in practice, the default is True. :returns: True if compatible, False if not """ requested_parts = pkg_resources.parse_version(requested_version) current_parts = pkg_resources.parse_version(current_version) if same_major and (requested_parts[0] != current_parts[0]): return False return current_parts >= requested_parts ceilometer-2014.1.5/ceilometer/openstack/common/importutils.py0000664000567000056700000000421512540642615025617 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
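# A short sketch of is_compatible() above, using arbitrary version strings:
# it answers "does current_version satisfy requested_version?", and with
# same_major=True (the default) a major-version bump is treated as
# incompatible even though it is numerically newer.
from ceilometer.openstack.common import versionutils

assert versionutils.is_compatible('2.1', '2.4')                    # 2.4 >= 2.1
assert not versionutils.is_compatible('2.5', '2.4')                # too old
assert not versionutils.is_compatible('1.0', '2.0')                # major differs
assert versionutils.is_compatible('1.0', '2.0', same_major=False)  # now allowed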
""" Import related utilities and helper functions. """ import sys import traceback def import_class(import_str): """Returns a class from a string including module and class.""" mod_str, _sep, class_str = import_str.rpartition('.') try: __import__(mod_str) return getattr(sys.modules[mod_str], class_str) except (ValueError, AttributeError): raise ImportError('Class %s cannot be found (%s)' % (class_str, traceback.format_exception(*sys.exc_info()))) def import_object(import_str, *args, **kwargs): """Import a class and return an instance of it.""" return import_class(import_str)(*args, **kwargs) def import_object_ns(name_space, import_str, *args, **kwargs): """Tries to import object from default namespace. Imports a class and return an instance of it, first by trying to find the class in a default namespace, then failing back to a full path if not found in the default namespace. """ import_value = "%s.%s" % (name_space, import_str) try: return import_class(import_value)(*args, **kwargs) except ImportError: return import_class(import_str)(*args, **kwargs) def import_module(import_str): """Import a module.""" __import__(import_str) return sys.modules[import_str] def try_import(import_str, default=None): """Try to import a module and if it fails return default.""" try: return import_module(import_str) except ImportError: return default ceilometer-2014.1.5/ceilometer/openstack/common/config/0000775000567000056700000000000012540643223024111 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/config/generator.py0000664000567000056700000002427012540642615026462 0ustar jenkinsjenkins00000000000000# Copyright 2012 SINA Corporation # Copyright 2014 Cisco Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
# """Extracts OpenStack config option info from module(s).""" from __future__ import print_function import argparse import imp import os import re import socket import sys import textwrap from oslo.config import cfg import six import stevedore.named from ceilometer.openstack.common import gettextutils from ceilometer.openstack.common import importutils gettextutils.install('ceilometer') STROPT = "StrOpt" BOOLOPT = "BoolOpt" INTOPT = "IntOpt" FLOATOPT = "FloatOpt" LISTOPT = "ListOpt" DICTOPT = "DictOpt" MULTISTROPT = "MultiStrOpt" OPT_TYPES = { STROPT: 'string value', BOOLOPT: 'boolean value', INTOPT: 'integer value', FLOATOPT: 'floating point value', LISTOPT: 'list value', DICTOPT: 'dict value', MULTISTROPT: 'multi valued', } OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT, FLOATOPT, LISTOPT, DICTOPT, MULTISTROPT])) PY_EXT = ".py" BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../../")) WORDWRAP_WIDTH = 60 def generate(argv): parser = argparse.ArgumentParser( description='generate sample configuration file', ) parser.add_argument('-m', dest='modules', action='append') parser.add_argument('-l', dest='libraries', action='append') parser.add_argument('srcfiles', nargs='*') parsed_args = parser.parse_args(argv) mods_by_pkg = dict() for filepath in parsed_args.srcfiles: pkg_name = filepath.split(os.sep)[1] mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]), os.path.basename(filepath).split('.')[0]]) mods_by_pkg.setdefault(pkg_name, list()).append(mod_str) # NOTE(lzyeval): place top level modules before packages pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT)) ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names) pkg_names.extend(ext_names) # opts_by_group is a mapping of group name to an options list # The options list is a list of (module, options) tuples opts_by_group = {'DEFAULT': []} if parsed_args.modules: for module_name in parsed_args.modules: module = _import_module(module_name) if module: for group, opts in _list_opts(module): opts_by_group.setdefault(group, []).append((module_name, opts)) # Look for entry points defined in libraries (or applications) for # option discovery, and include their return values in the output. # # Each entry point should be a function returning an iterable # of pairs with the group name (or None for the default group) # and the list of Opt instances for that group. 
if parsed_args.libraries: loader = stevedore.named.NamedExtensionManager( 'oslo.config.opts', names=list(set(parsed_args.libraries)), invoke_on_load=False, ) for ext in loader: for group, opts in ext.plugin(): opt_list = opts_by_group.setdefault(group or 'DEFAULT', []) opt_list.append((ext.name, opts)) for pkg_name in pkg_names: mods = mods_by_pkg.get(pkg_name) mods.sort() for mod_str in mods: if mod_str.endswith('.__init__'): mod_str = mod_str[:mod_str.rfind(".")] mod_obj = _import_module(mod_str) if not mod_obj: raise RuntimeError("Unable to import module %s" % mod_str) for group, opts in _list_opts(mod_obj): opts_by_group.setdefault(group, []).append((mod_str, opts)) print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', [])) for group in sorted(opts_by_group.keys()): print_group_opts(group, opts_by_group[group]) def _import_module(mod_str): try: if mod_str.startswith('bin.'): imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:])) return sys.modules[mod_str[4:]] else: return importutils.import_module(mod_str) except Exception as e: sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e))) return None def _is_in_group(opt, group): "Check if opt is in group." for value in group._opts.values(): # NOTE(llu): Temporary workaround for bug #1262148, wait until # newly released oslo.config support '==' operator. if not(value['opt'] != opt): return True return False def _guess_groups(opt, mod_obj): # is it in the DEFAULT group? if _is_in_group(opt, cfg.CONF): return 'DEFAULT' # what other groups is it in? for value in cfg.CONF.values(): if isinstance(value, cfg.CONF.GroupAttr): if _is_in_group(opt, value._group): return value._group.name raise RuntimeError( "Unable to find group for option %s, " "maybe it's defined twice in the same group?" % opt.name ) def _list_opts(obj): def is_opt(o): return (isinstance(o, cfg.Opt) and not isinstance(o, cfg.SubCommandOpt)) opts = list() for attr_str in dir(obj): attr_obj = getattr(obj, attr_str) if is_opt(attr_obj): opts.append(attr_obj) elif (isinstance(attr_obj, list) and all(map(lambda x: is_opt(x), attr_obj))): opts.extend(attr_obj) ret = {} for opt in opts: ret.setdefault(_guess_groups(opt, obj), []).append(opt) return ret.items() def print_group_opts(group, opts_by_module): print("[%s]" % group) print('') for mod, opts in opts_by_module: print('#') print('# Options defined in %s' % mod) print('#') print('') for opt in opts: _print_opt(opt) print('') def _get_my_ip(): try: csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) csock.connect(('8.8.8.8', 80)) (addr, port) = csock.getsockname() csock.close() return addr except socket.error: return None def _sanitize_default(name, value): """Set up a reasonably sensible default for pybasedir, my_ip and host.""" if value.startswith(sys.prefix): # NOTE(jd) Don't use os.path.join, because it is likely to think the # second part is an absolute pathname and therefore drop the first # part. 
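        # (Illustrative) with sys.prefix == '/opt/stack' and
        # value == '/opt/stack/etc/foo', the slice below is '/etc/foo';
        # os.path.join('/usr', '/etc/foo') would return '/etc/foo', silently
        # dropping '/usr', whereas the concatenation yields '/usr/etc/foo'.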
value = os.path.normpath("/usr/" + value[len(sys.prefix):]) elif value.startswith(BASEDIR): return value.replace(BASEDIR, '/usr/lib/python/site-packages') elif BASEDIR in value: return value.replace(BASEDIR, '') elif value == _get_my_ip(): return '10.0.0.1' elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name: return 'ceilometer' elif value.strip() != value: return '"%s"' % value return value def _print_opt(opt): opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help if not opt_help: sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name) opt_help = "" opt_type = None try: opt_type = OPTION_REGEX.search(str(type(opt))).group(0) except (ValueError, AttributeError) as err: sys.stderr.write("%s\n" % str(err)) sys.exit(1) opt_help = u'%s (%s)' % (opt_help, OPT_TYPES[opt_type]) print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH))) if opt.deprecated_opts: for deprecated_opt in opt.deprecated_opts: if deprecated_opt.name: deprecated_group = (deprecated_opt.group if deprecated_opt.group else "DEFAULT") print('# Deprecated group/name - [%s]/%s' % (deprecated_group, deprecated_opt.name)) try: if opt_default is None: print('#%s=' % opt_name) elif opt_type == STROPT: assert(isinstance(opt_default, six.string_types)) print('#%s=%s' % (opt_name, _sanitize_default(opt_name, opt_default))) elif opt_type == BOOLOPT: assert(isinstance(opt_default, bool)) print('#%s=%s' % (opt_name, str(opt_default).lower())) elif opt_type == INTOPT: assert(isinstance(opt_default, int) and not isinstance(opt_default, bool)) print('#%s=%s' % (opt_name, opt_default)) elif opt_type == FLOATOPT: assert(isinstance(opt_default, float)) print('#%s=%s' % (opt_name, opt_default)) elif opt_type == LISTOPT: assert(isinstance(opt_default, list)) print('#%s=%s' % (opt_name, ','.join(opt_default))) elif opt_type == DICTOPT: assert(isinstance(opt_default, dict)) opt_default_strlist = [str(key) + ':' + str(value) for (key, value) in opt_default.items()] print('#%s=%s' % (opt_name, ','.join(opt_default_strlist))) elif opt_type == MULTISTROPT: assert(isinstance(opt_default, list)) if not opt_default: opt_default = [''] for default in opt_default: print('#%s=%s' % (opt_name, default)) print('') except Exception: sys.stderr.write('Error in option "%s"\n' % opt_name) sys.exit(1) def main(): generate(sys.argv[1:]) if __name__ == '__main__': main() ceilometer-2014.1.5/ceilometer/openstack/common/config/__init__.py0000664000567000056700000000000012540642615026214 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/rpc/0000775000567000056700000000000012540643223023430 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/rpc/impl_zmq.py0000664000567000056700000006354312540642615025651 0ustar jenkinsjenkins00000000000000# Copyright 2011 Cloudscaling Group, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
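# --- Illustrative usage note (not part of the original tree) ----------------
# The sample-config generator above is normally driven by the project's
# config tooling; a rough hand-driven equivalent (module and library names
# are only examples) would be:
#
#     from ceilometer.openstack.common.config import generator
#     generator.generate(['-m', 'ceilometer.sample',
#                         '-l', 'oslo.messaging',
#                         'ceilometer/service.py'])
#
# which prints "[DEFAULT]" first and then one "[<group>]" section per option
# group, each option rendered as a commented "#name=default" line.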
import os import pprint import re import socket import sys import types import uuid import eventlet import greenlet from oslo.config import cfg import six from six import moves from ceilometer.openstack.common import excutils from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import importutils from ceilometer.openstack.common import jsonutils from ceilometer.openstack.common.rpc import common as rpc_common zmq = importutils.try_import('eventlet.green.zmq') # for convenience, are not modified. pformat = pprint.pformat Timeout = eventlet.timeout.Timeout LOG = rpc_common.LOG RemoteError = rpc_common.RemoteError RPCException = rpc_common.RPCException zmq_opts = [ cfg.StrOpt('rpc_zmq_bind_address', default='*', help='ZeroMQ bind address. Should be a wildcard (*), ' 'an ethernet interface, or IP. ' 'The "host" option should point or resolve to this ' 'address.'), # The module.Class to use for matchmaking. cfg.StrOpt( 'rpc_zmq_matchmaker', default=('ceilometer.openstack.common.rpc.' 'matchmaker.MatchMakerLocalhost'), help='MatchMaker driver', ), # The following port is unassigned by IANA as of 2012-05-21 cfg.IntOpt('rpc_zmq_port', default=9501, help='ZeroMQ receiver listening port'), cfg.IntOpt('rpc_zmq_contexts', default=1, help='Number of ZeroMQ contexts, defaults to 1'), cfg.IntOpt('rpc_zmq_topic_backlog', default=None, help='Maximum number of ingress messages to locally buffer ' 'per topic. Default is unlimited.'), cfg.StrOpt('rpc_zmq_ipc_dir', default='/var/run/openstack', help='Directory for holding IPC sockets'), cfg.StrOpt('rpc_zmq_host', default=socket.gethostname(), help='Name of this node. Must be a valid hostname, FQDN, or ' 'IP address. Must match "host" option, if running Nova.') ] CONF = cfg.CONF CONF.register_opts(zmq_opts) ZMQ_CTX = None # ZeroMQ Context, must be global. matchmaker = None # memorized matchmaker object def _serialize(data): """Serialization wrapper. We prefer using JSON, but it cannot encode all types. Error if a developer passes us bad data. """ try: return jsonutils.dumps(data, ensure_ascii=True) except TypeError: with excutils.save_and_reraise_exception(): LOG.error(_("JSON serialization failed.")) def _deserialize(data): """Deserialization wrapper.""" LOG.debug(_("Deserializing: %s"), data) return jsonutils.loads(data) class ZmqSocket(object): """A tiny wrapper around ZeroMQ. Simplifies the send/recv protocol and connection management. Can be used as a Context (supports the 'with' statement). """ def __init__(self, addr, zmq_type, bind=True, subscribe=None): self.sock = _get_ctxt().socket(zmq_type) self.addr = addr self.type = zmq_type self.subscriptions = [] # Support failures on sending/receiving on wrong socket type. 
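        # (Illustrative) a zmq.PULL or zmq.SUB socket may only recv(), and a
        # zmq.PUSH or zmq.PUB socket may only send(); recv()/send() below
        # raise RPCException when called on a socket of the wrong type.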
self.can_recv = zmq_type in (zmq.PULL, zmq.SUB) self.can_send = zmq_type in (zmq.PUSH, zmq.PUB) self.can_sub = zmq_type in (zmq.SUB, ) # Support list, str, & None for subscribe arg (cast to list) do_sub = { list: subscribe, str: [subscribe], type(None): [] }[type(subscribe)] for f in do_sub: self.subscribe(f) str_data = {'addr': addr, 'type': self.socket_s(), 'subscribe': subscribe, 'bind': bind} LOG.debug(_("Connecting to %(addr)s with %(type)s"), str_data) LOG.debug(_("-> Subscribed to %(subscribe)s"), str_data) LOG.debug(_("-> bind: %(bind)s"), str_data) try: if bind: self.sock.bind(addr) else: self.sock.connect(addr) except Exception: raise RPCException(_("Could not open socket.")) def socket_s(self): """Get socket type as string.""" t_enum = ('PUSH', 'PULL', 'PUB', 'SUB', 'REP', 'REQ', 'ROUTER', 'DEALER') return dict(map(lambda t: (getattr(zmq, t), t), t_enum))[self.type] def subscribe(self, msg_filter): """Subscribe.""" if not self.can_sub: raise RPCException("Cannot subscribe on this socket.") LOG.debug(_("Subscribing to %s"), msg_filter) try: self.sock.setsockopt(zmq.SUBSCRIBE, msg_filter) except Exception: return self.subscriptions.append(msg_filter) def unsubscribe(self, msg_filter): """Unsubscribe.""" if msg_filter not in self.subscriptions: return self.sock.setsockopt(zmq.UNSUBSCRIBE, msg_filter) self.subscriptions.remove(msg_filter) def close(self): if self.sock is None or self.sock.closed: return # We must unsubscribe, or we'll leak descriptors. if self.subscriptions: for f in self.subscriptions: try: self.sock.setsockopt(zmq.UNSUBSCRIBE, f) except Exception: pass self.subscriptions = [] try: # Default is to linger self.sock.close() except Exception: # While this is a bad thing to happen, # it would be much worse if some of the code calling this # were to fail. For now, lets log, and later evaluate # if we can safely raise here. 
LOG.error(_("ZeroMQ socket could not be closed.")) self.sock = None def recv(self, **kwargs): if not self.can_recv: raise RPCException(_("You cannot recv on this socket.")) return self.sock.recv_multipart(**kwargs) def send(self, data, **kwargs): if not self.can_send: raise RPCException(_("You cannot send on this socket.")) self.sock.send_multipart(data, **kwargs) class ZmqClient(object): """Client for ZMQ sockets.""" def __init__(self, addr): self.outq = ZmqSocket(addr, zmq.PUSH, bind=False) def cast(self, msg_id, topic, data, envelope): msg_id = msg_id or 0 if not envelope: self.outq.send(map(bytes, (msg_id, topic, 'cast', _serialize(data)))) return rpc_envelope = rpc_common.serialize_msg(data[1], envelope) zmq_msg = moves.reduce(lambda x, y: x + y, rpc_envelope.items()) self.outq.send(map(bytes, (msg_id, topic, 'impl_zmq_v2', data[0]) + zmq_msg)) def close(self): self.outq.close() class RpcContext(rpc_common.CommonRpcContext): """Context that supports replying to a rpc.call.""" def __init__(self, **kwargs): self.replies = [] super(RpcContext, self).__init__(**kwargs) def deepcopy(self): values = self.to_dict() values['replies'] = self.replies return self.__class__(**values) def reply(self, reply=None, failure=None, ending=False): if ending: return self.replies.append(reply) @classmethod def marshal(self, ctx): ctx_data = ctx.to_dict() return _serialize(ctx_data) @classmethod def unmarshal(self, data): return RpcContext.from_dict(_deserialize(data)) class InternalContext(object): """Used by ConsumerBase as a private context for - methods.""" def __init__(self, proxy): self.proxy = proxy self.msg_waiter = None def _get_response(self, ctx, proxy, topic, data): """Process a curried message and cast the result to topic.""" LOG.debug(_("Running func with context: %s"), ctx.to_dict()) data.setdefault('version', None) data.setdefault('args', {}) try: result = proxy.dispatch( ctx, data['version'], data['method'], data.get('namespace'), **data['args']) return ConsumerBase.normalize_reply(result, ctx.replies) except greenlet.GreenletExit: # ignore these since they are just from shutdowns pass except rpc_common.ClientException as e: LOG.debug(_("Expected exception during message handling (%s)") % e._exc_info[1]) return {'exc': rpc_common.serialize_remote_exception(e._exc_info, log_failure=False)} except Exception: LOG.error(_("Exception during message handling")) return {'exc': rpc_common.serialize_remote_exception(sys.exc_info())} def reply(self, ctx, proxy, msg_id=None, context=None, topic=None, msg=None): """Reply to a casted call.""" # NOTE(ewindisch): context kwarg exists for Grizzly compat. # this may be able to be removed earlier than # 'I' if ConsumerBase.process were refactored. if type(msg) is list: payload = msg[-1] else: payload = msg response = ConsumerBase.normalize_reply( self._get_response(ctx, proxy, topic, payload), ctx.replies) LOG.debug(_("Sending reply")) _multi_send(_cast, ctx, topic, { 'method': '-process_reply', 'args': { 'msg_id': msg_id, # Include for Folsom compat. 'response': response } }, _msg_id=msg_id) class ConsumerBase(object): """Base Consumer.""" def __init__(self): self.private_ctx = InternalContext(None) @classmethod def normalize_reply(self, result, replies): #TODO(ewindisch): re-evaluate and document this method. 
if isinstance(result, types.GeneratorType): return list(result) elif replies: return replies else: return [result] def process(self, proxy, ctx, data): data.setdefault('version', None) data.setdefault('args', {}) # Method starting with - are # processed internally. (non-valid method name) method = data.get('method') if not method: LOG.error(_("RPC message did not include method.")) return # Internal method # uses internal context for safety. if method == '-reply': self.private_ctx.reply(ctx, proxy, **data['args']) return proxy.dispatch(ctx, data['version'], data['method'], data.get('namespace'), **data['args']) class ZmqBaseReactor(ConsumerBase): """A consumer class implementing a centralized casting broker (PULL-PUSH). Used for RoundRobin requests. """ def __init__(self, conf): super(ZmqBaseReactor, self).__init__() self.proxies = {} self.threads = [] self.sockets = [] self.subscribe = {} self.pool = eventlet.greenpool.GreenPool(conf.rpc_thread_pool_size) def register(self, proxy, in_addr, zmq_type_in, in_bind=True, subscribe=None): LOG.info(_("Registering reactor")) if zmq_type_in not in (zmq.PULL, zmq.SUB): raise RPCException("Bad input socktype") # Items push in. inq = ZmqSocket(in_addr, zmq_type_in, bind=in_bind, subscribe=subscribe) self.proxies[inq] = proxy self.sockets.append(inq) LOG.info(_("In reactor registered")) def consume_in_thread(self): @excutils.forever_retry_uncaught_exceptions def _consume(sock): LOG.info(_("Consuming socket")) while True: self.consume(sock) for k in self.proxies.keys(): self.threads.append( self.pool.spawn(_consume, k) ) def wait(self): for t in self.threads: t.wait() def close(self): for s in self.sockets: s.close() for t in self.threads: t.kill() class ZmqProxy(ZmqBaseReactor): """A consumer class implementing a topic-based proxy. Forwards to IPC sockets. """ def __init__(self, conf): super(ZmqProxy, self).__init__(conf) pathsep = set((os.path.sep or '', os.path.altsep or '', '/', '\\')) self.badchars = re.compile(r'[%s]' % re.escape(''.join(pathsep))) self.topic_proxy = {} def consume(self, sock): ipc_dir = CONF.rpc_zmq_ipc_dir data = sock.recv(copy=False) topic = data[1].bytes if topic.startswith('fanout~'): sock_type = zmq.PUB topic = topic.split('.', 1)[0] elif topic.startswith('zmq_replies'): sock_type = zmq.PUB else: sock_type = zmq.PUSH if topic not in self.topic_proxy: def publisher(waiter): LOG.info(_("Creating proxy for topic: %s"), topic) try: # The topic is received over the network, # don't trust this input. if self.badchars.search(topic) is not None: emsg = _("Topic contained dangerous characters.") LOG.warn(emsg) raise RPCException(emsg) out_sock = ZmqSocket("ipc://%s/zmq_topic_%s" % (ipc_dir, topic), sock_type, bind=True) except RPCException: waiter.send_exception(*sys.exc_info()) return self.topic_proxy[topic] = eventlet.queue.LightQueue( CONF.rpc_zmq_topic_backlog) self.sockets.append(out_sock) # It takes some time for a pub socket to open, # before we can have any faith in doing a send() to it. if sock_type == zmq.PUB: eventlet.sleep(.5) waiter.send(True) while(True): data = self.topic_proxy[topic].get() out_sock.send(data, copy=False) wait_sock_creation = eventlet.event.Event() eventlet.spawn(publisher, wait_sock_creation) try: wait_sock_creation.wait() except RPCException: LOG.error(_("Topic socket file creation failed.")) return try: self.topic_proxy[topic].put_nowait(data) except eventlet.queue.Full: LOG.error(_("Local per-topic backlog buffer full for topic " "%(topic)s. 
Dropping message.") % {'topic': topic}) def consume_in_thread(self): """Runs the ZmqProxy service.""" ipc_dir = CONF.rpc_zmq_ipc_dir consume_in = "tcp://%s:%s" % \ (CONF.rpc_zmq_bind_address, CONF.rpc_zmq_port) consumption_proxy = InternalContext(None) try: os.makedirs(ipc_dir) except os.error: if not os.path.isdir(ipc_dir): with excutils.save_and_reraise_exception(): LOG.error(_("Required IPC directory does not exist at" " %s") % (ipc_dir, )) try: self.register(consumption_proxy, consume_in, zmq.PULL) except zmq.ZMQError: if os.access(ipc_dir, os.X_OK): with excutils.save_and_reraise_exception(): LOG.error(_("Permission denied to IPC directory at" " %s") % (ipc_dir, )) with excutils.save_and_reraise_exception(): LOG.error(_("Could not create ZeroMQ receiver daemon. " "Socket may already be in use.")) super(ZmqProxy, self).consume_in_thread() def unflatten_envelope(packenv): """Unflattens the RPC envelope. Takes a list and returns a dictionary. i.e. [1,2,3,4] => {1: 2, 3: 4} """ i = iter(packenv) h = {} try: while True: k = six.next(i) h[k] = six.next(i) except StopIteration: return h class ZmqReactor(ZmqBaseReactor): """A consumer class implementing a consumer for messages. Can also be used as a 1:1 proxy """ def __init__(self, conf): super(ZmqReactor, self).__init__(conf) def consume(self, sock): #TODO(ewindisch): use zero-copy (i.e. references, not copying) data = sock.recv() LOG.debug(_("CONSUMER RECEIVED DATA: %s"), data) proxy = self.proxies[sock] if data[2] == 'cast': # Legacy protocol packenv = data[3] ctx, msg = _deserialize(packenv) request = rpc_common.deserialize_msg(msg) ctx = RpcContext.unmarshal(ctx) elif data[2] == 'impl_zmq_v2': packenv = data[4:] msg = unflatten_envelope(packenv) request = rpc_common.deserialize_msg(msg) # Unmarshal only after verifying the message. ctx = RpcContext.unmarshal(data[3]) else: LOG.error(_("ZMQ Envelope version unsupported or unknown.")) return self.pool.spawn_n(self.process, proxy, ctx, request) class Connection(rpc_common.Connection): """Manages connections and threads.""" def __init__(self, conf): self.topics = [] self.reactor = ZmqReactor(conf) def create_consumer(self, topic, proxy, fanout=False): # Register with matchmaker. _get_matchmaker().register(topic, CONF.rpc_zmq_host) # Subscription scenarios if fanout: sock_type = zmq.SUB subscribe = ('', fanout)[type(fanout) == str] topic = 'fanout~' + topic.split('.', 1)[0] else: sock_type = zmq.PULL subscribe = None topic = '.'.join((topic.split('.', 1)[0], CONF.rpc_zmq_host)) if topic in self.topics: LOG.info(_("Skipping topic registration. 
Already registered.")) return # Receive messages from (local) proxy inaddr = "ipc://%s/zmq_topic_%s" % \ (CONF.rpc_zmq_ipc_dir, topic) LOG.debug(_("Consumer is a zmq.%s"), ['PULL', 'SUB'][sock_type == zmq.SUB]) self.reactor.register(proxy, inaddr, sock_type, subscribe=subscribe, in_bind=False) self.topics.append(topic) def close(self): _get_matchmaker().stop_heartbeat() for topic in self.topics: _get_matchmaker().unregister(topic, CONF.rpc_zmq_host) self.reactor.close() self.topics = [] def wait(self): self.reactor.wait() def consume_in_thread(self): _get_matchmaker().start_heartbeat() self.reactor.consume_in_thread() def _cast(addr, context, topic, msg, timeout=None, envelope=False, _msg_id=None): timeout_cast = timeout or CONF.rpc_cast_timeout payload = [RpcContext.marshal(context), msg] with Timeout(timeout_cast, exception=rpc_common.Timeout): try: conn = ZmqClient(addr) # assumes cast can't return an exception conn.cast(_msg_id, topic, payload, envelope) except zmq.ZMQError: raise RPCException("Cast failed. ZMQ Socket Exception") finally: if 'conn' in vars(): conn.close() def _call(addr, context, topic, msg, timeout=None, envelope=False): # timeout_response is how long we wait for a response timeout = timeout or CONF.rpc_response_timeout # The msg_id is used to track replies. msg_id = uuid.uuid4().hex # Replies always come into the reply service. reply_topic = "zmq_replies.%s" % CONF.rpc_zmq_host LOG.debug(_("Creating payload")) # Curry the original request into a reply method. mcontext = RpcContext.marshal(context) payload = { 'method': '-reply', 'args': { 'msg_id': msg_id, 'topic': reply_topic, # TODO(ewindisch): safe to remove mcontext in I. 'msg': [mcontext, msg] } } LOG.debug(_("Creating queue socket for reply waiter")) # Messages arriving async. # TODO(ewindisch): have reply consumer with dynamic subscription mgmt with Timeout(timeout, exception=rpc_common.Timeout): try: msg_waiter = ZmqSocket( "ipc://%s/zmq_topic_zmq_replies.%s" % (CONF.rpc_zmq_ipc_dir, CONF.rpc_zmq_host), zmq.SUB, subscribe=msg_id, bind=False ) LOG.debug(_("Sending cast")) _cast(addr, context, topic, payload, envelope) LOG.debug(_("Cast sent; Waiting reply")) # Blocks until receives reply msg = msg_waiter.recv() LOG.debug(_("Received message: %s"), msg) LOG.debug(_("Unpacking response")) if msg[2] == 'cast': # Legacy version raw_msg = _deserialize(msg[-1])[-1] elif msg[2] == 'impl_zmq_v2': rpc_envelope = unflatten_envelope(msg[4:]) raw_msg = rpc_common.deserialize_msg(rpc_envelope) else: raise rpc_common.UnsupportedRpcEnvelopeVersion( _("Unsupported or unknown ZMQ envelope returned.")) responses = raw_msg['args']['response'] # ZMQError trumps the Timeout error. except zmq.ZMQError: raise RPCException("ZMQ Socket Error") except (IndexError, KeyError): raise RPCException(_("RPC Message Invalid.")) finally: if 'msg_waiter' in vars(): msg_waiter.close() # It seems we don't need to do all of the following, # but perhaps it would be useful for multicall? # One effect of this is that we're checking all # responses for Exceptions. for resp in responses: if isinstance(resp, types.DictType) and 'exc' in resp: raise rpc_common.deserialize_remote_exception(CONF, resp['exc']) return responses[-1] def _multi_send(method, context, topic, msg, timeout=None, envelope=False, _msg_id=None): """Wraps the sending of messages. Dispatches to the matchmaker and sends message to all relevant hosts. 
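    For example (illustrative values), the topic 'compute.host1' may resolve
    via the matchmaker to [('compute.host1', 'host1')], and the message is
    then sent to tcp://host1:9501 (the default rpc_zmq_port); a fanout key
    such as 'fanout~compute' expands to one (topic, host) pair per host
    registered for 'compute'.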
""" conf = CONF LOG.debug(_("%(msg)s") % {'msg': ' '.join(map(pformat, (topic, msg)))}) queues = _get_matchmaker().queues(topic) LOG.debug(_("Sending message(s) to: %s"), queues) # Don't stack if we have no matchmaker results if not queues: LOG.warn(_("No matchmaker results. Not casting.")) # While not strictly a timeout, callers know how to handle # this exception and a timeout isn't too big a lie. raise rpc_common.Timeout(_("No match from matchmaker.")) # This supports brokerless fanout (addresses > 1) for queue in queues: (_topic, ip_addr) = queue _addr = "tcp://%s:%s" % (ip_addr, conf.rpc_zmq_port) if method.__name__ == '_cast': eventlet.spawn_n(method, _addr, context, _topic, msg, timeout, envelope, _msg_id) return return method(_addr, context, _topic, msg, timeout, envelope) def create_connection(conf, new=True): return Connection(conf) def multicall(conf, *args, **kwargs): """Multiple calls.""" return _multi_send(_call, *args, **kwargs) def call(conf, *args, **kwargs): """Send a message, expect a response.""" data = _multi_send(_call, *args, **kwargs) return data[-1] def cast(conf, *args, **kwargs): """Send a message expecting no reply.""" _multi_send(_cast, *args, **kwargs) def fanout_cast(conf, context, topic, msg, **kwargs): """Send a message to all listening and expect no reply.""" # NOTE(ewindisch): fanout~ is used because it avoid splitting on . # and acts as a non-subtle hint to the matchmaker and ZmqProxy. _multi_send(_cast, context, 'fanout~' + str(topic), msg, **kwargs) def notify(conf, context, topic, msg, envelope): """Send notification event. Notifications are sent to topic-priority. This differs from the AMQP drivers which send to topic.priority. """ # NOTE(ewindisch): dot-priority in rpc notifier does not # work with our assumptions. topic = topic.replace('.', '-') cast(conf, context, topic, msg, envelope=envelope) def cleanup(): """Clean up resources in use by implementation.""" global ZMQ_CTX if ZMQ_CTX: ZMQ_CTX.term() ZMQ_CTX = None global matchmaker matchmaker = None def _get_ctxt(): if not zmq: raise ImportError("Failed to import eventlet.green.zmq") global ZMQ_CTX if not ZMQ_CTX: ZMQ_CTX = zmq.Context(CONF.rpc_zmq_contexts) return ZMQ_CTX def _get_matchmaker(*args, **kwargs): global matchmaker if not matchmaker: mm = CONF.rpc_zmq_matchmaker if mm.endswith('matchmaker.MatchMakerRing'): mm.replace('matchmaker', 'matchmaker_ring') LOG.warn(_('rpc_zmq_matchmaker = %(orig)s is deprecated; use' ' %(new)s instead') % dict( orig=CONF.rpc_zmq_matchmaker, new=mm)) matchmaker = importutils.import_object(mm, *args, **kwargs) return matchmaker ceilometer-2014.1.5/ceilometer/openstack/common/rpc/matchmaker_ring.py0000664000567000056700000000672112540642615027147 0ustar jenkinsjenkins00000000000000# Copyright 2011-2013 Cloudscaling Group, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ The MatchMaker classes should except a Topic or Fanout exchange key and return keys for direct exchanges, per (approximate) AMQP parlance. 
""" import itertools import json from oslo.config import cfg from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import log as logging from ceilometer.openstack.common.rpc import matchmaker as mm matchmaker_opts = [ # Matchmaker ring file cfg.StrOpt('ringfile', deprecated_name='matchmaker_ringfile', deprecated_group='DEFAULT', default='/etc/oslo/matchmaker_ring.json', help='Matchmaker ring file (JSON)'), ] CONF = cfg.CONF CONF.register_opts(matchmaker_opts, 'matchmaker_ring') LOG = logging.getLogger(__name__) class RingExchange(mm.Exchange): """Match Maker where hosts are loaded from a static JSON formatted file. __init__ takes optional ring dictionary argument, otherwise loads the ringfile from CONF.mathcmaker_ringfile. """ def __init__(self, ring=None): super(RingExchange, self).__init__() if ring: self.ring = ring else: fh = open(CONF.matchmaker_ring.ringfile, 'r') self.ring = json.load(fh) fh.close() self.ring0 = {} for k in self.ring.keys(): self.ring0[k] = itertools.cycle(self.ring[k]) def _ring_has(self, key): return key in self.ring0 class RoundRobinRingExchange(RingExchange): """A Topic Exchange based on a hashmap.""" def __init__(self, ring=None): super(RoundRobinRingExchange, self).__init__(ring) def run(self, key): if not self._ring_has(key): LOG.warn( _("No key defining hosts for topic '%s', " "see ringfile") % (key, ) ) return [] host = next(self.ring0[key]) return [(key + '.' + host, host)] class FanoutRingExchange(RingExchange): """Fanout Exchange based on a hashmap.""" def __init__(self, ring=None): super(FanoutRingExchange, self).__init__(ring) def run(self, key): # Assume starts with "fanout~", strip it for lookup. nkey = key.split('fanout~')[1:][0] if not self._ring_has(nkey): LOG.warn( _("No key defining hosts for topic '%s', " "see ringfile") % (nkey, ) ) return [] return map(lambda x: (key + '.' + x, x), self.ring[nkey]) class MatchMakerRing(mm.MatchMakerBase): """Match Maker where hosts are loaded from a static hashmap.""" def __init__(self, ring=None): super(MatchMakerRing, self).__init__() self.add_binding(mm.FanoutBinding(), FanoutRingExchange(ring)) self.add_binding(mm.DirectBinding(), mm.DirectExchange()) self.add_binding(mm.TopicBinding(), RoundRobinRingExchange(ring)) ceilometer-2014.1.5/ceilometer/openstack/common/rpc/serializer.py0000664000567000056700000000311412540642615026156 0ustar jenkinsjenkins00000000000000# Copyright 2013 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Provides the definition of an RPC serialization handler""" import abc import six @six.add_metaclass(abc.ABCMeta) class Serializer(object): """Generic (de-)serialization definition base class.""" @abc.abstractmethod def serialize_entity(self, context, entity): """Serialize something to primitive form. :param context: Security context :param entity: Entity to be serialized :returns: Serialized form of entity """ pass @abc.abstractmethod def deserialize_entity(self, context, entity): """Deserialize something from primitive form. 
:param context: Security context :param entity: Primitive to be deserialized :returns: Deserialized form of entity """ pass class NoOpSerializer(Serializer): """A serializer that does nothing.""" def serialize_entity(self, context, entity): return entity def deserialize_entity(self, context, entity): return entity ceilometer-2014.1.5/ceilometer/openstack/common/rpc/matchmaker.py0000664000567000056700000002231612540642615026126 0ustar jenkinsjenkins00000000000000# Copyright 2011 Cloudscaling Group, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ The MatchMaker classes should except a Topic or Fanout exchange key and return keys for direct exchanges, per (approximate) AMQP parlance. """ import contextlib import eventlet from oslo.config import cfg from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import log as logging matchmaker_opts = [ cfg.IntOpt('matchmaker_heartbeat_freq', default=300, help='Heartbeat frequency'), cfg.IntOpt('matchmaker_heartbeat_ttl', default=600, help='Heartbeat time-to-live.'), ] CONF = cfg.CONF CONF.register_opts(matchmaker_opts) LOG = logging.getLogger(__name__) contextmanager = contextlib.contextmanager class MatchMakerException(Exception): """Signified a match could not be found.""" message = _("Match not found by MatchMaker.") class Exchange(object): """Implements lookups. Subclass this to support hashtables, dns, etc. """ def __init__(self): pass def run(self, key): raise NotImplementedError() class Binding(object): """A binding on which to perform a lookup.""" def __init__(self): pass def test(self, key): raise NotImplementedError() class MatchMakerBase(object): """Match Maker Base Class. Build off HeartbeatMatchMakerBase if building a heartbeat-capable MatchMaker. """ def __init__(self): # Array of tuples. Index [2] toggles negation, [3] is last-if-true self.bindings = [] self.no_heartbeat_msg = _('Matchmaker does not implement ' 'registration or heartbeat.') def register(self, key, host): """Register a host on a backend. Heartbeats, if applicable, may keepalive registration. """ pass def ack_alive(self, key, host): """Acknowledge that a key.host is alive. Used internally for updating heartbeats, but may also be used publicly to acknowledge a system is alive (i.e. rpc message successfully sent to host) """ pass def is_alive(self, topic, host): """Checks if a host is alive.""" pass def expire(self, topic, host): """Explicitly expire a host's registration.""" pass def send_heartbeats(self): """Send all heartbeats. Use start_heartbeat to spawn a heartbeat greenthread, which loops this method. """ pass def unregister(self, key, host): """Unregister a topic.""" pass def start_heartbeat(self): """Spawn heartbeat greenthread.""" pass def stop_heartbeat(self): """Destroys the heartbeat greenthread.""" pass def add_binding(self, binding, rule, last=True): self.bindings.append((binding, rule, False, last)) #NOTE(ewindisch): kept the following method in case we implement the # underlying support. 
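    # (Illustrative example of the binding machinery below.) e.g.
    # MatchMakerLocalhost(host='node1').queues('compute') matches the
    # TopicBinding and returns [('compute.node1', 'node1')], while
    # queues('compute.node2') matches the DirectBinding and returns
    # [('compute.node2', 'node2')].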
#def add_negate_binding(self, binding, rule, last=True): # self.bindings.append((binding, rule, True, last)) def queues(self, key): workers = [] # bit is for negate bindings - if we choose to implement it. # last stops processing rules if this matches. for (binding, exchange, bit, last) in self.bindings: if binding.test(key): workers.extend(exchange.run(key)) # Support last. if last: return workers return workers class HeartbeatMatchMakerBase(MatchMakerBase): """Base for a heart-beat capable MatchMaker. Provides common methods for registering, unregistering, and maintaining heartbeats. """ def __init__(self): self.hosts = set() self._heart = None self.host_topic = {} super(HeartbeatMatchMakerBase, self).__init__() def send_heartbeats(self): """Send all heartbeats. Use start_heartbeat to spawn a heartbeat greenthread, which loops this method. """ for key, host in self.host_topic: self.ack_alive(key, host) def ack_alive(self, key, host): """Acknowledge that a host.topic is alive. Used internally for updating heartbeats, but may also be used publicly to acknowledge a system is alive (i.e. rpc message successfully sent to host) """ raise NotImplementedError("Must implement ack_alive") def backend_register(self, key, host): """Implements registration logic. Called by register(self,key,host) """ raise NotImplementedError("Must implement backend_register") def backend_unregister(self, key, key_host): """Implements de-registration logic. Called by unregister(self,key,host) """ raise NotImplementedError("Must implement backend_unregister") def register(self, key, host): """Register a host on a backend. Heartbeats, if applicable, may keepalive registration. """ self.hosts.add(host) self.host_topic[(key, host)] = host key_host = '.'.join((key, host)) self.backend_register(key, key_host) self.ack_alive(key, host) def unregister(self, key, host): """Unregister a topic.""" if (key, host) in self.host_topic: del self.host_topic[(key, host)] self.hosts.discard(host) self.backend_unregister(key, '.'.join((key, host))) LOG.info(_("Matchmaker unregistered: %(key)s, %(host)s"), {'key': key, 'host': host}) def start_heartbeat(self): """Implementation of MatchMakerBase.start_heartbeat. Launches greenthread looping send_heartbeats(), yielding for CONF.matchmaker_heartbeat_freq seconds between iterations. """ if not self.hosts: raise MatchMakerException( _("Register before starting heartbeat.")) def do_heartbeat(): while True: self.send_heartbeats() eventlet.sleep(CONF.matchmaker_heartbeat_freq) self._heart = eventlet.spawn(do_heartbeat) def stop_heartbeat(self): """Destroys the heartbeat greenthread.""" if self._heart: self._heart.kill() class DirectBinding(Binding): """Specifies a host in the key via a '.' character. Although dots are used in the key, the behavior here is that it maps directly to a host, thus direct. """ def test(self, key): return '.' in key class TopicBinding(Binding): """Where a 'bare' key without dots. AMQP generally considers topic exchanges to be those *with* dots, but we deviate here in terminology as the behavior here matches that of a topic exchange (whereas where there are dots, behavior matches that of a direct exchange. """ def test(self, key): return '.' not in key class FanoutBinding(Binding): """Match on fanout keys, where key starts with 'fanout.' 
string.""" def test(self, key): return key.startswith('fanout~') class StubExchange(Exchange): """Exchange that does nothing.""" def run(self, key): return [(key, None)] class LocalhostExchange(Exchange): """Exchange where all direct topics are local.""" def __init__(self, host='localhost'): self.host = host super(Exchange, self).__init__() def run(self, key): return [('.'.join((key.split('.')[0], self.host)), self.host)] class DirectExchange(Exchange): """Exchange where all topic keys are split, sending to second half. i.e. "compute.host" sends a message to "compute.host" running on "host" """ def __init__(self): super(Exchange, self).__init__() def run(self, key): e = key.split('.', 1)[1] return [(key, e)] class MatchMakerLocalhost(MatchMakerBase): """Match Maker where all bare topics resolve to localhost. Useful for testing. """ def __init__(self, host='localhost'): super(MatchMakerLocalhost, self).__init__() self.add_binding(FanoutBinding(), LocalhostExchange(host)) self.add_binding(DirectBinding(), DirectExchange()) self.add_binding(TopicBinding(), LocalhostExchange(host)) class MatchMakerStub(MatchMakerBase): """Match Maker where topics are untouched. Useful for testing, or for AMQP/brokered queues. Will not work where knowledge of hosts is known (i.e. zeromq) """ def __init__(self): super(MatchMakerStub, self).__init__() self.add_binding(FanoutBinding(), StubExchange()) self.add_binding(DirectBinding(), StubExchange()) self.add_binding(TopicBinding(), StubExchange()) ceilometer-2014.1.5/ceilometer/openstack/common/rpc/impl_fake.py0000664000567000056700000001333212540642615025737 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Fake RPC implementation which calls proxy methods directly with no queues. Casts will block, but this is very useful for tests. """ import inspect # NOTE(russellb): We specifically want to use json, not our own jsonutils. # jsonutils has some extra logic to automatically convert objects to primitive # types so that they can be serialized. We want to catch all cases where # non-primitive types make it into this code and treat it as an error. 
import json import time import eventlet import six from ceilometer.openstack.common.rpc import common as rpc_common CONSUMERS = {} class RpcContext(rpc_common.CommonRpcContext): def __init__(self, **kwargs): super(RpcContext, self).__init__(**kwargs) self._response = [] self._done = False def deepcopy(self): values = self.to_dict() new_inst = self.__class__(**values) new_inst._response = self._response new_inst._done = self._done return new_inst def reply(self, reply=None, failure=None, ending=False): if ending: self._done = True if not self._done: self._response.append((reply, failure)) class Consumer(object): def __init__(self, topic, proxy): self.topic = topic self.proxy = proxy def call(self, context, version, method, namespace, args, timeout): done = eventlet.event.Event() def _inner(): ctxt = RpcContext.from_dict(context.to_dict()) try: rval = self.proxy.dispatch(context, version, method, namespace, **args) res = [] # Caller might have called ctxt.reply() manually for (reply, failure) in ctxt._response: if failure: six.reraise(failure[0], failure[1], failure[2]) res.append(reply) # if ending not 'sent'...we might have more data to # return from the function itself if not ctxt._done: if inspect.isgenerator(rval): for val in rval: res.append(val) else: res.append(rval) done.send(res) except rpc_common.ClientException as e: done.send_exception(e._exc_info[1]) except Exception as e: done.send_exception(e) thread = eventlet.greenthread.spawn(_inner) if timeout: start_time = time.time() while not done.ready(): eventlet.greenthread.sleep(1) cur_time = time.time() if (cur_time - start_time) > timeout: thread.kill() raise rpc_common.Timeout() return done.wait() class Connection(object): """Connection object.""" def __init__(self): self.consumers = [] def create_consumer(self, topic, proxy, fanout=False): consumer = Consumer(topic, proxy) self.consumers.append(consumer) if topic not in CONSUMERS: CONSUMERS[topic] = [] CONSUMERS[topic].append(consumer) def close(self): for consumer in self.consumers: CONSUMERS[consumer.topic].remove(consumer) self.consumers = [] def consume_in_thread(self): pass def create_connection(conf, new=True): """Create a connection.""" return Connection() def check_serialize(msg): """Make sure a message intended for rpc can be serialized.""" json.dumps(msg) def multicall(conf, context, topic, msg, timeout=None): """Make a call that returns multiple times.""" check_serialize(msg) method = msg.get('method') if not method: return args = msg.get('args', {}) version = msg.get('version', None) namespace = msg.get('namespace', None) try: consumer = CONSUMERS[topic][0] except (KeyError, IndexError): raise rpc_common.Timeout("No consumers available") else: return consumer.call(context, version, method, namespace, args, timeout) def call(conf, context, topic, msg, timeout=None): """Sends a message on a topic and wait for a response.""" rv = multicall(conf, context, topic, msg, timeout) # NOTE(vish): return the last result from the multicall rv = list(rv) if not rv: return return rv[-1] def cast(conf, context, topic, msg): check_serialize(msg) try: call(conf, context, topic, msg) except Exception: pass def notify(conf, context, topic, msg, envelope): check_serialize(msg) def cleanup(): pass def fanout_cast(conf, context, topic, msg): """Cast to all consumers of a topic.""" check_serialize(msg) method = msg.get('method') if not method: return args = msg.get('args', {}) version = msg.get('version', None) namespace = msg.get('namespace', None) for consumer in CONSUMERS.get(topic, 
[]): try: consumer.call(context, version, method, namespace, args, None) except Exception: pass ceilometer-2014.1.5/ceilometer/openstack/common/rpc/impl_qpid.py0000664000567000056700000007213212540642615025771 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation # Copyright 2011 - 2012, Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools import itertools import time import eventlet import greenlet from oslo.config import cfg import six from ceilometer.openstack.common import excutils from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import importutils from ceilometer.openstack.common import jsonutils from ceilometer.openstack.common import log as logging from ceilometer.openstack.common.rpc import amqp as rpc_amqp from ceilometer.openstack.common.rpc import common as rpc_common qpid_codec = importutils.try_import("qpid.codec010") qpid_messaging = importutils.try_import("qpid.messaging") qpid_exceptions = importutils.try_import("qpid.messaging.exceptions") LOG = logging.getLogger(__name__) qpid_opts = [ cfg.StrOpt('qpid_hostname', default='localhost', help='Qpid broker hostname'), cfg.IntOpt('qpid_port', default=5672, help='Qpid broker port'), cfg.ListOpt('qpid_hosts', default=['$qpid_hostname:$qpid_port'], help='Qpid HA cluster host:port pairs'), cfg.StrOpt('qpid_username', default='', help='Username for qpid connection'), cfg.StrOpt('qpid_password', default='', help='Password for qpid connection', secret=True), cfg.StrOpt('qpid_sasl_mechanisms', default='', help='Space separated list of SASL mechanisms to use for auth'), cfg.IntOpt('qpid_heartbeat', default=60, help='Seconds between connection keepalive heartbeats'), cfg.StrOpt('qpid_protocol', default='tcp', help="Transport to use, either 'tcp' or 'ssl'"), cfg.BoolOpt('qpid_tcp_nodelay', default=True, help='Disable Nagle algorithm'), # NOTE(russellb) If any additional versions are added (beyond 1 and 2), # this file could probably use some additional refactoring so that the # differences between each version are split into different classes. cfg.IntOpt('qpid_topology_version', default=1, help="The qpid topology version to use. Version 1 is what " "was originally used by impl_qpid. Version 2 includes " "some backwards-incompatible changes that allow broker " "federation to work. Users should update to version 2 " "when they are able to take everything down, as it " "requires a clean break."), ] cfg.CONF.register_opts(qpid_opts) JSON_CONTENT_TYPE = 'application/json; charset=utf8' def raise_invalid_topology_version(conf): msg = (_("Invalid value for qpid_topology_version: %d") % conf.qpid_topology_version) LOG.error(msg) raise Exception(msg) class ConsumerBase(object): """Consumer base class.""" def __init__(self, conf, session, callback, node_name, node_opts, link_name, link_opts): """Declare a queue on an amqp session. 
'session' is the amqp session to use 'callback' is the callback to call when messages are received 'node_name' is the first part of the Qpid address string, before ';' 'node_opts' will be applied to the "x-declare" section of "node" in the address string. 'link_name' goes into the "name" field of the "link" in the address string 'link_opts' will be applied to the "x-declare" section of "link" in the address string. """ self.callback = callback self.receiver = None self.session = None if conf.qpid_topology_version == 1: addr_opts = { "create": "always", "node": { "type": "topic", "x-declare": { "durable": True, "auto-delete": True, }, }, "link": { "durable": True, "x-declare": { "durable": False, "auto-delete": True, "exclusive": False, }, }, } addr_opts["node"]["x-declare"].update(node_opts) elif conf.qpid_topology_version == 2: addr_opts = { "link": { "x-declare": { "auto-delete": True, "exclusive": False, }, }, } else: raise_invalid_topology_version() addr_opts["link"]["x-declare"].update(link_opts) if link_name: addr_opts["link"]["name"] = link_name self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts)) self.connect(session) def connect(self, session): """Declare the receiver on connect.""" self._declare_receiver(session) def reconnect(self, session): """Re-declare the receiver after a qpid reconnect.""" self._declare_receiver(session) def _declare_receiver(self, session): self.session = session self.receiver = session.receiver(self.address) self.receiver.capacity = 1 def _unpack_json_msg(self, msg): """Load the JSON data in msg if msg.content_type indicates that it is necessary. Put the loaded data back into msg.content and update msg.content_type appropriately. A Qpid Message containing a dict will have a content_type of 'amqp/map', whereas one containing a string that needs to be converted back from JSON will have a content_type of JSON_CONTENT_TYPE. :param msg: a Qpid Message object :returns: None """ if msg.content_type == JSON_CONTENT_TYPE: msg.content = jsonutils.loads(msg.content) msg.content_type = 'amqp/map' def consume(self): """Fetch the message and pass it to the callback object.""" message = self.receiver.fetch() try: self._unpack_json_msg(message) msg = rpc_common.deserialize_msg(message.content) self.callback(msg) except Exception: LOG.exception(_("Failed to process message... skipping it.")) finally: # TODO(sandy): Need support for optional ack_on_error. self.session.acknowledge(message) def get_receiver(self): return self.receiver def get_node_name(self): return self.address.split(';')[0] class DirectConsumer(ConsumerBase): """Queue/consumer class for 'direct'.""" def __init__(self, conf, session, msg_id, callback): """Init a 'direct' queue. 'session' is the amqp session to use 'msg_id' is the msg_id to listen on 'callback' is the callback to call when messages are received """ link_opts = { "auto-delete": conf.amqp_auto_delete, "exclusive": True, "durable": conf.amqp_durable_queues, } if conf.qpid_topology_version == 1: node_name = "%s/%s" % (msg_id, msg_id) node_opts = {"type": "direct"} link_name = msg_id elif conf.qpid_topology_version == 2: node_name = "amq.direct/%s" % msg_id node_opts = {} link_name = None else: raise_invalid_topology_version() super(DirectConsumer, self).__init__(conf, session, callback, node_name, node_opts, link_name, link_opts) class TopicConsumer(ConsumerBase): """Consumer class for 'topic'.""" def __init__(self, conf, session, topic, callback, name=None, exchange_name=None): """Init a 'topic' queue. 
:param session: the amqp session to use :param topic: is the topic to listen on :paramtype topic: str :param callback: the callback to call when messages are received :param name: optional queue name, defaults to topic """ exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf) link_opts = { "auto-delete": conf.amqp_auto_delete, "durable": conf.amqp_durable_queues, } if conf.qpid_topology_version == 1: node_name = "%s/%s" % (exchange_name, topic) elif conf.qpid_topology_version == 2: node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic) else: raise_invalid_topology_version() super(TopicConsumer, self).__init__(conf, session, callback, node_name, {}, name or topic, link_opts) class FanoutConsumer(ConsumerBase): """Consumer class for 'fanout'.""" def __init__(self, conf, session, topic, callback): """Init a 'fanout' queue. 'session' is the amqp session to use 'topic' is the topic to listen on 'callback' is the callback to call when messages are received """ self.conf = conf link_opts = {"exclusive": True} if conf.qpid_topology_version == 1: node_name = "%s_fanout" % topic node_opts = {"durable": False, "type": "fanout"} elif conf.qpid_topology_version == 2: node_name = "amq.topic/fanout/%s" % topic node_opts = {} else: raise_invalid_topology_version() super(FanoutConsumer, self).__init__(conf, session, callback, node_name, node_opts, None, link_opts) class Publisher(object): """Base Publisher class.""" def __init__(self, conf, session, node_name, node_opts=None): """Init the Publisher class with the exchange_name, routing_key, and other options """ self.sender = None self.session = session if conf.qpid_topology_version == 1: addr_opts = { "create": "always", "node": { "type": "topic", "x-declare": { "durable": False, # auto-delete isn't implemented for exchanges in qpid, # but put in here anyway "auto-delete": True, }, }, } if node_opts: addr_opts["node"]["x-declare"].update(node_opts) self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts)) elif conf.qpid_topology_version == 2: self.address = node_name else: raise_invalid_topology_version() self.reconnect(session) def reconnect(self, session): """Re-establish the Sender after a reconnection.""" self.sender = session.sender(self.address) def _pack_json_msg(self, msg): """Qpid cannot serialize dicts containing strings longer than 65535 characters. This function dumps the message content to a JSON string, which Qpid is able to handle. :param msg: May be either a Qpid Message object or a bare dict. :returns: A Qpid Message with its content field JSON encoded. """ try: msg.content = jsonutils.dumps(msg.content) except AttributeError: # Need to have a Qpid message so we can set the content_type. msg = qpid_messaging.Message(jsonutils.dumps(msg)) msg.content_type = JSON_CONTENT_TYPE return msg def send(self, msg): """Send a message.""" try: # Check if Qpid can encode the message check_msg = msg if not hasattr(check_msg, 'content_type'): check_msg = qpid_messaging.Message(msg) content_type = check_msg.content_type enc, dec = qpid_messaging.message.get_codec(content_type) enc(check_msg.content) except qpid_codec.CodecException: # This means the message couldn't be serialized as a dict. 
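            # (Illustrative) e.g. a payload containing a string value longer
            # than 65535 characters trips the qpid codec; _pack_json_msg()
            # then re-encodes the content as a JSON string and tags it with
            # JSON_CONTENT_TYPE so the consumer side can decode it.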
msg = self._pack_json_msg(msg) self.sender.send(msg) class DirectPublisher(Publisher): """Publisher class for 'direct'.""" def __init__(self, conf, session, msg_id): """Init a 'direct' publisher.""" if conf.qpid_topology_version == 1: node_name = "%s/%s" % (msg_id, msg_id) node_opts = {"type": "direct"} elif conf.qpid_topology_version == 2: node_name = "amq.direct/%s" % msg_id node_opts = {} else: raise_invalid_topology_version() super(DirectPublisher, self).__init__(conf, session, node_name, node_opts) class TopicPublisher(Publisher): """Publisher class for 'topic'.""" def __init__(self, conf, session, topic): """Init a 'topic' publisher. """ exchange_name = rpc_amqp.get_control_exchange(conf) if conf.qpid_topology_version == 1: node_name = "%s/%s" % (exchange_name, topic) elif conf.qpid_topology_version == 2: node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic) else: raise_invalid_topology_version() super(TopicPublisher, self).__init__(conf, session, node_name) class FanoutPublisher(Publisher): """Publisher class for 'fanout'.""" def __init__(self, conf, session, topic): """Init a 'fanout' publisher. """ if conf.qpid_topology_version == 1: node_name = "%s_fanout" % topic node_opts = {"type": "fanout"} elif conf.qpid_topology_version == 2: node_name = "amq.topic/fanout/%s" % topic node_opts = {} else: raise_invalid_topology_version() super(FanoutPublisher, self).__init__(conf, session, node_name, node_opts) class NotifyPublisher(Publisher): """Publisher class for notifications.""" def __init__(self, conf, session, topic): """Init a 'topic' publisher. """ exchange_name = rpc_amqp.get_control_exchange(conf) node_opts = {"durable": True} if conf.qpid_topology_version == 1: node_name = "%s/%s" % (exchange_name, topic) elif conf.qpid_topology_version == 2: node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic) else: raise_invalid_topology_version() super(NotifyPublisher, self).__init__(conf, session, node_name, node_opts) class Connection(object): """Connection object.""" pool = None def __init__(self, conf, server_params=None): if not qpid_messaging: raise ImportError("Failed to import qpid.messaging") self.session = None self.consumers = {} self.consumer_thread = None self.proxy_callbacks = [] self.conf = conf if server_params and 'hostname' in server_params: # NOTE(russellb) This enables support for cast_to_server. 
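            # (Illustrative) e.g. server_params={'hostname': 'broker1',
            # 'port': 5671} becomes qpid_hosts=['broker1:5671']; without a
            # 'port' key, the default 5672 is used.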
server_params['qpid_hosts'] = [ '%s:%d' % (server_params['hostname'], server_params.get('port', 5672)) ] params = { 'qpid_hosts': self.conf.qpid_hosts, 'username': self.conf.qpid_username, 'password': self.conf.qpid_password, } params.update(server_params or {}) self.brokers = params['qpid_hosts'] self.username = params['username'] self.password = params['password'] brokers_count = len(self.brokers) self.next_broker_indices = itertools.cycle(range(brokers_count)) self.connection_create(self.brokers[0]) self.reconnect() def connection_create(self, broker): # Create the connection - this does not open the connection self.connection = qpid_messaging.Connection(broker) # Check if flags are set and if so set them for the connection # before we call open self.connection.username = self.username self.connection.password = self.password self.connection.sasl_mechanisms = self.conf.qpid_sasl_mechanisms # Reconnection is done by self.reconnect() self.connection.reconnect = False self.connection.heartbeat = self.conf.qpid_heartbeat self.connection.transport = self.conf.qpid_protocol self.connection.tcp_nodelay = self.conf.qpid_tcp_nodelay def _register_consumer(self, consumer): self.consumers[str(consumer.get_receiver())] = consumer def _lookup_consumer(self, receiver): return self.consumers[str(receiver)] def reconnect(self): """Handles reconnecting and re-establishing sessions and queues.""" attempt = 0 delay = 1 while True: # Close the session if necessary if self.connection.opened(): try: self.connection.close() except qpid_exceptions.MessagingError: pass broker = self.brokers[next(self.next_broker_indices)] try: self.connection_create(broker) self.connection.open() except qpid_exceptions.MessagingError as e: msg_dict = dict(e=e, delay=delay) msg = _("Unable to connect to AMQP server: %(e)s. " "Sleeping %(delay)s seconds") % msg_dict LOG.error(msg) time.sleep(delay) delay = min(2 * delay, 60) else: LOG.info(_('Connected to AMQP server on %s'), broker) break self.session = self.connection.session() if self.consumers: consumers = self.consumers self.consumers = {} for consumer in six.itervalues(consumers): consumer.reconnect(self.session) self._register_consumer(consumer) LOG.debug(_("Re-established AMQP queues")) def ensure(self, error_callback, method, *args, **kwargs): while True: try: return method(*args, **kwargs) except (qpid_exceptions.Empty, qpid_exceptions.MessagingError) as e: if error_callback: error_callback(e) self.reconnect() def close(self): """Close/release this connection.""" self.cancel_consumer_thread() self.wait_on_proxy_callbacks() try: self.connection.close() except Exception: # NOTE(dripton) Logging exceptions that happen during cleanup just # causes confusion; there's really nothing useful we can do with # them. 
pass self.connection = None def reset(self): """Reset a connection so it can be used again.""" self.cancel_consumer_thread() self.wait_on_proxy_callbacks() self.session.close() self.session = self.connection.session() self.consumers = {} def declare_consumer(self, consumer_cls, topic, callback): """Create a Consumer using the class that was passed in and add it to our list of consumers """ def _connect_error(exc): log_info = {'topic': topic, 'err_str': str(exc)} LOG.error(_("Failed to declare consumer for topic '%(topic)s': " "%(err_str)s") % log_info) def _declare_consumer(): consumer = consumer_cls(self.conf, self.session, topic, callback) self._register_consumer(consumer) return consumer return self.ensure(_connect_error, _declare_consumer) def iterconsume(self, limit=None, timeout=None): """Return an iterator that will consume from all queues/consumers.""" def _error_callback(exc): if isinstance(exc, qpid_exceptions.Empty): LOG.debug(_('Timed out waiting for RPC response: %s') % str(exc)) raise rpc_common.Timeout() else: LOG.exception(_('Failed to consume message from queue: %s') % str(exc)) def _consume(): nxt_receiver = self.session.next_receiver(timeout=timeout) try: self._lookup_consumer(nxt_receiver).consume() except Exception: LOG.exception(_("Error processing message. Skipping it.")) for iteration in itertools.count(0): if limit and iteration >= limit: raise StopIteration yield self.ensure(_error_callback, _consume) def cancel_consumer_thread(self): """Cancel a consumer thread.""" if self.consumer_thread is not None: self.consumer_thread.kill() try: self.consumer_thread.wait() except greenlet.GreenletExit: pass self.consumer_thread = None def wait_on_proxy_callbacks(self): """Wait for all proxy callback threads to exit.""" for proxy_cb in self.proxy_callbacks: proxy_cb.wait() def publisher_send(self, cls, topic, msg): """Send to a publisher based on the publisher class.""" def _connect_error(exc): log_info = {'topic': topic, 'err_str': str(exc)} LOG.exception(_("Failed to publish message to topic " "'%(topic)s': %(err_str)s") % log_info) def _publisher_send(): publisher = cls(self.conf, self.session, topic) publisher.send(msg) return self.ensure(_connect_error, _publisher_send) def declare_direct_consumer(self, topic, callback): """Create a 'direct' queue. In nova's use, this is generally a msg_id queue used for responses for call/multicall """ self.declare_consumer(DirectConsumer, topic, callback) def declare_topic_consumer(self, topic, callback=None, queue_name=None, exchange_name=None): """Create a 'topic' consumer.""" self.declare_consumer(functools.partial(TopicConsumer, name=queue_name, exchange_name=exchange_name, ), topic, callback) def declare_fanout_consumer(self, topic, callback): """Create a 'fanout' consumer.""" self.declare_consumer(FanoutConsumer, topic, callback) def direct_send(self, msg_id, msg): """Send a 'direct' message.""" self.publisher_send(DirectPublisher, msg_id, msg) def topic_send(self, topic, msg, timeout=None): """Send a 'topic' message.""" # # We want to create a message with attributes, e.g. a TTL. We # don't really need to keep 'msg' in its JSON format any longer # so let's create an actual qpid message here and get some # value-add on the go. # # WARNING: Request timeout happens to be in the same units as # qpid's TTL (seconds). If this changes in the future, then this # will need to be altered accordingly. 
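        # With the default rpc_response_timeout of 60, a hypothetical call such as
        #   conn.topic_send('conductor', {'method': 'ping', 'args': {}}, timeout=60)
        # yields a qpid message with a 60 second TTL, whereas the kombu driver
        # expresses the same timeout as a 60000 ms 'ttl' header.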
# qpid_message = qpid_messaging.Message(content=msg, ttl=timeout) self.publisher_send(TopicPublisher, topic, qpid_message) def fanout_send(self, topic, msg): """Send a 'fanout' message.""" self.publisher_send(FanoutPublisher, topic, msg) def notify_send(self, topic, msg, **kwargs): """Send a notify message on a topic.""" self.publisher_send(NotifyPublisher, topic, msg) def consume(self, limit=None): """Consume from all queues/consumers.""" it = self.iterconsume(limit=limit) while True: try: six.next(it) except StopIteration: return def consume_in_thread(self): """Consumer from all queues/consumers in a greenthread.""" @excutils.forever_retry_uncaught_exceptions def _consumer_thread(): try: self.consume() except greenlet.GreenletExit: return if self.consumer_thread is None: self.consumer_thread = eventlet.spawn(_consumer_thread) return self.consumer_thread def create_consumer(self, topic, proxy, fanout=False): """Create a consumer that calls a method in a proxy object.""" proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, rpc_amqp.get_connection_pool(self.conf, Connection)) self.proxy_callbacks.append(proxy_cb) if fanout: consumer = FanoutConsumer(self.conf, self.session, topic, proxy_cb) else: consumer = TopicConsumer(self.conf, self.session, topic, proxy_cb) self._register_consumer(consumer) return consumer def create_worker(self, topic, proxy, pool_name): """Create a worker that calls a method in a proxy object.""" proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, rpc_amqp.get_connection_pool(self.conf, Connection)) self.proxy_callbacks.append(proxy_cb) consumer = TopicConsumer(self.conf, self.session, topic, proxy_cb, name=pool_name) self._register_consumer(consumer) return consumer def join_consumer_pool(self, callback, pool_name, topic, exchange_name=None, ack_on_error=True): """Register as a member of a group of consumers for a given topic from the specified exchange. Exactly one member of a given pool will receive each message. A message will be delivered to multiple pools, if more than one is created. 
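        For illustration (hypothetical names): two processes that both call

            conn.join_consumer_pool(callback=handle_notification,
                                    pool_name='billing-workers',
                                    topic='notifications.info')

        share a single queue and split its messages between them, while a
        second pool created with a different pool_name on the same topic
        receives its own copy of every message.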
""" callback_wrapper = rpc_amqp.CallbackWrapper( conf=self.conf, callback=callback, connection_pool=rpc_amqp.get_connection_pool(self.conf, Connection), wait_for_consumers=not ack_on_error ) self.proxy_callbacks.append(callback_wrapper) consumer = TopicConsumer(conf=self.conf, session=self.session, topic=topic, callback=callback_wrapper, name=pool_name, exchange_name=exchange_name) self._register_consumer(consumer) return consumer def create_connection(conf, new=True): """Create a connection.""" return rpc_amqp.create_connection( conf, new, rpc_amqp.get_connection_pool(conf, Connection)) def multicall(conf, context, topic, msg, timeout=None): """Make a call that returns multiple times.""" return rpc_amqp.multicall( conf, context, topic, msg, timeout, rpc_amqp.get_connection_pool(conf, Connection)) def call(conf, context, topic, msg, timeout=None): """Sends a message on a topic and wait for a response.""" return rpc_amqp.call( conf, context, topic, msg, timeout, rpc_amqp.get_connection_pool(conf, Connection)) def cast(conf, context, topic, msg): """Sends a message on a topic without waiting for a response.""" return rpc_amqp.cast( conf, context, topic, msg, rpc_amqp.get_connection_pool(conf, Connection)) def fanout_cast(conf, context, topic, msg): """Sends a message on a fanout exchange without waiting for a response.""" return rpc_amqp.fanout_cast( conf, context, topic, msg, rpc_amqp.get_connection_pool(conf, Connection)) def cast_to_server(conf, context, server_params, topic, msg): """Sends a message on a topic to a specific server.""" return rpc_amqp.cast_to_server( conf, context, server_params, topic, msg, rpc_amqp.get_connection_pool(conf, Connection)) def fanout_cast_to_server(conf, context, server_params, topic, msg): """Sends a message on a fanout exchange to a specific server.""" return rpc_amqp.fanout_cast_to_server( conf, context, server_params, topic, msg, rpc_amqp.get_connection_pool(conf, Connection)) def notify(conf, context, topic, msg, envelope): """Sends a notification event on a topic.""" return rpc_amqp.notify(conf, context, topic, msg, rpc_amqp.get_connection_pool(conf, Connection), envelope) def cleanup(): return rpc_amqp.cleanup(Connection.pool) ceilometer-2014.1.5/ceilometer/openstack/common/rpc/zmq_receiver.py0000664000567000056700000000221012540642615026474 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import eventlet eventlet.monkey_patch() import contextlib import sys from oslo.config import cfg from ceilometer.openstack.common import log as logging from ceilometer.openstack.common import rpc from ceilometer.openstack.common.rpc import impl_zmq CONF = cfg.CONF CONF.register_opts(rpc.rpc_opts) CONF.register_opts(impl_zmq.zmq_opts) def main(): CONF(sys.argv[1:], project='oslo') logging.setup("oslo") with contextlib.closing(impl_zmq.ZmqProxy(CONF)) as reactor: reactor.consume_in_thread() reactor.wait() ceilometer-2014.1.5/ceilometer/openstack/common/rpc/__init__.py0000664000567000056700000002452612540642615025556 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # Copyright 2011 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ A remote procedure call (rpc) abstraction. For some wrappers that add message versioning to rpc, see: rpc.dispatcher rpc.proxy """ from oslo.config import cfg from ceilometer.openstack.common import importutils from ceilometer.openstack.common import log as logging LOG = logging.getLogger(__name__) rpc_opts = [ cfg.StrOpt('rpc_backend', default='%s.impl_kombu' % __package__, help="The messaging module to use, defaults to kombu."), cfg.IntOpt('rpc_thread_pool_size', default=64, help='Size of RPC thread pool'), cfg.IntOpt('rpc_conn_pool_size', default=30, help='Size of RPC connection pool'), cfg.IntOpt('rpc_response_timeout', default=60, help='Seconds to wait for a response from call or multicall'), cfg.IntOpt('rpc_cast_timeout', default=30, help='Seconds to wait before a cast expires (TTL). ' 'Only supported by impl_zmq.'), cfg.ListOpt('allowed_rpc_exception_modules', default=['nova.exception', 'cinder.exception', 'exceptions', ], help='Modules of exceptions that are permitted to be recreated' ' upon receiving exception data from an rpc call.'), cfg.BoolOpt('fake_rabbit', default=False, help='If passed, use a fake RabbitMQ provider'), cfg.StrOpt('control_exchange', default='openstack', help='AMQP exchange to connect to if using RabbitMQ or Qpid'), ] CONF = cfg.CONF CONF.register_opts(rpc_opts) def set_defaults(control_exchange): cfg.set_defaults(rpc_opts, control_exchange=control_exchange) def create_connection(new=True): """Create a connection to the message bus used for rpc. For some example usage of creating a connection and some consumers on that connection, see nova.service. :param new: Whether or not to create a new connection. A new connection will be created by default. If new is False, the implementation is free to return an existing connection from a pool. :returns: An instance of openstack.common.rpc.common.Connection """ return _get_impl().create_connection(CONF, new=new) def call(context, topic, msg, timeout=None): """Invoke a remote method that returns something. :param context: Information that identifies the user that has made this request. :param topic: The topic to send the rpc message to. 
This correlates to the topic argument of openstack.common.rpc.common.Connection.create_consumer() and only applies when the consumer was created with fanout=False. :param msg: This is a dict in the form { "method" : "method_to_invoke", "args" : dict_of_kwargs } :param timeout: int, number of seconds to use for a response timeout. If set, this overrides the rpc_response_timeout option. :returns: A dict from the remote method. :raises: openstack.common.rpc.common.Timeout if a complete response is not received before the timeout is reached. """ return _get_impl().call(CONF, context, topic, msg, timeout) def cast(context, topic, msg): """Invoke a remote method that does not return anything. :param context: Information that identifies the user that has made this request. :param topic: The topic to send the rpc message to. This correlates to the topic argument of openstack.common.rpc.common.Connection.create_consumer() and only applies when the consumer was created with fanout=False. :param msg: This is a dict in the form { "method" : "method_to_invoke", "args" : dict_of_kwargs } :returns: None """ return _get_impl().cast(CONF, context, topic, msg) def fanout_cast(context, topic, msg): """Broadcast a remote method invocation with no return. This method will get invoked on all consumers that were set up with this topic name and fanout=True. :param context: Information that identifies the user that has made this request. :param topic: The topic to send the rpc message to. This correlates to the topic argument of openstack.common.rpc.common.Connection.create_consumer() and only applies when the consumer was created with fanout=True. :param msg: This is a dict in the form { "method" : "method_to_invoke", "args" : dict_of_kwargs } :returns: None """ return _get_impl().fanout_cast(CONF, context, topic, msg) def multicall(context, topic, msg, timeout=None): """Invoke a remote method and get back an iterator. In this case, the remote method will be returning multiple values in separate messages, so the return values can be processed as the come in via an iterator. :param context: Information that identifies the user that has made this request. :param topic: The topic to send the rpc message to. This correlates to the topic argument of openstack.common.rpc.common.Connection.create_consumer() and only applies when the consumer was created with fanout=False. :param msg: This is a dict in the form { "method" : "method_to_invoke", "args" : dict_of_kwargs } :param timeout: int, number of seconds to use for a response timeout. If set, this overrides the rpc_response_timeout option. :returns: An iterator. The iterator will yield a tuple (N, X) where N is an index that starts at 0 and increases by one for each value returned and X is the Nth value that was returned by the remote method. :raises: openstack.common.rpc.common.Timeout if a complete response is not received before the timeout is reached. """ return _get_impl().multicall(CONF, context, topic, msg, timeout) def notify(context, topic, msg, envelope=False): """Send notification event. :param context: Information that identifies the user that has made this request. :param topic: The topic to send the notification to. :param msg: This is a dict of content of event. :param envelope: Set to True to enable message envelope for notifications. :returns: None """ return _get_impl().notify(cfg.CONF, context, topic, msg, envelope) def cleanup(): """Clean up resources in use by implementation. 
Clean up any resources that have been allocated by the RPC implementation. This is typically open connections to a messaging service. This function would get called before an application using this API exits to allow connections to get torn down cleanly. :returns: None """ return _get_impl().cleanup() def cast_to_server(context, server_params, topic, msg): """Invoke a remote method that does not return anything. :param context: Information that identifies the user that has made this request. :param server_params: Connection information :param topic: The topic to send the notification to. :param msg: This is a dict in the form { "method" : "method_to_invoke", "args" : dict_of_kwargs } :returns: None """ return _get_impl().cast_to_server(CONF, context, server_params, topic, msg) def fanout_cast_to_server(context, server_params, topic, msg): """Broadcast to a remote method invocation with no return. :param context: Information that identifies the user that has made this request. :param server_params: Connection information :param topic: The topic to send the notification to. :param msg: This is a dict in the form { "method" : "method_to_invoke", "args" : dict_of_kwargs } :returns: None """ return _get_impl().fanout_cast_to_server(CONF, context, server_params, topic, msg) def queue_get_for(context, topic, host): """Get a queue name for a given topic + host. This function only works if this naming convention is followed on the consumer side, as well. For example, in nova, every instance of the nova-foo service calls create_consumer() for two topics: foo foo. Messages sent to the 'foo' topic are distributed to exactly one instance of the nova-foo service. The services are chosen in a round-robin fashion. Messages sent to the 'foo.' topic are sent to the nova-foo service on . """ return '%s.%s' % (topic, host) if host else topic _RPCIMPL = None def _get_impl(): """Delay import of rpc_backend until configuration is loaded.""" global _RPCIMPL if _RPCIMPL is None: try: _RPCIMPL = importutils.import_module(CONF.rpc_backend) except ImportError: # For backwards compatibility with older nova config. impl = CONF.rpc_backend.replace('nova.rpc', 'nova.openstack.common.rpc') _RPCIMPL = importutils.import_module(impl) return _RPCIMPL ceilometer-2014.1.5/ceilometer/openstack/common/rpc/common.py0000664000567000056700000004424212540642615025304 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # Copyright 2011 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
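# The helpers in this module implement the rpc message envelope described in
# the _RPC_ENVELOPE_VERSION docstring below; on the wire an enveloped message
# is a plain dict along the lines of (illustrative payload):
#   {'oslo.version': '2.0',
#    'oslo.message': '{"method": "echo", "args": {"value": 42}}'}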
import copy import sys import traceback from oslo.config import cfg import six from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import importutils from ceilometer.openstack.common import jsonutils from ceilometer.openstack.common import local from ceilometer.openstack.common import log as logging from ceilometer.openstack.common import versionutils CONF = cfg.CONF LOG = logging.getLogger(__name__) _RPC_ENVELOPE_VERSION = '2.0' '''RPC Envelope Version. This version number applies to the top level structure of messages sent out. It does *not* apply to the message payload, which must be versioned independently. For example, when using rpc APIs, a version number is applied for changes to the API being exposed over rpc. This version number is handled in the rpc proxy and dispatcher modules. This version number applies to the message envelope that is used in the serialization done inside the rpc layer. See serialize_msg() and deserialize_msg(). The current message format (version 2.0) is very simple. It is:: { 'oslo.version': , 'oslo.message': } Message format version '1.0' is just considered to be the messages we sent without a message envelope. So, the current message envelope just includes the envelope version. It may eventually contain additional information, such as a signature for the message payload. We will JSON encode the application message payload. The message envelope, which includes the JSON encoded application message body, will be passed down to the messaging libraries as a dict. ''' _VERSION_KEY = 'oslo.version' _MESSAGE_KEY = 'oslo.message' _REMOTE_POSTFIX = '_Remote' class RPCException(Exception): msg_fmt = _("An unknown RPC related exception occurred.") def __init__(self, message=None, **kwargs): self.kwargs = kwargs if not message: try: message = self.msg_fmt % kwargs except Exception: # kwargs doesn't match a variable in the message # log the issue and the kwargs LOG.exception(_('Exception in string format operation')) for name, value in six.iteritems(kwargs): LOG.error("%s: %s" % (name, value)) # at least get the core message out if something happened message = self.msg_fmt super(RPCException, self).__init__(message) class RemoteError(RPCException): """Signifies that a remote class has raised an exception. Contains a string representation of the type of the original exception, the value of the original exception, and the traceback. These are sent to the parent as a joined string so printing the exception contains all of the relevant info. """ msg_fmt = _("Remote error: %(exc_type)s %(value)s\n%(traceback)s.") def __init__(self, exc_type=None, value=None, traceback=None): self.exc_type = exc_type self.value = value self.traceback = traceback super(RemoteError, self).__init__(exc_type=exc_type, value=value, traceback=traceback) class Timeout(RPCException): """Signifies that a timeout has occurred. This exception is raised if the rpc_response_timeout is reached while waiting for a response from the remote side. """ msg_fmt = _('Timeout while waiting on RPC response - ' 'topic: "%(topic)s", RPC method: "%(method)s" ' 'info: "%(info)s"') def __init__(self, info=None, topic=None, method=None): """Initiates Timeout object. 
:param info: Extra info to convey to the user :param topic: The topic that the rpc call was sent to :param rpc_method_name: The name of the rpc method being called """ self.info = info self.topic = topic self.method = method super(Timeout, self).__init__( None, info=info or _(''), topic=topic or _(''), method=method or _('')) class DuplicateMessageError(RPCException): msg_fmt = _("Found duplicate message(%(msg_id)s). Skipping it.") class InvalidRPCConnectionReuse(RPCException): msg_fmt = _("Invalid reuse of an RPC connection.") class UnsupportedRpcVersion(RPCException): msg_fmt = _("Specified RPC version, %(version)s, not supported by " "this endpoint.") class UnsupportedRpcEnvelopeVersion(RPCException): msg_fmt = _("Specified RPC envelope version, %(version)s, " "not supported by this endpoint.") class RpcVersionCapError(RPCException): msg_fmt = _("Specified RPC version cap, %(version_cap)s, is too low") class Connection(object): """A connection, returned by rpc.create_connection(). This class represents a connection to the message bus used for rpc. An instance of this class should never be created by users of the rpc API. Use rpc.create_connection() instead. """ def close(self): """Close the connection. This method must be called when the connection will no longer be used. It will ensure that any resources associated with the connection, such as a network connection, and cleaned up. """ raise NotImplementedError() def create_consumer(self, topic, proxy, fanout=False): """Create a consumer on this connection. A consumer is associated with a message queue on the backend message bus. The consumer will read messages from the queue, unpack them, and dispatch them to the proxy object. The contents of the message pulled off of the queue will determine which method gets called on the proxy object. :param topic: This is a name associated with what to consume from. Multiple instances of a service may consume from the same topic. For example, all instances of nova-compute consume from a queue called "compute". In that case, the messages will get distributed amongst the consumers in a round-robin fashion if fanout=False. If fanout=True, every consumer associated with this topic will get a copy of every message. :param proxy: The object that will handle all incoming messages. :param fanout: Whether or not this is a fanout topic. See the documentation for the topic parameter for some additional comments on this. """ raise NotImplementedError() def create_worker(self, topic, proxy, pool_name): """Create a worker on this connection. A worker is like a regular consumer of messages directed to a topic, except that it is part of a set of such consumers (the "pool") which may run in parallel. Every pool of workers will receive a given message, but only one worker in the pool will be asked to process it. Load is distributed across the members of the pool in round-robin fashion. :param topic: This is a name associated with what to consume from. Multiple instances of a service may consume from the same topic. :param proxy: The object that will handle all incoming messages. :param pool_name: String containing the name of the pool of workers """ raise NotImplementedError() def join_consumer_pool(self, callback, pool_name, topic, exchange_name): """Register as a member of a group of consumers. Uses given topic from the specified exchange. Exactly one member of a given pool will receive each message. A message will be delivered to multiple pools, if more than one is created. 
:param callback: Callable to be invoked for each message. :type callback: callable accepting one argument :param pool_name: The name of the consumer pool. :type pool_name: str :param topic: The routing topic for desired messages. :type topic: str :param exchange_name: The name of the message exchange where the client should attach. Defaults to the configured exchange. :type exchange_name: str """ raise NotImplementedError() def consume_in_thread(self): """Spawn a thread to handle incoming messages. Spawn a thread that will be responsible for handling all incoming messages for consumers that were set up on this connection. Message dispatching inside of this is expected to be implemented in a non-blocking manner. An example implementation would be having this thread pull messages in for all of the consumers, but utilize a thread pool for dispatching the messages to the proxy objects. """ raise NotImplementedError() def _safe_log(log_func, msg, msg_data): """Sanitizes the msg_data field before logging.""" SANITIZE = ['_context_auth_token', 'auth_token', 'new_pass'] def _fix_passwords(d): """Sanitizes the password fields in the dictionary.""" for k in six.iterkeys(d): if k.lower().find('password') != -1: d[k] = '' elif k.lower() in SANITIZE: d[k] = '' elif isinstance(d[k], list): for e in d[k]: if isinstance(e, dict): _fix_passwords(e) elif isinstance(d[k], dict): _fix_passwords(d[k]) return d return log_func(msg, _fix_passwords(copy.deepcopy(msg_data))) def serialize_remote_exception(failure_info, log_failure=True): """Prepares exception data to be sent over rpc. Failure_info should be a sys.exc_info() tuple. """ tb = traceback.format_exception(*failure_info) failure = failure_info[1] if log_failure: LOG.error(_("Returning exception %s to caller"), six.text_type(failure)) LOG.error(tb) kwargs = {} if hasattr(failure, 'kwargs'): kwargs = failure.kwargs # NOTE(matiu): With cells, it's possible to re-raise remote, remote # exceptions. Lets turn it back into the original exception type. cls_name = str(failure.__class__.__name__) mod_name = str(failure.__class__.__module__) if (cls_name.endswith(_REMOTE_POSTFIX) and mod_name.endswith(_REMOTE_POSTFIX)): cls_name = cls_name[:-len(_REMOTE_POSTFIX)] mod_name = mod_name[:-len(_REMOTE_POSTFIX)] data = { 'class': cls_name, 'module': mod_name, 'message': six.text_type(failure), 'tb': tb, 'args': failure.args, 'kwargs': kwargs } json_data = jsonutils.dumps(data) return json_data def deserialize_remote_exception(conf, data): failure = jsonutils.loads(str(data)) trace = failure.get('tb', []) message = failure.get('message', "") + "\n" + "\n".join(trace) name = failure.get('class') module = failure.get('module') # NOTE(ameade): We DO NOT want to allow just any module to be imported, in # order to prevent arbitrary code execution. 
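    # With the default allowed_rpc_exception_modules setting
    # (['nova.exception', 'cinder.exception', 'exceptions']), a remote
    # ValueError can be rebuilt locally, while an exception from any module
    # not in that list is returned as a generic RemoteError instead.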
if module not in conf.allowed_rpc_exception_modules: return RemoteError(name, failure.get('message'), trace) try: mod = importutils.import_module(module) klass = getattr(mod, name) if not issubclass(klass, Exception): raise TypeError("Can only deserialize Exceptions") failure = klass(*failure.get('args', []), **failure.get('kwargs', {})) except (AttributeError, TypeError, ImportError): return RemoteError(name, failure.get('message'), trace) ex_type = type(failure) str_override = lambda self: message new_ex_type = type(ex_type.__name__ + _REMOTE_POSTFIX, (ex_type,), {'__str__': str_override, '__unicode__': str_override}) new_ex_type.__module__ = '%s%s' % (module, _REMOTE_POSTFIX) try: # NOTE(ameade): Dynamically create a new exception type and swap it in # as the new type for the exception. This only works on user defined # Exceptions and not core python exceptions. This is important because # we cannot necessarily change an exception message so we must override # the __str__ method. failure.__class__ = new_ex_type except TypeError: # NOTE(ameade): If a core exception then just add the traceback to the # first exception argument. failure.args = (message,) + failure.args[1:] return failure class CommonRpcContext(object): def __init__(self, **kwargs): self.values = kwargs def __getattr__(self, key): try: return self.values[key] except KeyError: raise AttributeError(key) def to_dict(self): return copy.deepcopy(self.values) @classmethod def from_dict(cls, values): return cls(**values) def deepcopy(self): return self.from_dict(self.to_dict()) def update_store(self): local.store.context = self def elevated(self, read_deleted=None, overwrite=False): """Return a version of this context with admin flag set.""" # TODO(russellb) This method is a bit of a nova-ism. It makes # some assumptions about the data in the request context sent # across rpc, while the rest of this class does not. We could get # rid of this if we changed the nova code that uses this to # convert the RpcContext back to its native RequestContext doing # something like nova.context.RequestContext.from_dict(ctxt.to_dict()) context = self.deepcopy() context.values['is_admin'] = True context.values.setdefault('roles', []) if 'admin' not in context.values['roles']: context.values['roles'].append('admin') if read_deleted is not None: context.values['read_deleted'] = read_deleted return context class ClientException(Exception): """Encapsulates actual exception expected to be hit by a RPC proxy object. Merely instantiating it records the current exception information, which will be passed back to the RPC client without exceptional logging. """ def __init__(self): self._exc_info = sys.exc_info() def catch_client_exception(exceptions, func, *args, **kwargs): try: return func(*args, **kwargs) except Exception as e: if type(e) in exceptions: raise ClientException() else: raise def client_exceptions(*exceptions): """Decorator for manager methods that raise expected exceptions. Marking a Manager method with this decorator allows the declaration of expected exceptions that the RPC layer should not consider fatal, and not log as if they were generated in a real error scenario. Note that this will cause listed exceptions to be wrapped in a ClientException, which is used internally by the RPC layer. 
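    For example, a manager method could be decorated as follows (the
    exception class and method names here are hypothetical):

        @client_exceptions(exception.NotFound)
        def get_item(self, context, item_id):
            ...

    so that a NotFound raised inside it is passed back to the caller
    without being logged as a server-side error.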
""" def outer(func): def inner(*args, **kwargs): return catch_client_exception(exceptions, func, *args, **kwargs) return inner return outer # TODO(sirp): we should deprecate this in favor of # using `versionutils.is_compatible` directly def version_is_compatible(imp_version, version): """Determine whether versions are compatible. :param imp_version: The version implemented :param version: The version requested by an incoming message. """ return versionutils.is_compatible(version, imp_version) def serialize_msg(raw_msg): # NOTE(russellb) See the docstring for _RPC_ENVELOPE_VERSION for more # information about this format. msg = {_VERSION_KEY: _RPC_ENVELOPE_VERSION, _MESSAGE_KEY: jsonutils.dumps(raw_msg)} return msg def deserialize_msg(msg): # NOTE(russellb): Hang on to your hats, this road is about to # get a little bumpy. # # Robustness Principle: # "Be strict in what you send, liberal in what you accept." # # At this point we have to do a bit of guessing about what it # is we just received. Here is the set of possibilities: # # 1) We received a dict. This could be 2 things: # # a) Inspect it to see if it looks like a standard message envelope. # If so, great! # # b) If it doesn't look like a standard message envelope, it could either # be a notification, or a message from before we added a message # envelope (referred to as version 1.0). # Just return the message as-is. # # 2) It's any other non-dict type. Just return it and hope for the best. # This case covers return values from rpc.call() from before message # envelopes were used. (messages to call a method were always a dict) if not isinstance(msg, dict): # See #2 above. return msg base_envelope_keys = (_VERSION_KEY, _MESSAGE_KEY) if not all(map(lambda key: key in msg, base_envelope_keys)): # See #1.b above. return msg # At this point we think we have the message envelope # format we were expecting. (#1.a above) if not version_is_compatible(_RPC_ENVELOPE_VERSION, msg[_VERSION_KEY]): raise UnsupportedRpcEnvelopeVersion(version=msg[_VERSION_KEY]) raw_msg = jsonutils.loads(msg[_MESSAGE_KEY]) return raw_msg ceilometer-2014.1.5/ceilometer/openstack/common/rpc/impl_kombu.py0000664000567000056700000010165312540642615026152 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools import itertools import socket import ssl import time import uuid import eventlet import greenlet import kombu import kombu.connection import kombu.entity import kombu.messaging from oslo.config import cfg import six from ceilometer.openstack.common import excutils from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import network_utils from ceilometer.openstack.common.rpc import amqp as rpc_amqp from ceilometer.openstack.common.rpc import common as rpc_common from ceilometer.openstack.common import sslutils kombu_opts = [ cfg.StrOpt('kombu_ssl_version', default='', help='If SSL is enabled, the SSL version to use. Valid ' 'values are TLSv1, SSLv23 and SSLv3. 
SSLv2 might ' 'be available on some distributions.' ), cfg.StrOpt('kombu_ssl_keyfile', default='', help='SSL key file (valid only if SSL enabled)'), cfg.StrOpt('kombu_ssl_certfile', default='', help='SSL cert file (valid only if SSL enabled)'), cfg.StrOpt('kombu_ssl_ca_certs', default='', help=('SSL certification authority file ' '(valid only if SSL enabled)')), cfg.FloatOpt('kombu_reconnect_delay', default=1.0, help='How long to wait before reconnecting in response to an ' 'AMQP consumer cancel notification.'), cfg.StrOpt('rabbit_host', default='localhost', help='The RabbitMQ broker address where a single node is used'), cfg.IntOpt('rabbit_port', default=5672, help='The RabbitMQ broker port where a single node is used'), cfg.ListOpt('rabbit_hosts', default=['$rabbit_host:$rabbit_port'], help='RabbitMQ HA cluster host:port pairs'), cfg.BoolOpt('rabbit_use_ssl', default=False, help='Connect over SSL for RabbitMQ'), cfg.StrOpt('rabbit_userid', default='guest', help='The RabbitMQ userid'), cfg.StrOpt('rabbit_password', default='guest', help='The RabbitMQ password', secret=True), cfg.StrOpt('rabbit_virtual_host', default='/', help='The RabbitMQ virtual host'), cfg.IntOpt('rabbit_retry_interval', default=1, help='How frequently to retry connecting with RabbitMQ'), cfg.IntOpt('rabbit_retry_backoff', default=2, help='How long to backoff for between retries when connecting ' 'to RabbitMQ'), cfg.IntOpt('rabbit_max_retries', default=0, help='Maximum number of RabbitMQ connection retries. ' 'Default is 0 (infinite retry count)'), cfg.BoolOpt('rabbit_ha_queues', default=False, help='Use HA queues in RabbitMQ (x-ha-policy: all). ' 'If you change this option, you must wipe the ' 'RabbitMQ database.'), ] cfg.CONF.register_opts(kombu_opts) LOG = rpc_common.LOG def _get_queue_arguments(conf): """Construct the arguments for declaring a queue. If the rabbit_ha_queues option is set, we declare a mirrored queue as described here: http://www.rabbitmq.com/ha.html Setting x-ha-policy to all means that the queue will be mirrored to all nodes in the cluster. """ return {'x-ha-policy': 'all'} if conf.rabbit_ha_queues else {} class ConsumerBase(object): """Consumer base class.""" def __init__(self, channel, callback, tag, **kwargs): """Declare a queue on an amqp channel. 'channel' is the amqp channel to use 'callback' is the callback to call when messages are received 'tag' is a unique ID for the consumer on the channel queue name, exchange name, and other kombu options are passed in here as a dictionary. """ self.callback = callback self.tag = str(tag) self.kwargs = kwargs self.queue = None self.ack_on_error = kwargs.get('ack_on_error', True) self.reconnect(channel) def reconnect(self, channel): """Re-declare the queue after a rabbit reconnect.""" self.channel = channel self.kwargs['channel'] = channel self.queue = kombu.entity.Queue(**self.kwargs) self.queue.declare() def _callback_handler(self, message, callback): """Call callback with deserialized message. Messages that are processed without exception are ack'ed. If the message processing generates an exception, it will be ack'ed if ack_on_error=True. Otherwise it will be .requeue()'ed. """ try: msg = rpc_common.deserialize_msg(message.payload) callback(msg) except Exception: if self.ack_on_error: LOG.exception(_("Failed to process message" " ... skipping it.")) message.ack() else: LOG.exception(_("Failed to process message" " ... 
will requeue.")) message.requeue() else: message.ack() def consume(self, *args, **kwargs): """Actually declare the consumer on the amqp channel. This will start the flow of messages from the queue. Using the Connection.iterconsume() iterator will process the messages, calling the appropriate callback. If a callback is specified in kwargs, use that. Otherwise, use the callback passed during __init__() If kwargs['nowait'] is True, then this call will block until a message is read. """ options = {'consumer_tag': self.tag} options['nowait'] = kwargs.get('nowait', False) callback = kwargs.get('callback', self.callback) if not callback: raise ValueError("No callback defined") def _callback(raw_message): message = self.channel.message_to_python(raw_message) self._callback_handler(message, callback) self.queue.consume(*args, callback=_callback, **options) def cancel(self): """Cancel the consuming from the queue, if it has started.""" try: self.queue.cancel(self.tag) except KeyError as e: # NOTE(comstud): Kludge to get around a amqplib bug if str(e) != "u'%s'" % self.tag: raise self.queue = None class DirectConsumer(ConsumerBase): """Queue/consumer class for 'direct'.""" def __init__(self, conf, channel, msg_id, callback, tag, **kwargs): """Init a 'direct' queue. 'channel' is the amqp channel to use 'msg_id' is the msg_id to listen on 'callback' is the callback to call when messages are received 'tag' is a unique ID for the consumer on the channel Other kombu options may be passed """ # Default options options = {'durable': False, 'queue_arguments': _get_queue_arguments(conf), 'auto_delete': True, 'exclusive': False} options.update(kwargs) exchange = kombu.entity.Exchange(name=msg_id, type='direct', durable=options['durable'], auto_delete=options['auto_delete']) super(DirectConsumer, self).__init__(channel, callback, tag, name=msg_id, exchange=exchange, routing_key=msg_id, **options) class TopicConsumer(ConsumerBase): """Consumer class for 'topic'.""" def __init__(self, conf, channel, topic, callback, tag, name=None, exchange_name=None, **kwargs): """Init a 'topic' queue. :param channel: the amqp channel to use :param topic: the topic to listen on :paramtype topic: str :param callback: the callback to call when messages are received :param tag: a unique ID for the consumer on the channel :param name: optional queue name, defaults to topic :paramtype name: str Other kombu options may be passed as keyword arguments """ # Default options options = {'durable': conf.amqp_durable_queues, 'queue_arguments': _get_queue_arguments(conf), 'auto_delete': conf.amqp_auto_delete, 'exclusive': False} options.update(kwargs) exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf) exchange = kombu.entity.Exchange(name=exchange_name, type='topic', durable=options['durable'], auto_delete=options['auto_delete']) super(TopicConsumer, self).__init__(channel, callback, tag, name=name or topic, exchange=exchange, routing_key=topic, **options) class FanoutConsumer(ConsumerBase): """Consumer class for 'fanout'.""" def __init__(self, conf, channel, topic, callback, tag, **kwargs): """Init a 'fanout' queue. 
'channel' is the amqp channel to use 'topic' is the topic to listen on 'callback' is the callback to call when messages are received 'tag' is a unique ID for the consumer on the channel Other kombu options may be passed """ unique = uuid.uuid4().hex exchange_name = '%s_fanout' % topic queue_name = '%s_fanout_%s' % (topic, unique) # Default options options = {'durable': False, 'queue_arguments': _get_queue_arguments(conf), 'auto_delete': True, 'exclusive': False} options.update(kwargs) exchange = kombu.entity.Exchange(name=exchange_name, type='fanout', durable=options['durable'], auto_delete=options['auto_delete']) super(FanoutConsumer, self).__init__(channel, callback, tag, name=queue_name, exchange=exchange, routing_key=topic, **options) class Publisher(object): """Base Publisher class.""" def __init__(self, channel, exchange_name, routing_key, **kwargs): """Init the Publisher class with the exchange_name, routing_key, and other options """ self.exchange_name = exchange_name self.routing_key = routing_key self.kwargs = kwargs self.reconnect(channel) def reconnect(self, channel): """Re-establish the Producer after a rabbit reconnection.""" self.exchange = kombu.entity.Exchange(name=self.exchange_name, **self.kwargs) self.producer = kombu.messaging.Producer(exchange=self.exchange, channel=channel, routing_key=self.routing_key) def send(self, msg, timeout=None): """Send a message.""" if timeout: # # AMQP TTL is in milliseconds when set in the header. # self.producer.publish(msg, headers={'ttl': (timeout * 1000)}) else: self.producer.publish(msg) class DirectPublisher(Publisher): """Publisher class for 'direct'.""" def __init__(self, conf, channel, msg_id, **kwargs): """init a 'direct' publisher. Kombu options may be passed as keyword args to override defaults """ options = {'durable': False, 'auto_delete': True, 'exclusive': False} options.update(kwargs) super(DirectPublisher, self).__init__(channel, msg_id, msg_id, type='direct', **options) class TopicPublisher(Publisher): """Publisher class for 'topic'.""" def __init__(self, conf, channel, topic, **kwargs): """init a 'topic' publisher. Kombu options may be passed as keyword args to override defaults """ options = {'durable': conf.amqp_durable_queues, 'auto_delete': conf.amqp_auto_delete, 'exclusive': False} options.update(kwargs) exchange_name = rpc_amqp.get_control_exchange(conf) super(TopicPublisher, self).__init__(channel, exchange_name, topic, type='topic', **options) class FanoutPublisher(Publisher): """Publisher class for 'fanout'.""" def __init__(self, conf, channel, topic, **kwargs): """init a 'fanout' publisher. 
Kombu options may be passed as keyword args to override defaults """ options = {'durable': False, 'auto_delete': True, 'exclusive': False} options.update(kwargs) super(FanoutPublisher, self).__init__(channel, '%s_fanout' % topic, None, type='fanout', **options) class NotifyPublisher(TopicPublisher): """Publisher class for 'notify'.""" def __init__(self, conf, channel, topic, **kwargs): self.durable = kwargs.pop('durable', conf.amqp_durable_queues) self.queue_arguments = _get_queue_arguments(conf) super(NotifyPublisher, self).__init__(conf, channel, topic, **kwargs) def reconnect(self, channel): super(NotifyPublisher, self).reconnect(channel) # NOTE(jerdfelt): Normally the consumer would create the queue, but # we do this to ensure that messages don't get dropped if the # consumer is started after we do queue = kombu.entity.Queue(channel=channel, exchange=self.exchange, durable=self.durable, name=self.routing_key, routing_key=self.routing_key, queue_arguments=self.queue_arguments) queue.declare() class Connection(object): """Connection object.""" pool = None def __init__(self, conf, server_params=None): self.consumers = [] self.consumer_thread = None self.proxy_callbacks = [] self.conf = conf self.max_retries = self.conf.rabbit_max_retries # Try forever? if self.max_retries <= 0: self.max_retries = None self.interval_start = self.conf.rabbit_retry_interval self.interval_stepping = self.conf.rabbit_retry_backoff # max retry-interval = 30 seconds self.interval_max = 30 self.memory_transport = False if server_params is None: server_params = {} # Keys to translate from server_params to kombu params server_params_to_kombu_params = {'username': 'userid'} ssl_params = self._fetch_ssl_params() params_list = [] for adr in self.conf.rabbit_hosts: hostname, port = network_utils.parse_host_port( adr, default_port=self.conf.rabbit_port) params = { 'hostname': hostname, 'port': port, 'userid': self.conf.rabbit_userid, 'password': self.conf.rabbit_password, 'virtual_host': self.conf.rabbit_virtual_host, } for sp_key, value in six.iteritems(server_params): p_key = server_params_to_kombu_params.get(sp_key, sp_key) params[p_key] = value if self.conf.fake_rabbit: params['transport'] = 'memory' if self.conf.rabbit_use_ssl: params['ssl'] = ssl_params params_list.append(params) self.params_list = params_list brokers_count = len(self.params_list) self.next_broker_indices = itertools.cycle(range(brokers_count)) self.memory_transport = self.conf.fake_rabbit self.connection = None self.reconnect() def _fetch_ssl_params(self): """Handles fetching what ssl params should be used for the connection (if any). """ ssl_params = dict() # http://docs.python.org/library/ssl.html - ssl.wrap_socket if self.conf.kombu_ssl_version: ssl_params['ssl_version'] = sslutils.validate_ssl_version( self.conf.kombu_ssl_version) if self.conf.kombu_ssl_keyfile: ssl_params['keyfile'] = self.conf.kombu_ssl_keyfile if self.conf.kombu_ssl_certfile: ssl_params['certfile'] = self.conf.kombu_ssl_certfile if self.conf.kombu_ssl_ca_certs: ssl_params['ca_certs'] = self.conf.kombu_ssl_ca_certs # We might want to allow variations in the # future with this? ssl_params['cert_reqs'] = ssl.CERT_REQUIRED # Return the extended behavior or just have the default behavior return ssl_params or True def _connect(self, params): """Connect to rabbit. Re-establish any queues that may have been declared before if we are reconnecting. Exceptions should be handled by the caller. 
""" if self.connection: LOG.info(_("Reconnecting to AMQP server on " "%(hostname)s:%(port)d") % params) try: # NOTE(bogdando): when reconnecting to a RabbitMQ cluster # with mirrored queues in use, the attempt to release the # connection can hang "indefinitely" somewhere deep down # in Kombu. Blocking the thread for a bit prior to # release seems to kludge around the problem where it is # otherwise reproduceable. if self.conf.kombu_reconnect_delay > 0: LOG.info(_("Delaying reconnect for %1.1f seconds...") % self.conf.kombu_reconnect_delay) time.sleep(self.conf.kombu_reconnect_delay) self.connection.release() except self.connection_errors: pass # Setting this in case the next statement fails, though # it shouldn't be doing any network operations, yet. self.connection = None self.connection = kombu.connection.BrokerConnection(**params) self.connection_errors = self.connection.connection_errors if self.memory_transport: # Kludge to speed up tests. self.connection.transport.polling_interval = 0.0 self.consumer_num = itertools.count(1) self.connection.connect() self.channel = self.connection.channel() # work around 'memory' transport bug in 1.1.3 if self.memory_transport: self.channel._new_queue('ae.undeliver') for consumer in self.consumers: consumer.reconnect(self.channel) LOG.info(_('Connected to AMQP server on %(hostname)s:%(port)d') % params) def reconnect(self): """Handles reconnecting and re-establishing queues. Will retry up to self.max_retries number of times. self.max_retries = 0 means to retry forever. Sleep between tries, starting at self.interval_start seconds, backing off self.interval_stepping number of seconds each attempt. """ attempt = 0 while True: params = self.params_list[next(self.next_broker_indices)] attempt += 1 try: self._connect(params) return except (IOError, self.connection_errors) as e: pass except Exception as e: # NOTE(comstud): Unfortunately it's possible for amqplib # to return an error not covered by its transport # connection_errors in the case of a timeout waiting for # a protocol response. (See paste link in LP888621) # So, we check all exceptions for 'timeout' in them # and try to reconnect in this case. if 'timeout' not in str(e): raise log_info = {} log_info['err_str'] = str(e) log_info['max_retries'] = self.max_retries log_info.update(params) if self.max_retries and attempt == self.max_retries: msg = _('Unable to connect to AMQP server on ' '%(hostname)s:%(port)d after %(max_retries)d ' 'tries: %(err_str)s') % log_info LOG.error(msg) raise rpc_common.RPCException(msg) if attempt == 1: sleep_time = self.interval_start or 1 elif attempt > 1: sleep_time += self.interval_stepping if self.interval_max: sleep_time = min(sleep_time, self.interval_max) log_info['sleep_time'] = sleep_time LOG.error(_('AMQP server on %(hostname)s:%(port)d is ' 'unreachable: %(err_str)s. Trying again in ' '%(sleep_time)d seconds.') % log_info) time.sleep(sleep_time) def ensure(self, error_callback, method, *args, **kwargs): while True: try: return method(*args, **kwargs) except (self.connection_errors, socket.timeout, IOError) as e: if error_callback: error_callback(e) except Exception as e: # NOTE(comstud): Unfortunately it's possible for amqplib # to return an error not covered by its transport # connection_errors in the case of a timeout waiting for # a protocol response. (See paste link in LP888621) # So, we check all exceptions for 'timeout' in them # and try to reconnect in this case. 
if 'timeout' not in str(e): raise if error_callback: error_callback(e) self.reconnect() def get_channel(self): """Convenience call for bin/clear_rabbit_queues.""" return self.channel def close(self): """Close/release this connection.""" self.cancel_consumer_thread() self.wait_on_proxy_callbacks() self.connection.release() self.connection = None def reset(self): """Reset a connection so it can be used again.""" self.cancel_consumer_thread() self.wait_on_proxy_callbacks() self.channel.close() self.channel = self.connection.channel() # work around 'memory' transport bug in 1.1.3 if self.memory_transport: self.channel._new_queue('ae.undeliver') self.consumers = [] def declare_consumer(self, consumer_cls, topic, callback): """Create a Consumer using the class that was passed in and add it to our list of consumers """ def _connect_error(exc): log_info = {'topic': topic, 'err_str': str(exc)} LOG.error(_("Failed to declare consumer for topic '%(topic)s': " "%(err_str)s") % log_info) def _declare_consumer(): consumer = consumer_cls(self.conf, self.channel, topic, callback, six.next(self.consumer_num)) self.consumers.append(consumer) return consumer return self.ensure(_connect_error, _declare_consumer) def iterconsume(self, limit=None, timeout=None): """Return an iterator that will consume from all queues/consumers.""" info = {'do_consume': True} def _error_callback(exc): if isinstance(exc, socket.timeout): LOG.debug(_('Timed out waiting for RPC response: %s') % str(exc)) raise rpc_common.Timeout() else: LOG.exception(_('Failed to consume message from queue: %s') % str(exc)) info['do_consume'] = True def _consume(): if info['do_consume']: queues_head = self.consumers[:-1] # not fanout. queues_tail = self.consumers[-1] # fanout for queue in queues_head: queue.consume(nowait=True) queues_tail.consume(nowait=False) info['do_consume'] = False return self.connection.drain_events(timeout=timeout) for iteration in itertools.count(0): if limit and iteration >= limit: raise StopIteration yield self.ensure(_error_callback, _consume) def cancel_consumer_thread(self): """Cancel a consumer thread.""" if self.consumer_thread is not None: self.consumer_thread.kill() try: self.consumer_thread.wait() except greenlet.GreenletExit: pass self.consumer_thread = None def wait_on_proxy_callbacks(self): """Wait for all proxy callback threads to exit.""" for proxy_cb in self.proxy_callbacks: proxy_cb.wait() def publisher_send(self, cls, topic, msg, timeout=None, **kwargs): """Send to a publisher based on the publisher class.""" def _error_callback(exc): log_info = {'topic': topic, 'err_str': str(exc)} LOG.exception(_("Failed to publish message to topic " "'%(topic)s': %(err_str)s") % log_info) def _publish(): publisher = cls(self.conf, self.channel, topic, **kwargs) publisher.send(msg, timeout) self.ensure(_error_callback, _publish) def declare_direct_consumer(self, topic, callback): """Create a 'direct' queue. 
In nova's use, this is generally a msg_id queue used for responses for call/multicall """ self.declare_consumer(DirectConsumer, topic, callback) def declare_topic_consumer(self, topic, callback=None, queue_name=None, exchange_name=None, ack_on_error=True): """Create a 'topic' consumer.""" self.declare_consumer(functools.partial(TopicConsumer, name=queue_name, exchange_name=exchange_name, ack_on_error=ack_on_error, ), topic, callback) def declare_fanout_consumer(self, topic, callback): """Create a 'fanout' consumer.""" self.declare_consumer(FanoutConsumer, topic, callback) def direct_send(self, msg_id, msg): """Send a 'direct' message.""" self.publisher_send(DirectPublisher, msg_id, msg) def topic_send(self, topic, msg, timeout=None): """Send a 'topic' message.""" self.publisher_send(TopicPublisher, topic, msg, timeout) def fanout_send(self, topic, msg): """Send a 'fanout' message.""" self.publisher_send(FanoutPublisher, topic, msg) def notify_send(self, topic, msg, **kwargs): """Send a notify message on a topic.""" self.publisher_send(NotifyPublisher, topic, msg, None, **kwargs) def consume(self, limit=None): """Consume from all queues/consumers.""" it = self.iterconsume(limit=limit) while True: try: six.next(it) except StopIteration: return def consume_in_thread(self): """Consumer from all queues/consumers in a greenthread.""" @excutils.forever_retry_uncaught_exceptions def _consumer_thread(): try: self.consume() except greenlet.GreenletExit: return if self.consumer_thread is None: self.consumer_thread = eventlet.spawn(_consumer_thread) return self.consumer_thread def create_consumer(self, topic, proxy, fanout=False): """Create a consumer that calls a method in a proxy object.""" proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, rpc_amqp.get_connection_pool(self.conf, Connection)) self.proxy_callbacks.append(proxy_cb) if fanout: self.declare_fanout_consumer(topic, proxy_cb) else: self.declare_topic_consumer(topic, proxy_cb) def create_worker(self, topic, proxy, pool_name): """Create a worker that calls a method in a proxy object.""" proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, rpc_amqp.get_connection_pool(self.conf, Connection)) self.proxy_callbacks.append(proxy_cb) self.declare_topic_consumer(topic, proxy_cb, pool_name) def join_consumer_pool(self, callback, pool_name, topic, exchange_name=None, ack_on_error=True): """Register as a member of a group of consumers for a given topic from the specified exchange. Exactly one member of a given pool will receive each message. A message will be delivered to multiple pools, if more than one is created. 
""" callback_wrapper = rpc_amqp.CallbackWrapper( conf=self.conf, callback=callback, connection_pool=rpc_amqp.get_connection_pool(self.conf, Connection), wait_for_consumers=not ack_on_error ) self.proxy_callbacks.append(callback_wrapper) self.declare_topic_consumer( queue_name=pool_name, topic=topic, exchange_name=exchange_name, callback=callback_wrapper, ack_on_error=ack_on_error, ) def create_connection(conf, new=True): """Create a connection.""" return rpc_amqp.create_connection( conf, new, rpc_amqp.get_connection_pool(conf, Connection)) def multicall(conf, context, topic, msg, timeout=None): """Make a call that returns multiple times.""" return rpc_amqp.multicall( conf, context, topic, msg, timeout, rpc_amqp.get_connection_pool(conf, Connection)) def call(conf, context, topic, msg, timeout=None): """Sends a message on a topic and wait for a response.""" return rpc_amqp.call( conf, context, topic, msg, timeout, rpc_amqp.get_connection_pool(conf, Connection)) def cast(conf, context, topic, msg): """Sends a message on a topic without waiting for a response.""" return rpc_amqp.cast( conf, context, topic, msg, rpc_amqp.get_connection_pool(conf, Connection)) def fanout_cast(conf, context, topic, msg): """Sends a message on a fanout exchange without waiting for a response.""" return rpc_amqp.fanout_cast( conf, context, topic, msg, rpc_amqp.get_connection_pool(conf, Connection)) def cast_to_server(conf, context, server_params, topic, msg): """Sends a message on a topic to a specific server.""" return rpc_amqp.cast_to_server( conf, context, server_params, topic, msg, rpc_amqp.get_connection_pool(conf, Connection)) def fanout_cast_to_server(conf, context, server_params, topic, msg): """Sends a message on a fanout exchange to a specific server.""" return rpc_amqp.fanout_cast_to_server( conf, context, server_params, topic, msg, rpc_amqp.get_connection_pool(conf, Connection)) def notify(conf, context, topic, msg, envelope): """Sends a notification event on a topic.""" return rpc_amqp.notify( conf, context, topic, msg, rpc_amqp.get_connection_pool(conf, Connection), envelope) def cleanup(): return rpc_amqp.cleanup(Connection.pool) ceilometer-2014.1.5/ceilometer/openstack/common/rpc/amqp.py0000664000567000056700000005613412540642615024755 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # Copyright 2011 - 2012, Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Shared code between AMQP based openstack.common.rpc implementations. The code in this module is shared between the rpc implementations based on AMQP. Specifically, this includes impl_kombu and impl_qpid. impl_carrot also uses AMQP, but is deprecated and predates this code. 
""" import collections import inspect import sys import uuid from eventlet import greenpool from eventlet import pools from eventlet import queue from eventlet import semaphore from oslo.config import cfg import six from ceilometer.openstack.common import excutils from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import local from ceilometer.openstack.common import log as logging from ceilometer.openstack.common.rpc import common as rpc_common amqp_opts = [ cfg.BoolOpt('amqp_durable_queues', default=False, deprecated_name='rabbit_durable_queues', deprecated_group='DEFAULT', help='Use durable queues in amqp.'), cfg.BoolOpt('amqp_auto_delete', default=False, help='Auto-delete queues in amqp.'), ] cfg.CONF.register_opts(amqp_opts) UNIQUE_ID = '_unique_id' LOG = logging.getLogger(__name__) class Pool(pools.Pool): """Class that implements a Pool of Connections.""" def __init__(self, conf, connection_cls, *args, **kwargs): self.connection_cls = connection_cls self.conf = conf kwargs.setdefault("max_size", self.conf.rpc_conn_pool_size) kwargs.setdefault("order_as_stack", True) super(Pool, self).__init__(*args, **kwargs) self.reply_proxy = None # TODO(comstud): Timeout connections not used in a while def create(self): LOG.debug(_('Pool creating new connection')) return self.connection_cls(self.conf) def empty(self): while self.free_items: self.get().close() # Force a new connection pool to be created. # Note that this was added due to failing unit test cases. The issue # is the above "while loop" gets all the cached connections from the # pool and closes them, but never returns them to the pool, a pool # leak. The unit tests hang waiting for an item to be returned to the # pool. The unit tests get here via the tearDown() method. In the run # time code, it gets here via cleanup() and only appears in service.py # just before doing a sys.exit(), so cleanup() only happens once and # the leakage is not a problem. self.connection_cls.pool = None _pool_create_sem = semaphore.Semaphore() def get_connection_pool(conf, connection_cls): with _pool_create_sem: # Make sure only one thread tries to create the connection pool. if not connection_cls.pool: connection_cls.pool = Pool(conf, connection_cls) return connection_cls.pool class ConnectionContext(rpc_common.Connection): """The class that is actually returned to the create_connection() caller. This is essentially a wrapper around Connection that supports 'with'. It can also return a new Connection, or one from a pool. The function will also catch when an instance of this class is to be deleted. With that we can return Connections to the pool on exceptions and so forth without making the caller be responsible for catching them. If possible the function makes sure to return a connection to the pool. """ def __init__(self, conf, connection_pool, pooled=True, server_params=None): """Create a new connection, or get one from the pool.""" self.connection = None self.conf = conf self.connection_pool = connection_pool if pooled: self.connection = connection_pool.get() else: self.connection = connection_pool.connection_cls( conf, server_params=server_params) self.pooled = pooled def __enter__(self): """When with ConnectionContext() is used, return self.""" return self def _done(self): """If the connection came from a pool, clean it up and put it back. If it did not come from a pool, close it. 
""" if self.connection: if self.pooled: # Reset the connection so it's ready for the next caller # to grab from the pool self.connection.reset() self.connection_pool.put(self.connection) else: try: self.connection.close() except Exception: pass self.connection = None def __exit__(self, exc_type, exc_value, tb): """End of 'with' statement. We're done here.""" self._done() def __del__(self): """Caller is done with this connection. Make sure we cleaned up.""" self._done() def close(self): """Caller is done with this connection.""" self._done() def create_consumer(self, topic, proxy, fanout=False): self.connection.create_consumer(topic, proxy, fanout) def create_worker(self, topic, proxy, pool_name): self.connection.create_worker(topic, proxy, pool_name) def join_consumer_pool(self, callback, pool_name, topic, exchange_name, ack_on_error=True): self.connection.join_consumer_pool(callback, pool_name, topic, exchange_name, ack_on_error) def consume_in_thread(self): self.connection.consume_in_thread() def __getattr__(self, key): """Proxy all other calls to the Connection instance.""" if self.connection: return getattr(self.connection, key) else: raise rpc_common.InvalidRPCConnectionReuse() class ReplyProxy(ConnectionContext): """Connection class for RPC replies / callbacks.""" def __init__(self, conf, connection_pool): self._call_waiters = {} self._num_call_waiters = 0 self._num_call_waiters_wrn_threshold = 10 self._reply_q = 'reply_' + uuid.uuid4().hex super(ReplyProxy, self).__init__(conf, connection_pool, pooled=False) self.declare_direct_consumer(self._reply_q, self._process_data) self.consume_in_thread() def _process_data(self, message_data): msg_id = message_data.pop('_msg_id', None) waiter = self._call_waiters.get(msg_id) if not waiter: LOG.warn(_('No calling threads waiting for msg_id : %(msg_id)s' ', message : %(data)s'), {'msg_id': msg_id, 'data': message_data}) LOG.warn(_('_call_waiters: %s') % str(self._call_waiters)) else: waiter.put(message_data) def add_call_waiter(self, waiter, msg_id): self._num_call_waiters += 1 if self._num_call_waiters > self._num_call_waiters_wrn_threshold: LOG.warn(_('Number of call waiters is greater than warning ' 'threshold: %d. There could be a MulticallProxyWaiter ' 'leak.') % self._num_call_waiters_wrn_threshold) self._num_call_waiters_wrn_threshold *= 2 self._call_waiters[msg_id] = waiter def del_call_waiter(self, msg_id): self._num_call_waiters -= 1 del self._call_waiters[msg_id] def get_reply_q(self): return self._reply_q def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None, failure=None, ending=False, log_failure=True): """Sends a reply or an error on the channel signified by msg_id. Failure should be a sys.exc_info() tuple. """ with ConnectionContext(conf, connection_pool) as conn: if failure: failure = rpc_common.serialize_remote_exception(failure, log_failure) msg = {'result': reply, 'failure': failure} if ending: msg['ending'] = True _add_unique_id(msg) # If a reply_q exists, add the msg_id to the reply and pass the # reply_q to direct_send() to use it as the response queue. # Otherwise use the msg_id for backward compatibility. 
if reply_q: msg['_msg_id'] = msg_id conn.direct_send(reply_q, rpc_common.serialize_msg(msg)) else: conn.direct_send(msg_id, rpc_common.serialize_msg(msg)) class RpcContext(rpc_common.CommonRpcContext): """Context that supports replying to a rpc.call.""" def __init__(self, **kwargs): self.msg_id = kwargs.pop('msg_id', None) self.reply_q = kwargs.pop('reply_q', None) self.conf = kwargs.pop('conf') super(RpcContext, self).__init__(**kwargs) def deepcopy(self): values = self.to_dict() values['conf'] = self.conf values['msg_id'] = self.msg_id values['reply_q'] = self.reply_q return self.__class__(**values) def reply(self, reply=None, failure=None, ending=False, connection_pool=None, log_failure=True): if self.msg_id: msg_reply(self.conf, self.msg_id, self.reply_q, connection_pool, reply, failure, ending, log_failure) if ending: self.msg_id = None def unpack_context(conf, msg): """Unpack context from msg.""" context_dict = {} for key in list(msg.keys()): # NOTE(vish): Some versions of python don't like unicode keys # in kwargs. key = str(key) if key.startswith('_context_'): value = msg.pop(key) context_dict[key[9:]] = value context_dict['msg_id'] = msg.pop('_msg_id', None) context_dict['reply_q'] = msg.pop('_reply_q', None) context_dict['conf'] = conf ctx = RpcContext.from_dict(context_dict) rpc_common._safe_log(LOG.debug, _('unpacked context: %s'), ctx.to_dict()) return ctx def pack_context(msg, context): """Pack context into msg. Values for message keys need to be less than 255 chars, so we pull context out into a bunch of separate keys. If we want to support more arguments in rabbit messages, we may want to do the same for args at some point. """ if isinstance(context, dict): context_d = dict([('_context_%s' % key, value) for (key, value) in six.iteritems(context)]) else: context_d = dict([('_context_%s' % key, value) for (key, value) in six.iteritems(context.to_dict())]) msg.update(context_d) class _MsgIdCache(object): """This class checks any duplicate messages.""" # NOTE: This value is considered can be a configuration item, but # it is not necessary to change its value in most cases, # so let this value as static for now. DUP_MSG_CHECK_SIZE = 16 def __init__(self, **kwargs): self.prev_msgids = collections.deque([], maxlen=self.DUP_MSG_CHECK_SIZE) def check_duplicate_message(self, message_data): """AMQP consumers may read same message twice when exceptions occur before ack is returned. This method prevents doing it. """ if UNIQUE_ID in message_data: msg_id = message_data[UNIQUE_ID] if msg_id not in self.prev_msgids: self.prev_msgids.append(msg_id) else: raise rpc_common.DuplicateMessageError(msg_id=msg_id) def _add_unique_id(msg): """Add unique_id for checking duplicate messages.""" unique_id = uuid.uuid4().hex msg.update({UNIQUE_ID: unique_id}) LOG.debug(_('UNIQUE_ID is %s.') % (unique_id)) class _ThreadPoolWithWait(object): """Base class for a delayed invocation manager. Used by the Connection class to start up green threads to handle incoming messages. """ def __init__(self, conf, connection_pool): self.pool = greenpool.GreenPool(conf.rpc_thread_pool_size) self.connection_pool = connection_pool self.conf = conf def wait(self): """Wait for all callback threads to exit.""" self.pool.waitall() class CallbackWrapper(_ThreadPoolWithWait): """Wraps a straight callback. Allows it to be invoked in a green thread. """ def __init__(self, conf, callback, connection_pool, wait_for_consumers=False): """Initiates CallbackWrapper object. 
:param conf: cfg.CONF instance :param callback: a callable (probably a function) :param connection_pool: connection pool as returned by get_connection_pool() :param wait_for_consumers: wait for all green threads to complete and raise the last caught exception, if any. """ super(CallbackWrapper, self).__init__( conf=conf, connection_pool=connection_pool, ) self.callback = callback self.wait_for_consumers = wait_for_consumers self.exc_info = None def _wrap(self, message_data, **kwargs): """Wrap the callback invocation to catch exceptions. """ try: self.callback(message_data, **kwargs) except Exception: self.exc_info = sys.exc_info() def __call__(self, message_data): self.exc_info = None self.pool.spawn_n(self._wrap, message_data) if self.wait_for_consumers: self.pool.waitall() if self.exc_info: six.reraise(self.exc_info[1], None, self.exc_info[2]) class ProxyCallback(_ThreadPoolWithWait): """Calls methods on a proxy object based on method and args.""" def __init__(self, conf, proxy, connection_pool): super(ProxyCallback, self).__init__( conf=conf, connection_pool=connection_pool, ) self.proxy = proxy self.msg_id_cache = _MsgIdCache() def __call__(self, message_data): """Consumer callback to call a method on a proxy object. Parses the message for validity and fires off a thread to call the proxy object method. Message data should be a dictionary with two keys: method: string representing the method to call args: dictionary of arg: value Example: {'method': 'echo', 'args': {'value': 42}} """ # It is important to clear the context here, because at this point # the previous context is stored in local.store.context if hasattr(local.store, 'context'): del local.store.context rpc_common._safe_log(LOG.debug, _('received %s'), message_data) self.msg_id_cache.check_duplicate_message(message_data) ctxt = unpack_context(self.conf, message_data) method = message_data.get('method') args = message_data.get('args', {}) version = message_data.get('version') namespace = message_data.get('namespace') if not method: LOG.warn(_('no method for message: %s') % message_data) ctxt.reply(_('No method for message: %s') % message_data, connection_pool=self.connection_pool) return self.pool.spawn_n(self._process_data, ctxt, version, method, namespace, args) def _process_data(self, ctxt, version, method, namespace, args): """Process a message in a new thread. If the proxy object we have has a dispatch method (see rpc.dispatcher.RpcDispatcher), pass it the version, method, and args and let it dispatch as appropriate. If not, use the old behavior of magically calling the specified method on the proxy we have here. """ ctxt.update_store() try: rval = self.proxy.dispatch(ctxt, version, method, namespace, **args) # Check if the result was a generator if inspect.isgenerator(rval): for x in rval: ctxt.reply(x, None, connection_pool=self.connection_pool) else: ctxt.reply(rval, None, connection_pool=self.connection_pool) # This final None tells multicall that it is done. ctxt.reply(ending=True, connection_pool=self.connection_pool) except rpc_common.ClientException as e: LOG.debug(_('Expected exception during message handling (%s)') % e._exc_info[1]) ctxt.reply(None, e._exc_info, connection_pool=self.connection_pool, log_failure=False) except Exception: # sys.exc_info() is deleted by LOG.exception(). 
exc_info = sys.exc_info() LOG.error(_('Exception during message handling'), exc_info=exc_info) ctxt.reply(None, exc_info, connection_pool=self.connection_pool) class MulticallProxyWaiter(object): def __init__(self, conf, msg_id, timeout, connection_pool): self._msg_id = msg_id self._timeout = timeout or conf.rpc_response_timeout self._reply_proxy = connection_pool.reply_proxy self._done = False self._got_ending = False self._conf = conf self._dataqueue = queue.LightQueue() # Add this caller to the reply proxy's call_waiters self._reply_proxy.add_call_waiter(self, self._msg_id) self.msg_id_cache = _MsgIdCache() def put(self, data): self._dataqueue.put(data) def done(self): if self._done: return self._done = True # Remove this caller from reply proxy's call_waiters self._reply_proxy.del_call_waiter(self._msg_id) def _process_data(self, data): result = None self.msg_id_cache.check_duplicate_message(data) if data['failure']: failure = data['failure'] result = rpc_common.deserialize_remote_exception(self._conf, failure) elif data.get('ending', False): self._got_ending = True else: result = data['result'] return result def __iter__(self): """Return a result until we get a reply with an 'ending' flag.""" if self._done: raise StopIteration while True: try: data = self._dataqueue.get(timeout=self._timeout) result = self._process_data(data) except queue.Empty: self.done() raise rpc_common.Timeout() except Exception: with excutils.save_and_reraise_exception(): self.done() if self._got_ending: self.done() raise StopIteration if isinstance(result, Exception): self.done() raise result yield result def create_connection(conf, new, connection_pool): """Create a connection.""" return ConnectionContext(conf, connection_pool, pooled=not new) _reply_proxy_create_sem = semaphore.Semaphore() def multicall(conf, context, topic, msg, timeout, connection_pool): """Make a call that returns multiple times.""" LOG.debug(_('Making synchronous call on %s ...'), topic) msg_id = uuid.uuid4().hex msg.update({'_msg_id': msg_id}) LOG.debug(_('MSG_ID is %s') % (msg_id)) _add_unique_id(msg) pack_context(msg, context) with _reply_proxy_create_sem: if not connection_pool.reply_proxy: connection_pool.reply_proxy = ReplyProxy(conf, connection_pool) msg.update({'_reply_q': connection_pool.reply_proxy.get_reply_q()}) wait_msg = MulticallProxyWaiter(conf, msg_id, timeout, connection_pool) with ConnectionContext(conf, connection_pool) as conn: conn.topic_send(topic, rpc_common.serialize_msg(msg), timeout) return wait_msg def call(conf, context, topic, msg, timeout, connection_pool): """Sends a message on a topic and wait for a response.""" rv = multicall(conf, context, topic, msg, timeout, connection_pool) # NOTE(vish): return the last result from the multicall rv = list(rv) if not rv: return return rv[-1] def cast(conf, context, topic, msg, connection_pool): """Sends a message on a topic without waiting for a response.""" LOG.debug(_('Making asynchronous cast on %s...'), topic) _add_unique_id(msg) pack_context(msg, context) with ConnectionContext(conf, connection_pool) as conn: conn.topic_send(topic, rpc_common.serialize_msg(msg)) def fanout_cast(conf, context, topic, msg, connection_pool): """Sends a message on a fanout exchange without waiting for a response.""" LOG.debug(_('Making asynchronous fanout cast...')) _add_unique_id(msg) pack_context(msg, context) with ConnectionContext(conf, connection_pool) as conn: conn.fanout_send(topic, rpc_common.serialize_msg(msg)) def cast_to_server(conf, context, server_params, topic, msg, 
connection_pool): """Sends a message on a topic to a specific server.""" _add_unique_id(msg) pack_context(msg, context) with ConnectionContext(conf, connection_pool, pooled=False, server_params=server_params) as conn: conn.topic_send(topic, rpc_common.serialize_msg(msg)) def fanout_cast_to_server(conf, context, server_params, topic, msg, connection_pool): """Sends a message on a fanout exchange to a specific server.""" _add_unique_id(msg) pack_context(msg, context) with ConnectionContext(conf, connection_pool, pooled=False, server_params=server_params) as conn: conn.fanout_send(topic, rpc_common.serialize_msg(msg)) def notify(conf, context, topic, msg, connection_pool, envelope): """Sends a notification event on a topic.""" LOG.debug(_('Sending %(event_type)s on %(topic)s'), dict(event_type=msg.get('event_type'), topic=topic)) _add_unique_id(msg) pack_context(msg, context) with ConnectionContext(conf, connection_pool) as conn: if envelope: msg = rpc_common.serialize_msg(msg) conn.notify_send(topic, msg) def cleanup(connection_pool): if connection_pool: connection_pool.empty() def get_control_exchange(conf): return conf.control_exchange ceilometer-2014.1.5/ceilometer/openstack/common/rpc/service.py0000664000567000056700000000543412540642615025454 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # Copyright 2011 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import log as logging from ceilometer.openstack.common import rpc from ceilometer.openstack.common.rpc import dispatcher as rpc_dispatcher from ceilometer.openstack.common import service LOG = logging.getLogger(__name__) class Service(service.Service): """Service object for binaries running on hosts. A service enables rpc by listening to queues based on topic and host. """ def __init__(self, host, topic, manager=None, serializer=None): super(Service, self).__init__() self.host = host self.topic = topic self.serializer = serializer if manager is None: self.manager = self else: self.manager = manager def start(self): super(Service, self).start() self.conn = rpc.create_connection(new=True) LOG.debug(_("Creating Consumer connection for Service %s") % self.topic) dispatcher = rpc_dispatcher.RpcDispatcher([self.manager], self.serializer) # Share this same connection for these Consumers self.conn.create_consumer(self.topic, dispatcher, fanout=False) node_topic = '%s.%s' % (self.topic, self.host) self.conn.create_consumer(node_topic, dispatcher, fanout=False) self.conn.create_consumer(self.topic, dispatcher, fanout=True) # Hook to allow the manager to do other initializations after # the rpc connection is created. 
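        # Illustrative (hypothetical manager method): a manager that needs
        # extra setup once the RPC connection exists can define, e.g.
        #     def initialize_service_hook(self, service):
        #         # register additional consumers on service.conn,
        #         # start periodic tasks, etc.
        # and the check below will invoke it with this Service instance.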
if callable(getattr(self.manager, 'initialize_service_hook', None)): self.manager.initialize_service_hook(self) # Consume from all consumers in a thread self.conn.consume_in_thread() def stop(self): # Try to shut the connection down, but if we get any sort of # errors, go ahead and ignore them.. as we're shutting down anyway try: self.conn.close() except Exception: pass super(Service, self).stop() ceilometer-2014.1.5/ceilometer/openstack/common/rpc/proxy.py0000664000567000056700000002236312540642615025175 0ustar jenkinsjenkins00000000000000# Copyright 2012-2013 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ A helper class for proxy objects to remote APIs. For more information about rpc API version numbers, see: rpc/dispatcher.py """ import six from ceilometer.openstack.common import rpc from ceilometer.openstack.common.rpc import common as rpc_common from ceilometer.openstack.common.rpc import serializer as rpc_serializer class RpcProxy(object): """A helper class for rpc clients. This class is a wrapper around the RPC client API. It allows you to specify the topic and API version in a single place. This is intended to be used as a base class for a class that implements the client side of an rpc API. """ # The default namespace, which can be overridden in a subclass. RPC_API_NAMESPACE = None def __init__(self, topic, default_version, version_cap=None, serializer=None): """Initialize an RpcProxy. :param topic: The topic to use for all messages. :param default_version: The default API version to request in all outgoing messages. This can be overridden on a per-message basis. :param version_cap: Optionally cap the maximum version used for sent messages. :param serializer: Optionaly (de-)serialize entities with a provided helper. """ self.topic = topic self.default_version = default_version self.version_cap = version_cap if serializer is None: serializer = rpc_serializer.NoOpSerializer() self.serializer = serializer super(RpcProxy, self).__init__() def _set_version(self, msg, vers): """Helper method to set the version in a message. :param msg: The message having a version added to it. :param vers: The version number to add to the message. 
""" v = vers if vers else self.default_version if (self.version_cap and not rpc_common.version_is_compatible(self.version_cap, v)): raise rpc_common.RpcVersionCapError(version_cap=self.version_cap) msg['version'] = v def _get_topic(self, topic): """Return the topic to use for a message.""" return topic if topic else self.topic def can_send_version(self, version): """Check to see if a version is compatible with the version cap.""" return (not self.version_cap or rpc_common.version_is_compatible(self.version_cap, version)) @staticmethod def make_namespaced_msg(method, namespace, **kwargs): return {'method': method, 'namespace': namespace, 'args': kwargs} def make_msg(self, method, **kwargs): return self.make_namespaced_msg(method, self.RPC_API_NAMESPACE, **kwargs) def _serialize_msg_args(self, context, kwargs): """Helper method called to serialize message arguments. This calls our serializer on each argument, returning a new set of args that have been serialized. :param context: The request context :param kwargs: The arguments to serialize :returns: A new set of serialized arguments """ new_kwargs = dict() for argname, arg in six.iteritems(kwargs): new_kwargs[argname] = self.serializer.serialize_entity(context, arg) return new_kwargs def call(self, context, msg, topic=None, version=None, timeout=None): """rpc.call() a remote method. :param context: The request context :param msg: The message to send, including the method and args. :param topic: Override the topic for this message. :param version: (Optional) Override the requested API version in this message. :param timeout: (Optional) A timeout to use when waiting for the response. If no timeout is specified, a default timeout will be used that is usually sufficient. :returns: The return value from the remote method. """ self._set_version(msg, version) msg['args'] = self._serialize_msg_args(context, msg['args']) real_topic = self._get_topic(topic) try: result = rpc.call(context, real_topic, msg, timeout) return self.serializer.deserialize_entity(context, result) except rpc.common.Timeout as exc: raise rpc.common.Timeout( exc.info, real_topic, msg.get('method')) def multicall(self, context, msg, topic=None, version=None, timeout=None): """rpc.multicall() a remote method. :param context: The request context :param msg: The message to send, including the method and args. :param topic: Override the topic for this message. :param version: (Optional) Override the requested API version in this message. :param timeout: (Optional) A timeout to use when waiting for the response. If no timeout is specified, a default timeout will be used that is usually sufficient. :returns: An iterator that lets you process each of the returned values from the remote method as they arrive. """ self._set_version(msg, version) msg['args'] = self._serialize_msg_args(context, msg['args']) real_topic = self._get_topic(topic) try: result = rpc.multicall(context, real_topic, msg, timeout) return self.serializer.deserialize_entity(context, result) except rpc.common.Timeout as exc: raise rpc.common.Timeout( exc.info, real_topic, msg.get('method')) def cast(self, context, msg, topic=None, version=None): """rpc.cast() a remote method. :param context: The request context :param msg: The message to send, including the method and args. :param topic: Override the topic for this message. :param version: (Optional) Override the requested API version in this message. :returns: None. rpc.cast() does not wait on any return value from the remote method. 
""" self._set_version(msg, version) msg['args'] = self._serialize_msg_args(context, msg['args']) rpc.cast(context, self._get_topic(topic), msg) def fanout_cast(self, context, msg, topic=None, version=None): """rpc.fanout_cast() a remote method. :param context: The request context :param msg: The message to send, including the method and args. :param topic: Override the topic for this message. :param version: (Optional) Override the requested API version in this message. :returns: None. rpc.fanout_cast() does not wait on any return value from the remote method. """ self._set_version(msg, version) msg['args'] = self._serialize_msg_args(context, msg['args']) rpc.fanout_cast(context, self._get_topic(topic), msg) def cast_to_server(self, context, server_params, msg, topic=None, version=None): """rpc.cast_to_server() a remote method. :param context: The request context :param server_params: Server parameters. See rpc.cast_to_server() for details. :param msg: The message to send, including the method and args. :param topic: Override the topic for this message. :param version: (Optional) Override the requested API version in this message. :returns: None. rpc.cast_to_server() does not wait on any return values. """ self._set_version(msg, version) msg['args'] = self._serialize_msg_args(context, msg['args']) rpc.cast_to_server(context, server_params, self._get_topic(topic), msg) def fanout_cast_to_server(self, context, server_params, msg, topic=None, version=None): """rpc.fanout_cast_to_server() a remote method. :param context: The request context :param server_params: Server parameters. See rpc.cast_to_server() for details. :param msg: The message to send, including the method and args. :param topic: Override the topic for this message. :param version: (Optional) Override the requested API version in this message. :returns: None. rpc.fanout_cast_to_server() does not wait on any return values. """ self._set_version(msg, version) msg['args'] = self._serialize_msg_args(context, msg['args']) rpc.fanout_cast_to_server(context, server_params, self._get_topic(topic), msg) ceilometer-2014.1.5/ceilometer/openstack/common/rpc/dispatcher.py0000664000567000056700000001555012540642615026142 0ustar jenkinsjenkins00000000000000# Copyright 2012 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Code for rpc message dispatching. Messages that come in have a version number associated with them. RPC API version numbers are in the form: Major.Minor For a given message with version X.Y, the receiver must be marked as able to handle messages of version A.B, where: A = X B >= Y The Major version number would be incremented for an almost completely new API. The Minor version number would be incremented for backwards compatible changes to an existing API. A backwards compatible change could be something like adding a new method, adding an argument to an existing method (but not requiring it), or changing the type for an existing argument (but still handling the old type as well). 
The conversion over to a versioned API must be done on both the client side and server side of the API at the same time. However, as the code stands today, there can be both versioned and unversioned APIs implemented in the same code base. EXAMPLES ======== Nova was the first project to use versioned rpc APIs. Consider the compute rpc API as an example. The client side is in nova/compute/rpcapi.py and the server side is in nova/compute/manager.py. Example 1) Adding a new method. ------------------------------- Adding a new method is a backwards compatible change. It should be added to nova/compute/manager.py, and RPC_API_VERSION should be bumped from X.Y to X.Y+1. On the client side, the new method in nova/compute/rpcapi.py should have a specific version specified to indicate the minimum API version that must be implemented for the method to be supported. For example:: def get_host_uptime(self, ctxt, host): topic = _compute_topic(self.topic, ctxt, host, None) return self.call(ctxt, self.make_msg('get_host_uptime'), topic, version='1.1') In this case, version '1.1' is the first version that supported the get_host_uptime() method. Example 2) Adding a new parameter. ---------------------------------- Adding a new parameter to an rpc method can be made backwards compatible. The RPC_API_VERSION on the server side (nova/compute/manager.py) should be bumped. The implementation of the method must not expect the parameter to be present.:: def some_remote_method(self, arg1, arg2, newarg=None): # The code needs to deal with newarg=None for cases # where an older client sends a message without it. pass On the client side, the same changes should be made as in example 1. The minimum version that supports the new parameter should be specified. """ import six from ceilometer.openstack.common.rpc import common as rpc_common from ceilometer.openstack.common.rpc import serializer as rpc_serializer class RpcDispatcher(object): """Dispatch rpc messages according to the requested API version. This class can be used as the top level 'manager' for a service. It contains a list of underlying managers that have an API_VERSION attribute. """ def __init__(self, callbacks, serializer=None): """Initialize the rpc dispatcher. :param callbacks: List of proxy objects that are an instance of a class with rpc methods exposed. Each proxy object should have an RPC_API_VERSION attribute. :param serializer: The Serializer object that will be used to deserialize arguments before the method call and to serialize the result after it returns. """ self.callbacks = callbacks if serializer is None: serializer = rpc_serializer.NoOpSerializer() self.serializer = serializer super(RpcDispatcher, self).__init__() def _deserialize_args(self, context, kwargs): """Helper method called to deserialize args before dispatch. This calls our serializer on each argument, returning a new set of args that have been deserialized. :param context: The request context :param kwargs: The arguments to be deserialized :returns: A new set of deserialized args """ new_kwargs = dict() for argname, arg in six.iteritems(kwargs): new_kwargs[argname] = self.serializer.deserialize_entity(context, arg) return new_kwargs def dispatch(self, ctxt, version, method, namespace, **kwargs): """Dispatch a message based on a requested version. :param ctxt: The request context :param version: The requested API version from the incoming message :param method: The method requested to be called by the incoming message. :param namespace: The namespace for the requested method. 
If None, the dispatcher will look for a method on a callback object with no namespace set. :param kwargs: A dict of keyword arguments to be passed to the method. :returns: Whatever is returned by the underlying method that gets called. """ if not version: version = '1.0' had_compatible = False for proxyobj in self.callbacks: # Check for namespace compatibility try: cb_namespace = proxyobj.RPC_API_NAMESPACE except AttributeError: cb_namespace = None if namespace != cb_namespace: continue # Check for version compatibility try: rpc_api_version = proxyobj.RPC_API_VERSION except AttributeError: rpc_api_version = '1.0' is_compatible = rpc_common.version_is_compatible(rpc_api_version, version) had_compatible = had_compatible or is_compatible if not hasattr(proxyobj, method): continue if is_compatible: kwargs = self._deserialize_args(ctxt, kwargs) result = getattr(proxyobj, method)(ctxt, **kwargs) return self.serializer.serialize_entity(ctxt, result) if had_compatible: raise AttributeError("No such RPC function '%s'" % method) else: raise rpc_common.UnsupportedRpcVersion(version=version) ceilometer-2014.1.5/ceilometer/openstack/common/rpc/matchmaker_redis.py0000664000567000056700000001132212540642615027307 0ustar jenkinsjenkins00000000000000# Copyright 2013 Cloudscaling Group, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ The MatchMaker classes should accept a Topic or Fanout exchange key and return keys for direct exchanges, per (approximate) AMQP parlance. """ from oslo.config import cfg from ceilometer.openstack.common import importutils from ceilometer.openstack.common import log as logging from ceilometer.openstack.common.rpc import matchmaker as mm_common redis = importutils.try_import('redis') matchmaker_redis_opts = [ cfg.StrOpt('host', default='127.0.0.1', help='Host to locate redis'), cfg.IntOpt('port', default=6379, help='Use this port to connect to redis host.'), cfg.StrOpt('password', default=None, help='Password for Redis server. (optional)'), ] CONF = cfg.CONF opt_group = cfg.OptGroup(name='matchmaker_redis', title='Options for Redis-based MatchMaker') CONF.register_group(opt_group) CONF.register_opts(matchmaker_redis_opts, opt_group) LOG = logging.getLogger(__name__) class RedisExchange(mm_common.Exchange): def __init__(self, matchmaker): self.matchmaker = matchmaker self.redis = matchmaker.redis super(RedisExchange, self).__init__() class RedisTopicExchange(RedisExchange): """Exchange where all topic keys are split, sending to second half. i.e. "compute.host" sends a message to "compute" running on "host" """ def run(self, topic): while True: member_name = self.redis.srandmember(topic) if not member_name: # If this happens, there are no # longer any members. 
break if not self.matchmaker.is_alive(topic, member_name): continue host = member_name.split('.', 1)[1] return [(member_name, host)] return [] class RedisFanoutExchange(RedisExchange): """Return a list of all hosts.""" def run(self, topic): topic = topic.split('~', 1)[1] hosts = self.redis.smembers(topic) good_hosts = filter( lambda host: self.matchmaker.is_alive(topic, host), hosts) return [(x, x.split('.', 1)[1]) for x in good_hosts] class MatchMakerRedis(mm_common.HeartbeatMatchMakerBase): """MatchMaker registering and looking-up hosts with a Redis server.""" def __init__(self): super(MatchMakerRedis, self).__init__() if not redis: raise ImportError("Failed to import module redis.") self.redis = redis.Redis( host=CONF.matchmaker_redis.host, port=CONF.matchmaker_redis.port, password=CONF.matchmaker_redis.password) self.add_binding(mm_common.FanoutBinding(), RedisFanoutExchange(self)) self.add_binding(mm_common.DirectBinding(), mm_common.DirectExchange()) self.add_binding(mm_common.TopicBinding(), RedisTopicExchange(self)) def ack_alive(self, key, host): topic = "%s.%s" % (key, host) if not self.redis.expire(topic, CONF.matchmaker_heartbeat_ttl): # If we could not update the expiration, the key # might have been pruned. Re-register, creating a new # key in Redis. self.register(self.topic_host[host], host) def is_alive(self, topic, host): if self.redis.ttl(host) == -1: self.expire(topic, host) return False return True def expire(self, topic, host): with self.redis.pipeline() as pipe: pipe.multi() pipe.delete(host) pipe.srem(topic, host) pipe.execute() def backend_register(self, key, key_host): with self.redis.pipeline() as pipe: pipe.multi() pipe.sadd(key, key_host) # No value is needed, we just # care if it exists. Sets aren't viable # because only keys can expire. pipe.set(key_host, '') pipe.execute() def backend_unregister(self, key, key_host): with self.redis.pipeline() as pipe: pipe.multi() pipe.srem(key, key_host) pipe.delete(key_host) pipe.execute() ceilometer-2014.1.5/ceilometer/openstack/common/units.py0000664000567000056700000000163012540642615024364 0ustar jenkinsjenkins00000000000000# Copyright 2013 IBM Corp # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Unit constants """ #Binary unit constants. Ki = 1024 Mi = 1024 ** 2 Gi = 1024 ** 3 Ti = 1024 ** 4 Pi = 1024 ** 5 Ei = 1024 ** 6 Zi = 1024 ** 7 Yi = 1024 ** 8 #Decimal unit constants. k = 1000 M = 1000 ** 2 G = 1000 ** 3 T = 1000 ** 4 P = 1000 ** 5 E = 1000 ** 6 Z = 1000 ** 7 Y = 1000 ** 8 ceilometer-2014.1.5/ceilometer/openstack/common/py3kcompat/0000775000567000056700000000000012540643223024736 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/py3kcompat/__init__.py0000664000567000056700000000000012540642615027041 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/openstack/common/py3kcompat/urlutils.py0000664000567000056700000000356012540642615027203 0ustar jenkinsjenkins00000000000000# # Copyright 2013 Canonical Ltd. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """ Python2/Python3 compatibility layer for OpenStack """ import six if six.PY3: # python3 import urllib.error import urllib.parse import urllib.request urlencode = urllib.parse.urlencode urljoin = urllib.parse.urljoin quote = urllib.parse.quote quote_plus = urllib.parse.quote_plus parse_qsl = urllib.parse.parse_qsl unquote = urllib.parse.unquote unquote_plus = urllib.parse.unquote_plus urlparse = urllib.parse.urlparse urlsplit = urllib.parse.urlsplit urlunsplit = urllib.parse.urlunsplit SplitResult = urllib.parse.SplitResult urlopen = urllib.request.urlopen URLError = urllib.error.URLError pathname2url = urllib.request.pathname2url else: # python2 import urllib import urllib2 import urlparse urlencode = urllib.urlencode quote = urllib.quote quote_plus = urllib.quote_plus unquote = urllib.unquote unquote_plus = urllib.unquote_plus parse = urlparse parse_qsl = parse.parse_qsl urljoin = parse.urljoin urlparse = parse.urlparse urlsplit = parse.urlsplit urlunsplit = parse.urlunsplit SplitResult = parse.SplitResult urlopen = urllib2.urlopen URLError = urllib2.URLError pathname2url = urllib.pathname2url ceilometer-2014.1.5/ceilometer/openstack/common/strutils.py0000664000567000056700000001657612540642615025132 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ System-level utilities and helper functions. """ import re import sys import unicodedata import six from ceilometer.openstack.common.gettextutils import _ # Used for looking up extensions of text # to their 'multiplied' byte amount BYTE_MULTIPLIERS = { '': 1, 't': 1024 ** 4, 'g': 1024 ** 3, 'm': 1024 ** 2, 'k': 1024, } BYTE_REGEX = re.compile(r'(^-?\d+)(\D*)') TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes') FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no') SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]") SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+") def int_from_bool_as_string(subject): """Interpret a string as a boolean and return either 1 or 0. Any string value in: ('True', 'true', 'On', 'on', '1') is interpreted as a boolean True. Useful for JSON-decoded stuff and config file parsing """ return bool_from_string(subject) and 1 or 0 def bool_from_string(subject, strict=False, default=False): """Interpret a string as a boolean. 
A case-insensitive match is performed such that strings matching 't', 'true', 'on', 'y', 'yes', or '1' are considered True and, when `strict=False`, anything else returns the value specified by 'default'. Useful for JSON-decoded stuff and config file parsing. If `strict=True`, unrecognized values, including None, will raise a ValueError which is useful when parsing values passed in from an API call. Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'. """ if not isinstance(subject, six.string_types): subject = str(subject) lowered = subject.strip().lower() if lowered in TRUE_STRINGS: return True elif lowered in FALSE_STRINGS: return False elif strict: acceptable = ', '.join( "'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS)) msg = _("Unrecognized value '%(val)s', acceptable values are:" " %(acceptable)s") % {'val': subject, 'acceptable': acceptable} raise ValueError(msg) else: return default def safe_decode(text, incoming=None, errors='strict'): """Decodes incoming str using `incoming` if they're not already unicode. :param incoming: Text's current encoding :param errors: Errors handling policy. See here for valid values http://docs.python.org/2/library/codecs.html :returns: text or a unicode `incoming` encoded representation of it. :raises TypeError: If text is not an instance of str """ if not isinstance(text, six.string_types): raise TypeError("%s can't be decoded" % type(text)) if isinstance(text, six.text_type): return text if not incoming: incoming = (sys.stdin.encoding or sys.getdefaultencoding()) try: return text.decode(incoming, errors) except UnicodeDecodeError: # Note(flaper87) If we get here, it means that # sys.stdin.encoding / sys.getdefaultencoding # didn't return a suitable encoding to decode # text. This happens mostly when global LANG # var is not set correctly and there's no # default encoding. In this case, most likely # python will use ASCII or ANSI encoders as # default encodings but they won't be capable # of decoding non-ASCII characters. # # Also, UTF-8 is being used since it's an ASCII # extension. return text.decode('utf-8', errors) def safe_encode(text, incoming=None, encoding='utf-8', errors='strict'): """Encodes incoming str/unicode using `encoding`. If incoming is not specified, text is expected to be encoded with current python's default encoding. (`sys.getdefaultencoding`) :param incoming: Text's current encoding :param encoding: Expected encoding for text (Default UTF-8) :param errors: Errors handling policy. See here for valid values http://docs.python.org/2/library/codecs.html :returns: text or a bytestring `encoding` encoded representation of it. :raises TypeError: If text is not an instance of str """ if not isinstance(text, six.string_types): raise TypeError("%s can't be encoded" % type(text)) if not incoming: incoming = (sys.stdin.encoding or sys.getdefaultencoding()) if isinstance(text, six.text_type): if six.PY3: return text.encode(encoding, errors).decode(incoming) else: return text.encode(encoding, errors) elif text and encoding != incoming: # Decode text before encoding it with `encoding` text = safe_decode(text, incoming, errors) if six.PY3: return text.encode(encoding, errors).decode(incoming) else: return text.encode(encoding, errors) return text def to_bytes(text, default=0): """Converts a string into an integer of bytes. Looks at the last characters of the text to determine what conversion is needed to turn the input text into a byte number. Supports "B, K(B), M(B), G(B), and T(B)". 
(case insensitive) :param text: String input for bytes size conversion. :param default: Default return value when text is blank. """ match = BYTE_REGEX.search(text) if match: magnitude = int(match.group(1)) mult_key_org = match.group(2) if not mult_key_org: return magnitude elif text: msg = _('Invalid string format: %s') % text raise TypeError(msg) else: return default mult_key = mult_key_org.lower().replace('b', '', 1) multiplier = BYTE_MULTIPLIERS.get(mult_key) if multiplier is None: msg = _('Unknown byte multiplier: %s') % mult_key_org raise TypeError(msg) return magnitude * multiplier def to_slug(value, incoming=None, errors="strict"): """Normalize string. Convert to lowercase, remove non-word characters, and convert spaces to hyphens. Inspired by Django's `slugify` filter. :param value: Text to slugify :param incoming: Text's current encoding :param errors: Errors handling policy. See here for valid values http://docs.python.org/2/library/codecs.html :returns: slugified unicode representation of `value` :raises TypeError: If text is not an instance of str """ value = safe_decode(value, incoming, errors) # NOTE(aababilov): no need to use safe_(encode|decode) here: # encodings are always "ascii", error handling is always "ignore" # and types are always known (first: unicode; second: str) value = unicodedata.normalize("NFKD", value).encode( "ascii", "ignore").decode("ascii") value = SLUGIFY_STRIP_RE.sub("", value).strip().lower() return SLUGIFY_HYPHENATE_RE.sub("-", value) ceilometer-2014.1.5/ceilometer/openstack/common/fileutils.py0000664000567000056700000000751712540642615025234 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import contextlib import errno import os import tempfile from ceilometer.openstack.common import excutils from ceilometer.openstack.common.gettextutils import _ from ceilometer.openstack.common import log as logging LOG = logging.getLogger(__name__) _FILE_CACHE = {} def ensure_tree(path): """Create a directory (and any ancestor directories required) :param path: Directory to create """ try: os.makedirs(path) except OSError as exc: if exc.errno == errno.EEXIST: if not os.path.isdir(path): raise else: raise def read_cached_file(filename, force_reload=False): """Read from a file if it has been modified. :param force_reload: Whether to reload the file. :returns: A tuple with a boolean specifying if the data is fresh or not. """ global _FILE_CACHE if force_reload and filename in _FILE_CACHE: del _FILE_CACHE[filename] reloaded = False mtime = os.path.getmtime(filename) cache_info = _FILE_CACHE.setdefault(filename, {}) if not cache_info or mtime > cache_info.get('mtime', 0): LOG.debug(_("Reloading cached file %s") % filename) with open(filename) as fap: cache_info['data'] = fap.read() cache_info['mtime'] = mtime reloaded = True return (reloaded, cache_info['data']) def delete_if_exists(path, remove=os.unlink): """Delete a file, but ignore file not found error. 
:param path: File to delete :param remove: Optional function to remove passed path """ try: remove(path) except OSError as e: if e.errno != errno.ENOENT: raise @contextlib.contextmanager def remove_path_on_error(path, remove=delete_if_exists): """Protect code that wants to operate on PATH atomically. Any exception will cause PATH to be removed. :param path: File to work with :param remove: Optional function to remove passed path """ try: yield except Exception: with excutils.save_and_reraise_exception(): remove(path) def file_open(*args, **kwargs): """Open file see built-in file() documentation for more details Note: The reason this is kept in a separate module is to easily be able to provide a stub module that doesn't alter system state at all (for unit tests) """ return file(*args, **kwargs) def write_to_tempfile(content, path=None, suffix='', prefix='tmp'): """Create temporary file or use existing file. This util is needed for creating temporary file with specified content, suffix and prefix. If path is not None, it will be used for writing content. If the path doesn't exist it'll be created. :param content: content for temporary file. :param path: same as parameter 'dir' for mkstemp :param suffix: same as parameter 'suffix' for mkstemp :param prefix: same as parameter 'prefix' for mkstemp For example: it can be used in database tests for creating configuration files. """ if path: ensure_tree(path) (fd, path) = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix) try: os.write(fd, content) finally: os.close(fd) return path ceilometer-2014.1.5/ceilometer/openstack/__init__.py0000664000567000056700000000000012540642613023455 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/central/0000775000567000056700000000000012540643223021015 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/central/plugin.py0000664000567000056700000000156312540642615022676 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012 New Dream Network, LLC (DreamHost) # # Author: Doug Hellmann # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Base class for plugins used by the central agent. """ from ceilometer import plugin class CentralPollster(plugin.PollsterBase): """Base class for plugins that support the polling API.""" ceilometer-2014.1.5/ceilometer/central/manager.py0000664000567000056700000000362112540642615023007 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012-2013 eNovance # # Author: Julien Danjou # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
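# Illustrative only (values are placeholders): the Keystone client created in
# interval_task() below is configured from the [service_credentials] section,
# e.g. a ceilometer.conf fragment along these lines:
#
#     [service_credentials]
#     os_username = ceilometer
#     os_password = secret
#     os_tenant_name = service
#     os_auth_url = http://keystone.example.com:5000/v2.0
#     os_region_name = RegionOne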
from keystoneclient.v2_0 import client as ksclient from oslo.config import cfg from ceilometer import agent from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log cfg.CONF.import_group('service_credentials', 'ceilometer.service') LOG = log.getLogger(__name__) class AgentManager(agent.AgentManager): def __init__(self): super(AgentManager, self).__init__('central') def interval_task(self, task): try: self.keystone = ksclient.Client( username=cfg.CONF.service_credentials.os_username, password=cfg.CONF.service_credentials.os_password, tenant_id=cfg.CONF.service_credentials.os_tenant_id, tenant_name=cfg.CONF.service_credentials.os_tenant_name, cacert=cfg.CONF.service_credentials.os_cacert, auth_url=cfg.CONF.service_credentials.os_auth_url, region_name=cfg.CONF.service_credentials.os_region_name, insecure=cfg.CONF.service_credentials.insecure) except Exception as e: LOG.error(_('Skip interval_task because Keystone error: %s'), e) return super(AgentManager, self).interval_task(task) ceilometer-2014.1.5/ceilometer/central/__init__.py0000664000567000056700000000000012540642615023120 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/publisher/0000775000567000056700000000000012540643223021362 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/publisher/test.py0000664000567000056700000000231312540642615022716 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 eNovance # # Author: Julien Danjou # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Publish a sample in memory, useful for testing """ from ceilometer import publisher class TestPublisher(publisher.PublisherBase): """Publisher used in unit testing.""" def __init__(self, parsed_url): self.samples = [] self.calls = 0 def publish_samples(self, context, samples): """Send a metering message for publishing :param context: Execution context from the service or RPC call :param samples: Samples from pipeline after transformation """ self.samples.extend(samples) self.calls += 1 ceilometer-2014.1.5/ceilometer/publisher/udp.py0000664000567000056700000000460312540642615022533 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 eNovance # # Author: Julien Danjou , # Tyaptin Ilya # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
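# NOTE: editorial sketch of the receiving end of this publisher (normally the
# ceilometer collector plays that role).  Each sample is msgpack-encoded and
# sent as a single datagram, so a minimal consumer only needs a bound UDP
# socket; the port used here is an assumption, not something defined in this
# module:
#
#     import socket
#
#     import msgpack
#
#     sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#     sock.bind(('0.0.0.0', 4952))
#     while True:
#         data, _addr = sock.recvfrom(64 * 1024)
#         # dict shaped by utils.meter_message_from_counter()
#         sample = msgpack.loads(data)
#         print(sample['counter_name'], sample['counter_volume'])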
"""Publish a sample using an UDP mechanism """ import socket import msgpack from oslo.config import cfg from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log from ceilometer.openstack.common import network_utils from ceilometer import publisher from ceilometer.publisher import utils cfg.CONF.import_opt('udp_port', 'ceilometer.collector', group='collector') LOG = log.getLogger(__name__) class UDPPublisher(publisher.PublisherBase): def __init__(self, parsed_url): self.host, self.port = network_utils.parse_host_port( parsed_url.netloc, default_port=cfg.CONF.collector.udp_port) self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) def publish_samples(self, context, samples): """Send a metering message for publishing :param context: Execution context from the service or RPC call :param samples: Samples from pipeline after transformation """ for sample in samples: msg = utils.meter_message_from_counter( sample, cfg.CONF.publisher.metering_secret) host = self.host port = self.port LOG.debug(_("Publishing sample %(msg)s over UDP to " "%(host)s:%(port)d") % {'msg': msg, 'host': host, 'port': port}) try: self.socket.sendto(msgpack.dumps(msg), (self.host, self.port)) except Exception as e: LOG.warn(_("Unable to send sample over UDP")) LOG.exception(e) ceilometer-2014.1.5/ceilometer/publisher/rpc.py0000664000567000056700000001702012540642615022524 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012 New Dream Network, LLC (DreamHost) # # Author: Doug Hellmann # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Publish a sample using the preferred RPC mechanism. """ import itertools import operator import six.moves.urllib.parse as urlparse from oslo.config import cfg from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log from ceilometer.openstack.common import rpc from ceilometer import publisher from ceilometer.publisher import utils LOG = log.getLogger(__name__) METER_PUBLISH_OPTS = [ cfg.StrOpt('metering_topic', default='metering', help='The topic that ceilometer uses for metering messages.', deprecated_group="DEFAULT", ), ] def register_opts(config): """Register the options for publishing metering messages. """ config.register_opts(METER_PUBLISH_OPTS, group="publisher_rpc") register_opts(cfg.CONF) def import_backend_retry_config(): """Import the retry config option native to the configured rpc backend (if such a native config option exists). """ cfg.CONF.import_opt('rpc_backend', 'ceilometer.openstack.common.rpc') kombu = 'ceilometer.openstack.common.rpc.impl_kombu' if cfg.CONF.rpc_backend == kombu: try: cfg.CONF.import_opt('rabbit_max_retries', kombu) except ImportError: pass import_backend_retry_config() def override_backend_retry_config(value): """Override the retry config option native to the configured rpc backend (if such a native config option exists). 
:param value: the value to override """ # TODO(eglynn): ultimately we should add to olso a more generic concept # of retry config (i.e. not specific to an individual AMQP provider) # see: https://bugs.launchpad.net/ceilometer/+bug/1244698 kombu = 'ceilometer.openstack.common.rpc.impl_kombu' if cfg.CONF.rpc_backend == kombu: if 'rabbit_max_retries' in cfg.CONF: cfg.CONF.set_override('rabbit_max_retries', value) class RPCPublisher(publisher.PublisherBase): def __init__(self, parsed_url): options = urlparse.parse_qs(parsed_url.query) # the values of the option is a list of url params values # only take care of the latest one if the option # is provided more than once self.per_meter_topic = bool(int( options.get('per_meter_topic', [0])[-1])) self.target = options.get('target', ['record_metering_data'])[0] self.policy = options.get('policy', ['default'])[-1] self.max_queue_length = int(options.get( 'max_queue_length', [1024])[-1]) self.local_queue = [] if self.policy in ['queue', 'drop']: LOG.info(_('Publishing policy set to %s, ' 'override backend retry config to 1') % self.policy) override_backend_retry_config(1) elif self.policy == 'default': LOG.info(_('Publishing policy set to %s') % self.policy) else: LOG.warn(_('Publishing policy is unknown (%s) force to default') % self.policy) self.policy = 'default' def publish_samples(self, context, samples): """Publish samples on RPC. :param context: Execution context from the service or RPC call. :param samples: Samples from pipeline after transformation. """ meters = [ utils.meter_message_from_counter( sample, cfg.CONF.publisher.metering_secret) for sample in samples ] topic = cfg.CONF.publisher_rpc.metering_topic msg = { 'method': self.target, 'version': '1.0', 'args': {'data': meters}, } self.local_queue.append((context, topic, msg)) if self.per_meter_topic: for meter_name, meter_list in itertools.groupby( sorted(meters, key=operator.itemgetter('counter_name')), operator.itemgetter('counter_name')): msg = { 'method': self.target, 'version': '1.0', 'args': {'data': list(meter_list)}, } topic_name = topic + '.' 
+ meter_name LOG.audit(_('Publishing %(m)d samples on %(n)s') % ( {'m': len(msg['args']['data']), 'n': topic_name})) self.local_queue.append((context, topic_name, msg)) self.flush() def flush(self): #note(sileht): # IO of the rpc stuff in handled by eventlet, # this is why the self.local_queue, is emptied before processing the # queue and the remaining messages in the queue are added to # self.local_queue after in case of a other call have already added # something in the self.local_queue queue = self.local_queue self.local_queue = [] self.local_queue = self._process_queue(queue, self.policy) + \ self.local_queue if self.policy == 'queue': self._check_queue_length() def _check_queue_length(self): queue_length = len(self.local_queue) if queue_length > self.max_queue_length > 0: count = queue_length - self.max_queue_length self.local_queue = self.local_queue[count:] LOG.warn(_("Publisher max local_queue length is exceeded, " "dropping %d oldest samples") % count) @staticmethod def _process_queue(queue, policy): #note(sileht): # the behavior of rpc.cast call depends of rabbit_max_retries # if rabbit_max_retries <= 0: # it returns only if the msg has been sent on the amqp queue # if rabbit_max_retries > 0: # it raises a exception if rabbitmq is unreachable # # Ugly, but actually the oslo.rpc do a sys.exit(1) instead of a # RPCException, so we catch both until a correct behavior is # implemented in oslo # # the default policy just respect the rabbitmq configuration # nothing special is done if rabbit_max_retries <= 0 # and exception is reraised if rabbit_max_retries > 0 while queue: context, topic, msg = queue[0] try: rpc.cast(context, topic, msg) except (SystemExit, rpc.common.RPCException): samples = sum([len(m['args']['data']) for n, n, m in queue]) if policy == 'queue': LOG.warn(_("Failed to publish %d samples, queue them"), samples) return queue elif policy == 'drop': LOG.warn(_("Failed to publish %d samples, dropping them"), samples) return [] # default, occur only if rabbit_max_retries > 0 raise else: queue.pop(0) return [] ceilometer-2014.1.5/ceilometer/publisher/__init__.py0000664000567000056700000000274312540642615023505 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 Intel Corp. # Copyright © 2013 eNovance # # Author: Yunhong Jiang # Julien Danjou # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import six from stevedore import driver from ceilometer.openstack.common import network_utils def get_publisher(url, namespace='ceilometer.publisher'): """Get publisher driver and load it. :param URL: URL for the publisher :param namespace: Namespace to use to look for drivers. 
""" parse_result = network_utils.urlsplit(url) loaded_driver = driver.DriverManager(namespace, parse_result.scheme) return loaded_driver.driver(parse_result) @six.add_metaclass(abc.ABCMeta) class PublisherBase(object): """Base class for plugins that publish the sampler.""" def __init__(self, parsed_url): pass @abc.abstractmethod def publish_samples(self, context, samples): "Publish samples into final conduit." ceilometer-2014.1.5/ceilometer/publisher/file.py0000664000567000056700000000713212540642615022662 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp # # Author: Tong Li # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import logging.handlers import six.moves.urllib.parse as urlparse from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log from ceilometer import publisher LOG = log.getLogger(__name__) class FilePublisher(publisher.PublisherBase): """Publisher metering data to file. The publisher which records metering data into a file. The file name and location should be configured in ceilometer pipeline configuration file. If a file name and location is not specified, this File Publisher will not log any meters other than log a warning in Ceilometer log file. To enable this publisher, add the following section to file /etc/ceilometer/publisher.yaml or simply add it to an existing pipeline. - name: meter_file interval: 600 counters: - "*" transformers: publishers: - file:///var/test?max_bytes=10000000&backup_count=5 File path is required for this publisher to work properly. If max_bytes or backup_count is missing, FileHandler will be used to save the metering data. If max_bytes and backup_count are present, RotatingFileHandler will be used to save the metering data. 
""" def __init__(self, parsed_url): super(FilePublisher, self).__init__(parsed_url) self.publisher_logger = None path = parsed_url.path if not path or path.lower() == 'file': LOG.error(_('The path for the file publisher is required')) return rfh = None max_bytes = 0 backup_count = 0 # Handling other configuration options in the query string if parsed_url.query: params = urlparse.parse_qs(parsed_url.query) if params.get('max_bytes') and params.get('backup_count'): try: max_bytes = int(params.get('max_bytes')[0]) backup_count = int(params.get('backup_count')[0]) except ValueError: LOG.error(_('max_bytes and backup_count should be ' 'numbers.')) return # create rotating file handler rfh = logging.handlers.RotatingFileHandler( path, encoding='utf8', maxBytes=max_bytes, backupCount=backup_count) self.publisher_logger = logging.Logger('publisher.file') self.publisher_logger.propagate = False self.publisher_logger.setLevel(logging.INFO) rfh.setLevel(logging.INFO) self.publisher_logger.addHandler(rfh) def publish_samples(self, context, samples): """Send a metering message for publishing :param context: Execution context from the service or RPC call :param samples: Samples from pipeline after transformation """ if self.publisher_logger: for sample in samples: self.publisher_logger.info(sample.as_dict()) ceilometer-2014.1.5/ceilometer/publisher/utils.py0000664000567000056700000000606212540642615023104 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012 New Dream Network, LLC (DreamHost) # # Author: Doug Hellmann # Tyaptin Ilya # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utils for publishers """ import hashlib import hmac from oslo.config import cfg from ceilometer import utils METER_PUBLISH_OPTS = [ cfg.StrOpt('metering_secret', secret=True, default='change this or be hacked', help='Secret value for signing metering messages.', deprecated_opts=[cfg.DeprecatedOpt("metering_secret", "DEFAULT"), cfg.DeprecatedOpt("metering_secret", "publisher_rpc")] ), ] def register_opts(config): """Register the options for publishing metering messages. """ config.register_opts(METER_PUBLISH_OPTS, group="publisher") register_opts(cfg.CONF) def compute_signature(message, secret): """Return the signature for a message dictionary. """ digest_maker = hmac.new(secret, '', hashlib.sha256) for name, value in utils.recursive_keypairs(message): if name == 'message_signature': # Skip any existing signature value, which would not have # been part of the original message. continue digest_maker.update(name) digest_maker.update(unicode(value).encode('utf-8')) return digest_maker.hexdigest() def verify_signature(message, secret): """Check the signature in the message against the value computed from the rest of the contents. """ old_sig = message.get('message_signature') new_sig = compute_signature(message, secret) return new_sig == old_sig def meter_message_from_counter(sample, secret): """Make a metering message ready to be published or stored. 
Returns a dictionary containing a metering message for a notification message and a Sample instance. """ msg = {'source': sample.source, 'counter_name': sample.name, 'counter_type': sample.type, 'counter_unit': sample.unit, 'counter_volume': sample.volume, 'user_id': sample.user_id, 'project_id': sample.project_id, 'resource_id': sample.resource_id, 'timestamp': sample.timestamp, 'resource_metadata': sample.resource_metadata, 'message_id': sample.id, } msg['message_signature'] = compute_signature(msg, secret) return msg ceilometer-2014.1.5/ceilometer/api/0000775000567000056700000000000012540643223020136 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/api/controllers/0000775000567000056700000000000012540643223022504 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/api/controllers/v2.py0000664000567000056700000026343712540642615023430 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012 New Dream Network, LLC (DreamHost) # Copyright 2013 IBM Corp. # Copyright © 2013 eNovance # Copyright Ericsson AB 2013. All rights reserved # # Authors: Doug Hellmann # Angus Salkeld # Eoghan Glynn # Julien Danjou # Ildiko Vancsa # Balazs Gibizer # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Version 2 of the API. """ import ast import base64 import copy import croniter import datetime import functools import inspect import json import jsonschema import pytz import uuid from oslo.config import cfg import pecan from pecan import rest import six import wsme from wsme import types as wtypes import wsmeext.pecan as wsme_pecan from ceilometer.api import acl from ceilometer.openstack.common import context from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log from ceilometer.openstack.common.notifier import api as notify from ceilometer.openstack.common import strutils from ceilometer.openstack.common import timeutils from ceilometer import sample from ceilometer import storage from ceilometer import utils LOG = log.getLogger(__name__) ALARM_API_OPTS = [ cfg.BoolOpt('record_history', default=True, help='Record alarm change events.' 
), ] cfg.CONF.register_opts(ALARM_API_OPTS, group='alarm') state_kind = ["ok", "alarm", "insufficient data"] state_kind_enum = wtypes.Enum(str, *state_kind) operation_kind = wtypes.Enum(str, 'lt', 'le', 'eq', 'ne', 'ge', 'gt') class ClientSideError(wsme.exc.ClientSideError): def __init__(self, error, status_code=400): pecan.response.translatable_error = error super(ClientSideError, self).__init__(error, status_code) class EntityNotFound(ClientSideError): def __init__(self, entity, id): super(EntityNotFound, self).__init__( _("%(entity)s %(id)s Not Found") % {'entity': entity, 'id': id}, status_code=404) class AdvEnum(wtypes.wsproperty): """Handle default and mandatory for wtypes.Enum """ def __init__(self, name, *args, **kwargs): self._name = '_advenum_%s' % name self._default = kwargs.pop('default', None) mandatory = kwargs.pop('mandatory', False) enum = wtypes.Enum(*args, **kwargs) super(AdvEnum, self).__init__(datatype=enum, fget=self._get, fset=self._set, mandatory=mandatory) def _get(self, parent): if hasattr(parent, self._name): value = getattr(parent, self._name) return value or self._default return self._default def _set(self, parent, value): if self.datatype.validate(value): setattr(parent, self._name, value) class CronType(wtypes.UserType): """A user type that represents a cron format.""" basetype = six.string_types name = 'cron' @staticmethod def validate(value): # raises ValueError if invalid croniter.croniter(value) return value class _Base(wtypes.Base): @classmethod def from_db_model(cls, m): return cls(**(m.as_dict())) @classmethod def from_db_and_links(cls, m, links): return cls(links=links, **(m.as_dict())) def as_dict(self, db_model): valid_keys = inspect.getargspec(db_model.__init__)[0] if 'self' in valid_keys: valid_keys.remove('self') return self.as_dict_from_keys(valid_keys) def as_dict_from_keys(self, keys): return dict((k, getattr(self, k)) for k in keys if hasattr(self, k) and getattr(self, k) != wsme.Unset) class Link(_Base): """A link representation """ href = wtypes.text "The url of a link" rel = wtypes.text "The name of a link" @classmethod def sample(cls): return cls(href=('http://localhost:8777/v2/meters/volume?' 'q.field=resource_id&' 'q.value=bd9431c1-8d69-4ad3-803a-8d4a6b89fd36'), rel='volume' ) class Query(_Base): """Query filter. """ # The data types supported by the query. _supported_types = ['integer', 'float', 'string', 'boolean'] # Functions to convert the data field to the correct type. _type_converters = {'integer': int, 'float': float, 'boolean': functools.partial( strutils.bool_from_string, strict=True), 'string': six.text_type, 'datetime': timeutils.parse_isotime} _op = None # provide a default def get_op(self): return self._op or 'eq' def set_op(self, value): self._op = value field = wtypes.text "The name of the field to test" #op = wsme.wsattr(operation_kind, default='eq') # this ^ doesn't seem to work. op = wsme.wsproperty(operation_kind, get_op, set_op) "The comparison operator. Defaults to 'eq'." 
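    # NOTE: illustrative mapping only.  A URL filter such as
    #     ?q.field=resource_id&q.op=eq&q.value=bd9431c1-8d69-4ad3-803a-8d4a6b89fd36
    # is deserialized by WSME into roughly
    #     Query(field='resource_id', op='eq',
    #           value='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36')
    # with type left unset, in which case _get_value_as_type() falls back to
    # ast.literal_eval to guess the value's type.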
value = wtypes.text "The value to compare against the stored data" type = wtypes.text "The data type of value to compare against the stored data" def __repr__(self): # for logging calls return '' % (self.field, self.op, self.value, self.type) @classmethod def sample(cls): return cls(field='resource_id', op='eq', value='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36', type='string' ) def as_dict(self): return self.as_dict_from_keys(['field', 'op', 'type', 'value']) def _get_value_as_type(self, forced_type=None): """Convert metadata value to the specified data type. This method is called during metadata query to help convert the querying metadata to the data type specified by user. If there is no data type given, the metadata will be parsed by ast.literal_eval to try to do a smart converting. NOTE (flwang) Using "_" as prefix to avoid an InvocationError raised from wsmeext/sphinxext.py. It's OK to call it outside the Query class. Because the "public" side of that class is actually the outside of the API, and the "private" side is the API implementation. The method is only used in the API implementation, so it's OK. :returns: metadata value converted with the specified data type. """ type = forced_type or self.type try: converted_value = self.value if not type: try: converted_value = ast.literal_eval(self.value) except (ValueError, SyntaxError): msg = _('Failed to convert the metadata value %s' ' automatically') % (self.value) LOG.debug(msg) else: if type not in self._supported_types: # Types must be explicitly declared so the # correct type converter may be used. Subclasses # of Query may define _supported_types and # _type_converters to define their own types. raise TypeError() converted_value = self._type_converters[type](self.value) except ValueError: msg = _('Failed to convert the value %(value)s' ' to the expected data type %(type)s.') % \ {'value': self.value, 'type': type} raise ClientSideError(msg) except TypeError: msg = _('The data type %(type)s is not supported. The supported' ' data type list is: %(supported)s') % \ {'type': type, 'supported': self._supported_types} raise ClientSideError(msg) except Exception: msg = _('Unexpected exception converting %(value)s to' ' the expected data type %(type)s.') % \ {'value': self.value, 'type': type} raise ClientSideError(msg) return converted_value class ProjectNotAuthorized(ClientSideError): def __init__(self, id, aspect='project'): params = dict(aspect=aspect, id=id) super(ProjectNotAuthorized, self).__init__( _("Not Authorized to access %(aspect)s %(id)s") % params, status_code=401) def _get_auth_project(on_behalf_of=None): # when an alarm is created by an admin on behalf of another tenant # we must ensure for: # - threshold alarm, that an implicit query constraint on project_id is # added so that admin-level visibility on statistics is not leaked # - combination alarm, that alarm ids verification is scoped to # alarms owned by the alarm project. 
# hence for null auth_project (indicating admin-ness) we check if # the creating tenant differs from the tenant on whose behalf the # alarm is being created auth_project = acl.get_limited_to_project(pecan.request.headers) created_by = pecan.request.headers.get('X-Project-Id') is_admin = auth_project is None if is_admin and on_behalf_of != created_by: auth_project = on_behalf_of return auth_project def _sanitize_query(query, db_func, on_behalf_of=None): '''Check the query to see if: 1) the request is coming from admin - then allow full visibility 2) non-admin - make sure that the query includes the requester's project. ''' q = copy.copy(query) auth_project = _get_auth_project(on_behalf_of) if auth_project: _verify_query_segregation(q, auth_project) proj_q = [i for i in q if i.field == 'project_id'] valid_keys = inspect.getargspec(db_func)[0] if not proj_q and 'on_behalf_of' not in valid_keys: # The user is restricted, but they didn't specify a project # so add it for them. q.append(Query(field='project_id', op='eq', value=auth_project)) return q def _verify_query_segregation(query, auth_project=None): '''Ensure non-admin queries are not constrained to another project.''' auth_project = (auth_project or acl.get_limited_to_project(pecan.request.headers)) if auth_project: for q in query: if q.field == 'project_id' and (auth_project != q.value or q.op != 'eq'): raise ProjectNotAuthorized(q.value) def _validate_query(query, db_func, internal_keys=[], allow_timestamps=True): """Validates the syntax of the query and verifies that the query request is authorized for the included project. :param query: Query expression that should be validated :param db_func: the function on the storage level, of which arguments will form the valid_keys list, which defines the valid fields for a query expression :param internal_keys: internally used field names, that should not be used for querying :param allow_timestamps: defines whether the timestamp-based constraint is applicable for this query or not :returns: None, if the query is valid :raises InvalidInput: if an operator is not supported for a given field :raises InvalidInput: if timestamp constraints are allowed, but search_offset was included without timestamp constraint :raises: UnknownArgument: if a field name is not a timestamp field, nor in the list of valid keys """ _verify_query_segregation(query) valid_keys = inspect.getargspec(db_func)[0] internal_keys.append('self') valid_keys = set(valid_keys) - set(internal_keys) translation = {'user_id': 'user', 'project_id': 'project', 'resource_id': 'resource'} has_timestamp_query = _validate_timestamp_fields(query, 'timestamp', ('lt', 'le', 'gt', 'ge'), allow_timestamps) has_search_offset_query = _validate_timestamp_fields(query, 'search_offset', ('eq'), allow_timestamps) if has_search_offset_query and not has_timestamp_query: raise wsme.exc.InvalidInput('field', 'search_offset', "search_offset cannot be used without " + "timestamp") def _is_field_metadata(field): return (field.startswith('metadata.') or field.startswith('resource_metadata.')) for i in query: if i.field not in ('timestamp', 'search_offset'): key = translation.get(i.field, i.field) operator = i.op if (key in valid_keys or _is_field_metadata(i.field)): if operator == 'eq': if key == 'enabled': i._get_value_as_type('boolean') elif _is_field_metadata(key): i._get_value_as_type() else: raise wsme.exc.InvalidInput('op', i.op, 'unimplemented operator for ' '%s' % i.field) else: msg = ("unrecognized field in query: %s, " "valid keys: %s") % (query, 
valid_keys) raise wsme.exc.UnknownArgument(key, msg) def _validate_timestamp_fields(query, field_name, operator_list, allow_timestamps): """Validates the timestamp related constraints in a query expression, if there are any. :param query: query expression that may contain the timestamp fields :param field_name: timestamp name, which should be checked (timestamp, search_offset) :param operator_list: list of operators that are supported for that timestamp, which was specified in the parameter field_name :param allow_timestamps: defines whether the timestamp-based constraint is applicable to this query or not :returns: True, if there was a timestamp constraint, containing a timestamp field named as defined in field_name, in the query and it was allowed and syntactically correct. :returns: False, if there wasn't timestamp constraint, containing a timestamp field named as defined in field_name, in the query :raises InvalidInput: if an operator is unsupported for a given timestamp field :raises UnknownArgument: if the timestamp constraint is not allowed in the query """ for item in query: if item.field == field_name: #If *timestamp* or *search_offset* field was specified in the #query, but timestamp is not supported on that resource, on #which the query was invoked, then raise an exception. if not allow_timestamps: raise wsme.exc.UnknownArgument(field_name, "not valid for " + "this resource") if item.op not in operator_list: raise wsme.exc.InvalidInput('op', item.op, 'unimplemented operator for %s' % item.field) return True return False def _query_to_kwargs(query, db_func, internal_keys=[], allow_timestamps=True): _validate_query(query, db_func, internal_keys=internal_keys, allow_timestamps=allow_timestamps) query = _sanitize_query(query, db_func) internal_keys.append('self') valid_keys = set(inspect.getargspec(db_func)[0]) - set(internal_keys) translation = {'user_id': 'user', 'project_id': 'project', 'resource_id': 'resource'} stamp = {} metaquery = {} kwargs = {} for i in query: if i.field == 'timestamp': if i.op in ('lt', 'le'): stamp['end_timestamp'] = i.value stamp['end_timestamp_op'] = i.op elif i.op in ('gt', 'ge'): stamp['start_timestamp'] = i.value stamp['start_timestamp_op'] = i.op else: if i.op == 'eq': if i.field == 'search_offset': stamp['search_offset'] = i.value elif i.field == 'enabled': kwargs[i.field] = i._get_value_as_type('boolean') elif i.field.startswith('metadata.'): metaquery[i.field] = i._get_value_as_type() elif i.field.startswith('resource_metadata.'): metaquery[i.field[9:]] = i._get_value_as_type() else: key = translation.get(i.field, i.field) kwargs[key] = i.value if metaquery and 'metaquery' in valid_keys: kwargs['metaquery'] = metaquery if stamp: q_ts = _get_query_timestamps(stamp) if 'start' in valid_keys: kwargs['start'] = q_ts['query_start'] kwargs['end'] = q_ts['query_end'] elif 'start_timestamp' in valid_keys: kwargs['start_timestamp'] = q_ts['query_start'] kwargs['end_timestamp'] = q_ts['query_end'] if 'start_timestamp_op' in stamp: kwargs['start_timestamp_op'] = stamp['start_timestamp_op'] if 'end_timestamp_op' in stamp: kwargs['end_timestamp_op'] = stamp['end_timestamp_op'] return kwargs def _validate_groupby_fields(groupby_fields): """Checks that the list of groupby fields from request is valid and if all fields are valid, returns fields with duplicates removed """ # NOTE(terriyu): Currently, metadata fields are not supported in our # group by statistics implementation valid_fields = set(['user_id', 'resource_id', 'project_id', 'source']) invalid_fields = 
set(groupby_fields) - valid_fields if invalid_fields: raise wsme.exc.UnknownArgument(invalid_fields, "Invalid groupby fields") # Remove duplicate fields # NOTE(terriyu): This assumes that we don't care about the order of the # group by fields. return list(set(groupby_fields)) def _get_query_timestamps(args={}): """Return any optional timestamp information in the request. Determine the desired range, if any, from the GET arguments. Set up the query range using the specified offset. [query_start ... start_timestamp ... end_timestamp ... query_end] Returns a dictionary containing: query_start: First timestamp to use for query start_timestamp: start_timestamp parameter from request query_end: Final timestamp to use for query end_timestamp: end_timestamp parameter from request search_offset: search_offset parameter from request """ search_offset = int(args.get('search_offset', 0)) start_timestamp = args.get('start_timestamp') if start_timestamp: start_timestamp = timeutils.parse_isotime(start_timestamp) start_timestamp = start_timestamp.replace(tzinfo=None) query_start = (start_timestamp - datetime.timedelta(minutes=search_offset)) else: query_start = None end_timestamp = args.get('end_timestamp') if end_timestamp: end_timestamp = timeutils.parse_isotime(end_timestamp) end_timestamp = end_timestamp.replace(tzinfo=None) query_end = end_timestamp + datetime.timedelta(minutes=search_offset) else: query_end = None return {'query_start': query_start, 'query_end': query_end, 'start_timestamp': start_timestamp, 'end_timestamp': end_timestamp, 'search_offset': search_offset, } def _flatten_metadata(metadata): """Return flattened resource metadata with flattened nested structures (except nested sets) and with all values converted to unicode strings. """ if metadata: # After changing recursive_keypairs` output we need to keep # flattening output unchanged. # Example: recursive_keypairs({'a': {'b':{'c':'d'}}}, '.') # output before: a.b:c=d # output now: a.b.c=d # So to keep the first variant just replace all dots except the first return dict((k.replace('.', ':').replace(':', '.', 1), unicode(v)) for k, v in utils.recursive_keypairs(metadata, separator='.') if type(v) is not set) return {} def _make_link(rel_name, url, type, type_arg, query=None): query_str = '' if query: query_str = '?q.field=%s&q.value=%s' % (query['field'], query['value']) return Link(href=('%s/v2/%s/%s%s') % (url, type, type_arg, query_str), rel=rel_name) def _send_notification(event, payload): notification = event.replace(" ", "_") notification = "alarm.%s" % notification notify.notify(None, notify.publisher_id("ceilometer.api"), notification, notify.INFO, payload) class OldSample(_Base): """A single measurement for a given meter and resource. This class is deprecated in favor of Sample. """ source = wtypes.text "The ID of the source that identifies where the sample comes from" counter_name = wsme.wsattr(wtypes.text, mandatory=True) "The name of the meter" # FIXME(dhellmann): Make this meter_name? counter_type = wsme.wsattr(wtypes.text, mandatory=True) "The type of the meter (see :ref:`measurements`)" # FIXME(dhellmann): Make this meter_type? counter_unit = wsme.wsattr(wtypes.text, mandatory=True) "The unit of measure for the value in counter_volume" # FIXME(dhellmann): Make this meter_unit? 
counter_volume = wsme.wsattr(float, mandatory=True) "The actual measured value" user_id = wtypes.text "The ID of the user who last triggered an update to the resource" project_id = wtypes.text "The ID of the project or tenant that owns the resource" resource_id = wsme.wsattr(wtypes.text, mandatory=True) "The ID of the :class:`Resource` for which the measurements are taken" timestamp = datetime.datetime "UTC date and time when the measurement was made" recorded_at = datetime.datetime "When the sample has been recorded." resource_metadata = {wtypes.text: wtypes.text} "Arbitrary metadata associated with the resource" message_id = wtypes.text "A unique identifier for the sample" def __init__(self, counter_volume=None, resource_metadata={}, timestamp=None, **kwds): if counter_volume is not None: counter_volume = float(counter_volume) resource_metadata = _flatten_metadata(resource_metadata) # this is to make it easier for clients to pass a timestamp in if timestamp and isinstance(timestamp, basestring): timestamp = timeutils.parse_isotime(timestamp) super(OldSample, self).__init__(counter_volume=counter_volume, resource_metadata=resource_metadata, timestamp=timestamp, **kwds) if self.resource_metadata in (wtypes.Unset, None): self.resource_metadata = {} @classmethod def sample(cls): return cls(source='openstack', counter_name='instance', counter_type='gauge', counter_unit='instance', counter_volume=1, resource_id='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36', project_id='35b17138-b364-4e6a-a131-8f3099c5be68', user_id='efd87807-12d2-4b38-9c70-5f5c2ac427ff', recorded_at=datetime.datetime.utcnow(), timestamp=datetime.datetime.utcnow(), resource_metadata={'name1': 'value1', 'name2': 'value2'}, message_id='5460acce-4fd6-480d-ab18-9735ec7b1996', ) class Statistics(_Base): """Computed statistics for a query. """ groupby = {wtypes.text: wtypes.text} "Dictionary of field names for group, if groupby statistics are requested" unit = wtypes.text "The unit type of the data set" min = float "The minimum volume seen in the data" max = float "The maximum volume seen in the data" avg = float "The average of all of the volume values seen in the data" sum = float "The total of all of the volume values seen in the data" count = int "The number of samples seen" aggregate = {wtypes.text: float} "The selectable aggregate value(s)" duration = float "The difference, in seconds, between the oldest and newest timestamp" duration_start = datetime.datetime "UTC date and time of the earliest timestamp, or the query start time" duration_end = datetime.datetime "UTC date and time of the oldest timestamp, or the query end time" period = int "The difference, in seconds, between the period start and end" period_start = datetime.datetime "UTC date and time of the period start" period_end = datetime.datetime "UTC date and time of the period end" def __init__(self, start_timestamp=None, end_timestamp=None, **kwds): super(Statistics, self).__init__(**kwds) self._update_duration(start_timestamp, end_timestamp) def _update_duration(self, start_timestamp, end_timestamp): # "Clamp" the timestamps we return to the original time # range, excluding the offset. 
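        # NOTE: worked example (illustrative values): if the caller asked for
        # start=16:00 but search_offset widened the window so the oldest
        # matching sample is from 15:58, duration_start is clamped forward to
        # 16:00; likewise duration_end is clamped back to the requested end.
        # If clamping inverts the interval, every duration field is reset to
        # None below.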
if (start_timestamp and self.duration_start and self.duration_start < start_timestamp): self.duration_start = start_timestamp LOG.debug(_('clamping min timestamp to range')) if (end_timestamp and self.duration_end and self.duration_end > end_timestamp): self.duration_end = end_timestamp LOG.debug(_('clamping max timestamp to range')) # If we got valid timestamps back, compute a duration in seconds. # # If the min > max after clamping then we know the # timestamps on the samples fell outside of the time # range we care about for the query, so treat them as # "invalid." # # If the timestamps are invalid, return None as a # sentinel indicating that there is something "funny" # about the range. if (self.duration_start and self.duration_end and self.duration_start <= self.duration_end): self.duration = timeutils.delta_seconds(self.duration_start, self.duration_end) else: self.duration_start = self.duration_end = self.duration = None @classmethod def sample(cls): return cls(unit='GiB', min=1, max=9, avg=4.5, sum=45, count=10, duration_start=datetime.datetime(2013, 1, 4, 16, 42), duration_end=datetime.datetime(2013, 1, 4, 16, 47), period=7200, period_start=datetime.datetime(2013, 1, 4, 16, 00), period_end=datetime.datetime(2013, 1, 4, 18, 00), ) class Aggregate(_Base): func = wsme.wsattr(wtypes.text, mandatory=True) "The aggregation function name" param = wsme.wsattr(wtypes.text, default=None) "The paramter to the aggregation function" def __init__(self, **kwargs): super(Aggregate, self).__init__(**kwargs) @staticmethod def validate(aggregate): return aggregate @classmethod def sample(cls): return cls(func='cardinality', param='resource_id') class MeterController(rest.RestController): """Manages operations on a single meter. """ _custom_actions = { 'statistics': ['GET'], } def __init__(self, meter_name): pecan.request.context['meter_name'] = meter_name self.meter_name = meter_name @wsme_pecan.wsexpose([OldSample], [Query], int) def get_all(self, q=[], limit=None): """Return samples for the meter. :param q: Filter rules for the data to be returned. :param limit: Maximum number of samples to return. """ if limit and limit < 0: raise ClientSideError(_("Limit must be positive")) kwargs = _query_to_kwargs(q, storage.SampleFilter.__init__) kwargs['meter'] = self.meter_name f = storage.SampleFilter(**kwargs) return [OldSample.from_db_model(e) for e in pecan.request.storage_conn.get_samples(f, limit=limit) ] @wsme_pecan.wsexpose([OldSample], body=[OldSample]) def post(self, samples): """Post a list of new Samples to Telemetry. :param samples: a list of samples within the request body. 
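        A minimal, illustrative request body containing one sample (only the
        mandatory fields are shown; counter_name must match the meter named in
        the URL):

            [{"counter_name": "instance",
              "counter_type": "gauge",
              "counter_unit": "instance",
              "counter_volume": 1,
              "resource_id": "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36"}]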
""" now = timeutils.utcnow() auth_project = acl.get_limited_to_project(pecan.request.headers) def_source = pecan.request.cfg.sample_source def_project_id = pecan.request.headers.get('X-Project-Id') def_user_id = pecan.request.headers.get('X-User-Id') published_samples = [] for s in samples: if self.meter_name != s.counter_name: raise wsme.exc.InvalidInput('counter_name', s.counter_name, 'should be %s' % self.meter_name) if s.message_id: raise wsme.exc.InvalidInput('message_id', s.message_id, 'The message_id must not be set') if s.counter_type not in sample.TYPES: raise wsme.exc.InvalidInput('counter_type', s.counter_type, 'The counter type must be: ' + ', '.join(sample.TYPES)) s.user_id = (s.user_id or def_user_id) s.project_id = (s.project_id or def_project_id) s.source = '%s:%s' % (s.project_id, (s.source or def_source)) s.timestamp = (s.timestamp or now) if auth_project and auth_project != s.project_id: # non admin user trying to cross post to another project_id auth_msg = 'can not post samples to other projects' raise wsme.exc.InvalidInput('project_id', s.project_id, auth_msg) published_sample = sample.Sample( name=s.counter_name, type=s.counter_type, unit=s.counter_unit, volume=s.counter_volume, user_id=s.user_id, project_id=s.project_id, resource_id=s.resource_id, timestamp=s.timestamp.isoformat(), resource_metadata=utils.restore_nesting(s.resource_metadata, separator='.'), source=s.source) published_samples.append(published_sample) s.message_id = published_sample.id with pecan.request.pipeline_manager.publisher( context.get_admin_context()) as publisher: publisher(published_samples) return samples @wsme_pecan.wsexpose([Statistics], [Query], [unicode], int, [Aggregate]) def statistics(self, q=[], groupby=[], period=None, aggregate=[]): """Computes the statistics of the samples in the time range given. :param q: Filter rules for the data to be returned. :param groupby: Fields for group by aggregation :param period: Returned result will be an array of statistics for a period long of that number of seconds. :param aggregate: The selectable aggregation functions to be applied. """ if period and period < 0: raise ClientSideError(_("Period must be positive.")) kwargs = _query_to_kwargs(q, storage.SampleFilter.__init__) kwargs['meter'] = self.meter_name f = storage.SampleFilter(**kwargs) g = _validate_groupby_fields(groupby) aggregate = utils.uniq(aggregate, ['func', 'param']) computed = pecan.request.storage_conn.get_meter_statistics(f, period, g, aggregate) LOG.debug(_('computed value coming from %r'), pecan.request.storage_conn) # Find the original timestamp in the query to use for clamping # the duration returned in the statistics. start = end = None for i in q: if i.field == 'timestamp' and i.op in ('lt', 'le'): end = timeutils.parse_isotime(i.value).replace(tzinfo=None) elif i.field == 'timestamp' and i.op in ('gt', 'ge'): start = timeutils.parse_isotime(i.value).replace(tzinfo=None) return [Statistics(start_timestamp=start, end_timestamp=end, **c.as_dict()) for c in computed] class Meter(_Base): """One category of measurements. 
""" name = wtypes.text "The unique name for the meter" type = wtypes.Enum(str, *sample.TYPES) "The meter type (see :ref:`measurements`)" unit = wtypes.text "The unit of measure" resource_id = wtypes.text "The ID of the :class:`Resource` for which the measurements are taken" project_id = wtypes.text "The ID of the project or tenant that owns the resource" user_id = wtypes.text "The ID of the user who last triggered an update to the resource" source = wtypes.text "The ID of the source that identifies where the meter comes from" meter_id = wtypes.text "The unique identifier for the meter" def __init__(self, **kwargs): meter_id = base64.encodestring('%s+%s' % (kwargs['resource_id'], kwargs['name'])) kwargs['meter_id'] = meter_id super(Meter, self).__init__(**kwargs) @classmethod def sample(cls): return cls(name='instance', type='gauge', unit='instance', resource_id='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36', project_id='35b17138-b364-4e6a-a131-8f3099c5be68', user_id='efd87807-12d2-4b38-9c70-5f5c2ac427ff', source='openstack', ) class MetersController(rest.RestController): """Works on meters.""" @pecan.expose() def _lookup(self, meter_name, *remainder): return MeterController(meter_name), remainder @wsme_pecan.wsexpose([Meter], [Query]) def get_all(self, q=[]): """Return all known meters, based on the data recorded so far. :param q: Filter rules for the meters to be returned. """ #Timestamp field is not supported for Meter queries kwargs = _query_to_kwargs(q, pecan.request.storage_conn.get_meters, allow_timestamps=False) return [Meter.from_db_model(m) for m in pecan.request.storage_conn.get_meters(**kwargs)] class Sample(_Base): """One measurement.""" id = wtypes.text "The unique identifier for the sample." meter = wtypes.text "The meter name this sample is for." type = wtypes.Enum(str, *sample.TYPES) "The meter type (see :ref:`measurements`)" unit = wtypes.text "The unit of measure." volume = float "The metered value." user_id = wtypes.text "The user this sample was taken for." project_id = wtypes.text "The project this sample was taken for." resource_id = wtypes.text "The :class:`Resource` this sample was taken for." source = wtypes.text "The source that identifies where the sample comes from." timestamp = datetime.datetime "When the sample has been generated." recorded_at = datetime.datetime "When the sample has been recorded." metadata = {wtypes.text: wtypes.text} "Arbitrary metadata associated with the sample." @classmethod def from_db_model(cls, m): return cls(id=m.message_id, meter=m.counter_name, type=m.counter_type, unit=m.counter_unit, volume=m.counter_volume, user_id=m.user_id, project_id=m.project_id, resource_id=m.resource_id, source=m.source, timestamp=m.timestamp, recorded_at=m.recorded_at, metadata=_flatten_metadata(m.resource_metadata)) @classmethod def sample(cls): return cls(id=str(uuid.uuid1()), meter='instance', type='gauge', unit='instance', volume=1, resource_id='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36', project_id='35b17138-b364-4e6a-a131-8f3099c5be68', user_id='efd87807-12d2-4b38-9c70-5f5c2ac427ff', timestamp=timeutils.utcnow(), recorded_at=datetime.datetime.utcnow(), source='openstack', metadata={'name1': 'value1', 'name2': 'value2'}, ) class SamplesController(rest.RestController): """Controller managing the samples.""" @wsme_pecan.wsexpose([Sample], [Query], int) def get_all(self, q=[], limit=None): """Return all known samples, based on the data recorded so far. :param q: Filter rules for the samples to be returned. :param limit: Maximum number of samples to be returned. 
""" if limit and limit < 0: raise ClientSideError(_("Limit must be positive")) kwargs = _query_to_kwargs(q, storage.SampleFilter.__init__) f = storage.SampleFilter(**kwargs) return map(Sample.from_db_model, pecan.request.storage_conn.get_samples(f, limit=limit)) @wsme_pecan.wsexpose(Sample, wtypes.text) def get_one(self, sample_id): """Return a sample :param sample_id: the id of the sample """ f = storage.SampleFilter(message_id=sample_id) samples = list(pecan.request.storage_conn.get_samples(f)) if len(samples) < 1: raise EntityNotFound(_('Sample'), sample_id) return Sample.from_db_model(samples[0]) class ComplexQuery(_Base): """Holds a sample query encoded in json.""" filter = wtypes.text "The filter expression encoded in json." orderby = wtypes.text "List of single-element dicts for specifing the ordering of the results." limit = int "The maximum number of results to be returned." @classmethod def sample(cls): return cls(filter='{\"and\": [{\"and\": [{\"=\": ' + '{\"counter_name\": \"cpu_util\"}}, ' + '{\">\": {\"counter_volume\": 0.23}}, ' + '{\"<\": {\"counter_volume\": 0.26}}]}, ' + '{\"or\": [{\"and\": [{\">\": ' + '{\"timestamp\": \"2013-12-01T18:00:00\"}}, ' + '{\"<\": ' + '{\"timestamp\": \"2013-12-01T18:15:00\"}}]}, ' + '{\"and\": [{\">\": ' + '{\"timestamp\": \"2013-12-01T18:30:00\"}}, ' + '{\"<\": ' + '{\"timestamp\": \"2013-12-01T18:45:00\"}}]}]}]}', orderby='[{\"counter_volume\": \"ASC\"}, ' + '{\"timestamp\": \"DESC\"}]', limit=42 ) def _list_to_regexp(items, regexp_prefix=""): regexp = ["^%s$" % item for item in items] regexp = regexp_prefix + "|".join(regexp) return regexp class ValidatedComplexQuery(object): complex_operators = ["and", "or"] order_directions = ["asc", "desc"] simple_ops = ["=", "!=", "<", ">", "<=", "=<", ">=", "=>"] regexp_prefix = "(?i)" complex_ops = _list_to_regexp(complex_operators, regexp_prefix) simple_ops = _list_to_regexp(simple_ops, regexp_prefix) order_directions = _list_to_regexp(order_directions, regexp_prefix) timestamp_fields = ["timestamp", "state_timestamp"] def __init__(self, query, db_model, additional_name_mapping={}, metadata_allowed=False): self.name_mapping = {"user": "user_id", "project": "project_id"} self.name_mapping.update(additional_name_mapping) valid_keys = db_model.get_field_names() valid_keys = list(valid_keys) + self.name_mapping.keys() valid_fields = _list_to_regexp(valid_keys) if metadata_allowed: valid_filter_fields = valid_fields + "|^metadata\.[\S]+$" else: valid_filter_fields = valid_fields schema_value = { "oneOf": [{"type": "string"}, {"type": "number"}, {"type": "boolean"}], "minProperties": 1, "maxProperties": 1} schema_value_in = { "type": "array", "items": {"oneOf": [{"type": "string"}, {"type": "number"}]}} schema_field = { "type": "object", "patternProperties": {valid_filter_fields: schema_value}, "additionalProperties": False, "minProperties": 1, "maxProperties": 1} schema_field_in = { "type": "object", "patternProperties": {valid_filter_fields: schema_value_in}, "additionalProperties": False, "minProperties": 1, "maxProperties": 1} schema_leaf_in = { "type": "object", "patternProperties": {"(?i)^in$": schema_field_in}, "additionalProperties": False, "minProperties": 1, "maxProperties": 1} schema_leaf_simple_ops = { "type": "object", "patternProperties": {self.simple_ops: schema_field}, "additionalProperties": False, "minProperties": 1, "maxProperties": 1} schema_and_or_array = { "type": "array", "items": {"$ref": "#"}, "minItems": 2} schema_and_or = { "type": "object", "patternProperties": 
{self.complex_ops: schema_and_or_array}, "additionalProperties": False, "minProperties": 1, "maxProperties": 1} schema_not = { "type": "object", "patternProperties": {"(?i)^not$": {"$ref": "#"}}, "additionalProperties": False, "minProperties": 1, "maxProperties": 1} self.schema = { "oneOf": [{"$ref": "#/definitions/leaf_simple_ops"}, {"$ref": "#/definitions/leaf_in"}, {"$ref": "#/definitions/and_or"}, {"$ref": "#/definitions/not"}], "minProperties": 1, "maxProperties": 1, "definitions": {"leaf_simple_ops": schema_leaf_simple_ops, "leaf_in": schema_leaf_in, "and_or": schema_and_or, "not": schema_not}} self.orderby_schema = { "type": "array", "items": { "type": "object", "patternProperties": {valid_fields: {"type": "string", "pattern": self.order_directions}}, "additionalProperties": False, "minProperties": 1, "maxProperties": 1}} self.original_query = query def validate(self, visibility_field): """Validates the query content and does the necessary transformations. """ if self.original_query.filter is wtypes.Unset: self.filter_expr = None else: self.filter_expr = json.loads(self.original_query.filter) self._validate_filter(self.filter_expr) self._replace_isotime_with_datetime(self.filter_expr) self._convert_operator_to_lower_case(self.filter_expr) self._normalize_field_names_for_db_model(self.filter_expr) self._force_visibility(visibility_field) if self.original_query.orderby is wtypes.Unset: self.orderby = None else: self.orderby = json.loads(self.original_query.orderby) self._validate_orderby(self.orderby) self._convert_orderby_to_lower_case(self.orderby) self._normalize_field_names_in_orderby(self.orderby) if self.original_query.limit is wtypes.Unset: self.limit = None else: self.limit = self.original_query.limit if self.limit is not None and self.limit <= 0: msg = _('Limit should be positive') raise ClientSideError(msg) @staticmethod def _convert_orderby_to_lower_case(orderby): for orderby_field in orderby: utils.lowercase_values(orderby_field) def _normalize_field_names_in_orderby(self, orderby): for orderby_field in orderby: self._replace_field_names(orderby_field) def _traverse_postorder(self, tree, visitor): op = tree.keys()[0] if op.lower() in self.complex_operators: for i, operand in enumerate(tree[op]): self._traverse_postorder(operand, visitor) if op.lower() == "not": self._traverse_postorder(tree[op], visitor) visitor(tree) def _check_cross_project_references(self, own_project_id, visibility_field): """Do not allow other than own_project_id """ def check_project_id(subfilter): op = subfilter.keys()[0] if (op.lower() not in self.complex_operators and subfilter[op].keys()[0] == visibility_field and subfilter[op][visibility_field] != own_project_id): raise ProjectNotAuthorized(subfilter[op][visibility_field]) self._traverse_postorder(self.filter_expr, check_project_id) def _force_visibility(self, visibility_field): """If the tenant is not admin insert an extra "and =" clause to the query """ authorized_project = acl.get_limited_to_project(pecan.request.headers) is_admin = authorized_project is None if not is_admin: self._restrict_to_project(authorized_project, visibility_field) self._check_cross_project_references(authorized_project, visibility_field) def _restrict_to_project(self, project_id, visibility_field): restriction = {"=": {visibility_field: project_id}} if self.filter_expr is None: self.filter_expr = restriction else: self.filter_expr = {"and": [restriction, self.filter_expr]} def _replace_isotime_with_datetime(self, filter_expr): def replace_isotime(subfilter): op = 
subfilter.keys()[0] if (op.lower() not in self.complex_operators and subfilter[op].keys()[0] in self.timestamp_fields): field = subfilter[op].keys()[0] date_time = self._convert_to_datetime(subfilter[op][field]) subfilter[op][field] = date_time self._traverse_postorder(filter_expr, replace_isotime) def _normalize_field_names_for_db_model(self, filter_expr): def _normalize_field_names(subfilter): op = subfilter.keys()[0] if op.lower() not in self.complex_operators: self._replace_field_names(subfilter.values()[0]) self._traverse_postorder(filter_expr, _normalize_field_names) def _replace_field_names(self, subfilter): field = subfilter.keys()[0] value = subfilter[field] if field in self.name_mapping: del subfilter[field] subfilter[self.name_mapping[field]] = value if field.startswith("metadata."): del subfilter[field] subfilter["resource_" + field] = value def _convert_operator_to_lower_case(self, filter_expr): self._traverse_postorder(filter_expr, utils.lowercase_keys) @staticmethod def _convert_to_datetime(isotime): try: date_time = timeutils.parse_isotime(isotime) date_time = date_time.replace(tzinfo=None) return date_time except ValueError: LOG.exception(_("String %s is not a valid isotime") % isotime) msg = _('Failed to parse the timestamp value %s') % isotime raise ClientSideError(msg) def _validate_filter(self, filter_expr): jsonschema.validate(filter_expr, self.schema) def _validate_orderby(self, orderby_expr): jsonschema.validate(orderby_expr, self.orderby_schema) class Resource(_Base): """An externally defined object for which samples have been received. """ resource_id = wtypes.text "The unique identifier for the resource" project_id = wtypes.text "The ID of the owning project or tenant" user_id = wtypes.text "The ID of the user who created the resource or updated it last" first_sample_timestamp = datetime.datetime "UTC date & time not later than the first sample known for this resource" last_sample_timestamp = datetime.datetime "UTC date & time not earlier than the last sample known for this resource" metadata = {wtypes.text: wtypes.text} "Arbitrary metadata associated with the resource" links = [Link] "A list containing a self link and associated meter links" source = wtypes.text "The source where the resource come from" def __init__(self, metadata={}, **kwds): metadata = _flatten_metadata(metadata) super(Resource, self).__init__(metadata=metadata, **kwds) @classmethod def sample(cls): return cls(resource_id='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36', project_id='35b17138-b364-4e6a-a131-8f3099c5be68', user_id='efd87807-12d2-4b38-9c70-5f5c2ac427ff', timestamp=datetime.datetime.utcnow(), source="openstack", metadata={'name1': 'value1', 'name2': 'value2'}, links=[Link(href=('http://localhost:8777/v2/resources/' 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36'), rel='self'), Link(href=('http://localhost:8777/v2/meters/volume?' 'q.field=resource_id&' 'q.value=bd9431c1-8d69-4ad3-803a-' '8d4a6b89fd36'), rel='volume')], ) class ResourcesController(rest.RestController): """Works on resources.""" def _resource_links(self, resource_id, meter_links=1): links = [_make_link('self', pecan.request.host_url, 'resources', resource_id)] if meter_links: for meter in pecan.request.storage_conn.get_meters(resource= resource_id): query = {'field': 'resource_id', 'value': resource_id} links.append(_make_link(meter.name, pecan.request.host_url, 'meters', meter.name, query=query)) return links @wsme_pecan.wsexpose(Resource, unicode) def get_one(self, resource_id): """Retrieve details about one resource. 
:param resource_id: The UUID of the resource. """ authorized_project = acl.get_limited_to_project(pecan.request.headers) resources = list(pecan.request.storage_conn.get_resources( resource=resource_id, project=authorized_project)) if not resources: raise EntityNotFound(_('Resource'), resource_id) return Resource.from_db_and_links(resources[0], self._resource_links(resource_id)) @wsme_pecan.wsexpose([Resource], [Query], int) def get_all(self, q=[], meter_links=1): """Retrieve definitions of all of the resources. :param q: Filter rules for the resources to be returned. :param meter_links: option to include related meter links """ kwargs = _query_to_kwargs(q, pecan.request.storage_conn.get_resources) resources = [ Resource.from_db_and_links(r, self._resource_links(r.resource_id, meter_links)) for r in pecan.request.storage_conn.get_resources(**kwargs)] return resources class AlarmThresholdRule(_Base): meter_name = wsme.wsattr(wtypes.text, mandatory=True) "The name of the meter" #FIXME(sileht): default doesn't work #workaround: default is set in validate method query = wsme.wsattr([Query], default=[]) """The query to find the data for computing statistics. Ownership settings are automatically included based on the Alarm owner. """ period = wsme.wsattr(wtypes.IntegerType(minimum=1), default=60) "The time range in seconds over which query" comparison_operator = AdvEnum('comparison_operator', str, 'lt', 'le', 'eq', 'ne', 'ge', 'gt', default='eq') "The comparison against the alarm threshold" threshold = wsme.wsattr(float, mandatory=True) "The threshold of the alarm" statistic = AdvEnum('statistic', str, 'max', 'min', 'avg', 'sum', 'count', default='avg') "The statistic to compare to the threshold" evaluation_periods = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1) "The number of historical periods to evaluate the threshold" exclude_outliers = wsme.wsattr(bool, default=False) "Whether datapoints with anomalously low sample counts are excluded" def __init__(self, query=None, **kwargs): if query: query = [Query(**q) for q in query] super(AlarmThresholdRule, self).__init__(query=query, **kwargs) @staticmethod def validate(threshold_rule): #note(sileht): wsme default doesn't work in some case #workaround for https://bugs.launchpad.net/wsme/+bug/1227039 if not threshold_rule.query: threshold_rule.query = [] #Timestamp is not allowed for AlarmThresholdRule query, as the alarm #evaluator will construct timestamp bounds for the sequence of #statistics queries as the sliding evaluation window advances #over time. 
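        #Illustrative sketch, not part of the original code: a query item such as
        #  [{'field': 'timestamp', 'op': 'gt', 'value': '2014-01-01T00:00:00'}]
        #is expected to be rejected by the call below (allow_timestamps=False),
        #while a query on another field, e.g.
        #  [{'field': 'resource_id', 'op': 'eq', 'value': '<some id>'}]
        #passes through to the normal validation.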
_validate_query(threshold_rule.query, storage.SampleFilter.__init__, allow_timestamps=False) return threshold_rule @property def default_description(self): return _( 'Alarm when %(meter_name)s is %(comparison_operator)s a ' '%(statistic)s of %(threshold)s over %(period)s seconds') % \ dict(comparison_operator=self.comparison_operator, statistic=self.statistic, threshold=self.threshold, meter_name=self.meter_name, period=self.period) def as_dict(self): rule = self.as_dict_from_keys(['period', 'comparison_operator', 'threshold', 'statistic', 'evaluation_periods', 'meter_name', 'exclude_outliers']) rule['query'] = [q.as_dict() for q in self.query] return rule @classmethod def sample(cls): return cls(meter_name='cpu_util', period=60, evaluation_periods=1, threshold=300.0, statistic='avg', comparison_operator='gt', query=[{'field': 'resource_id', 'value': '2a4d689b-f0b8-49c1-9eef-87cae58d80db', 'op': 'eq', 'type': 'string'}]) class AlarmCombinationRule(_Base): operator = AdvEnum('operator', str, 'or', 'and', default='and') "How to combine the sub-alarms" alarm_ids = wsme.wsattr([wtypes.text], mandatory=True) "List of alarm identifiers to combine" @property def default_description(self): joiner = ' %s ' % self.operator return _('Combined state of alarms %s') % joiner.join(self.alarm_ids) def as_dict(self): return self.as_dict_from_keys(['operator', 'alarm_ids']) @classmethod def sample(cls): return cls(operator='or', alarm_ids=['739e99cb-c2ec-4718-b900-332502355f38', '153462d0-a9b8-4b5b-8175-9e4b05e9b856']) class AlarmTimeConstraint(_Base): """Representation of a time constraint on an alarm.""" name = wsme.wsattr(wtypes.text, mandatory=True) "The name of the constraint" _description = None # provide a default def get_description(self): if not self._description: return 'Time constraint at %s lasting for %s seconds' \ % (self.start, self.duration) return self._description def set_description(self, value): self._description = value description = wsme.wsproperty(wtypes.text, get_description, set_description) "The description of the constraint" start = wsme.wsattr(CronType(), mandatory=True) "Start point of the time constraint, in cron format" duration = wsme.wsattr(wtypes.IntegerType(minimum=0), mandatory=True) "How long the constraint should last, in seconds" timezone = wsme.wsattr(wtypes.text, default="") "Timezone of the constraint" def as_dict(self): return self.as_dict_from_keys(['name', 'description', 'start', 'duration', 'timezone']) @staticmethod def validate(tc): if tc.timezone: try: pytz.timezone(tc.timezone) except Exception: raise ClientSideError(_("Timezone %s is not valid") % tc.timezone) return tc @classmethod def sample(cls): return cls(name='SampleConstraint', description='nightly build every night at 23h for 3 hours', start='0 23 * * *', duration=10800, timezone='Europe/Ljubljana') class Alarm(_Base): """Representation of an alarm. .. note:: combination_rule and threshold_rule are mutually exclusive. The *type* of the alarm should be set to *threshold* or *combination* and the appropriate rule should be filled. 
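    A minimal illustrative request body for a *threshold* alarm; the field
    values are examples only, not defaults defined by this module::

        {"name": "cpu_high",
         "type": "threshold",
         "threshold_rule": {"meter_name": "cpu_util",
                            "threshold": 70.0,
                            "comparison_operator": "gt",
                            "statistic": "avg",
                            "period": 60,
                            "evaluation_periods": 1}}
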
""" alarm_id = wtypes.text "The UUID of the alarm" name = wsme.wsattr(wtypes.text, mandatory=True) "The name for the alarm" _description = None # provide a default def get_description(self): rule = getattr(self, '%s_rule' % self.type, None) if not self._description and rule: return six.text_type(rule.default_description) return self._description def set_description(self, value): self._description = value description = wsme.wsproperty(wtypes.text, get_description, set_description) "The description of the alarm" enabled = wsme.wsattr(bool, default=True) "This alarm is enabled?" ok_actions = wsme.wsattr([wtypes.text], default=[]) "The actions to do when alarm state change to ok" alarm_actions = wsme.wsattr([wtypes.text], default=[]) "The actions to do when alarm state change to alarm" insufficient_data_actions = wsme.wsattr([wtypes.text], default=[]) "The actions to do when alarm state change to insufficient data" repeat_actions = wsme.wsattr(bool, default=False) "The actions should be re-triggered on each evaluation cycle" type = AdvEnum('type', str, 'threshold', 'combination', mandatory=True) "Explicit type specifier to select which rule to follow below." threshold_rule = AlarmThresholdRule "Describe when to trigger the alarm based on computed statistics" combination_rule = AlarmCombinationRule """Describe when to trigger the alarm based on combining the state of other alarms""" time_constraints = wtypes.wsattr([AlarmTimeConstraint], default=[]) """Describe time constraints for the alarm""" # These settings are ignored in the PUT or POST operations, but are # filled in for GET project_id = wtypes.text "The ID of the project or tenant that owns the alarm" user_id = wtypes.text "The ID of the user who created the alarm" timestamp = datetime.datetime "The date of the last alarm definition update" state = AdvEnum('state', str, *state_kind, default='insufficient data') "The state offset the alarm" state_timestamp = datetime.datetime "The date of the last alarm state changed" def __init__(self, rule=None, time_constraints=None, **kwargs): super(Alarm, self).__init__(**kwargs) if rule: if self.type == 'threshold': self.threshold_rule = AlarmThresholdRule(**rule) elif self.type == 'combination': self.combination_rule = AlarmCombinationRule(**rule) if time_constraints: self.time_constraints = [AlarmTimeConstraint(**tc) for tc in time_constraints] @staticmethod def validate(alarm): Alarm.check_rule(alarm) if alarm.threshold_rule: # ensure an implicit constraint on project_id is added to # the query if not already present alarm.threshold_rule.query = _sanitize_query( alarm.threshold_rule.query, storage.SampleFilter.__init__, on_behalf_of=alarm.project_id ) elif alarm.combination_rule: project = _get_auth_project(alarm.project_id if alarm.project_id != wtypes.Unset else None) for id in alarm.combination_rule.alarm_ids: alarms = list(pecan.request.storage_conn.get_alarms( alarm_id=id, project=project)) if not alarms: raise EntityNotFound(_('Alarm'), id) tc_names = [tc.name for tc in alarm.time_constraints] if len(tc_names) > len(set(tc_names)): error = _("Time constraint names must be " "unique for a given alarm.") raise ClientSideError(error) return alarm @staticmethod def check_rule(alarm): rule = '%s_rule' % alarm.type if getattr(alarm, rule) in (wtypes.Unset, None): error = _("%(rule)s must be set for %(type)s" " type alarm") % {"rule": rule, "type": alarm.type} raise ClientSideError(error) if alarm.threshold_rule and alarm.combination_rule: error = _("threshold_rule and combination_rule " "cannot 
be set at the same time") raise ClientSideError(error) @classmethod def sample(cls): return cls(alarm_id=None, name="SwiftObjectAlarm", description="An alarm", type='combination', threshold_rule=None, combination_rule=AlarmCombinationRule.sample(), time_constraints=[AlarmTimeConstraint.sample().as_dict()], user_id="c96c887c216949acbdfbd8b494863567", project_id="c96c887c216949acbdfbd8b494863567", enabled=True, timestamp=datetime.datetime.utcnow(), state="ok", state_timestamp=datetime.datetime.utcnow(), ok_actions=["http://site:8000/ok"], alarm_actions=["http://site:8000/alarm"], insufficient_data_actions=["http://site:8000/nodata"], repeat_actions=False, ) def as_dict(self, db_model): d = super(Alarm, self).as_dict(db_model) for k in d: if k.endswith('_rule'): del d[k] d['rule'] = getattr(self, "%s_rule" % self.type).as_dict() d['time_constraints'] = [tc.as_dict() for tc in self.time_constraints] return d class AlarmChange(_Base): """Representation of an event in an alarm's history """ event_id = wtypes.text "The UUID of the change event" alarm_id = wtypes.text "The UUID of the alarm" type = wtypes.Enum(str, 'creation', 'rule change', 'state transition', 'deletion') "The type of change" detail = wtypes.text "JSON fragment describing change" project_id = wtypes.text "The project ID of the initiating identity" user_id = wtypes.text "The user ID of the initiating identity" on_behalf_of = wtypes.text "The tenant on behalf of which the change is being made" timestamp = datetime.datetime "The time/date of the alarm change" @classmethod def sample(cls): return cls(alarm_id='e8ff32f772a44a478182c3fe1f7cad6a', type='rule change', detail='{"threshold": 42.0, "evaluation_periods": 4}', user_id="3e5d11fda79448ac99ccefb20be187ca", project_id="b6f16144010811e387e4de429e99ee8c", on_behalf_of="92159030020611e3b26dde429e99ee8c", timestamp=datetime.datetime.utcnow(), ) class AlarmController(rest.RestController): """Manages operations on a single alarm. """ _custom_actions = { 'history': ['GET'], 'state': ['PUT', 'GET'], } def __init__(self, alarm_id): pecan.request.context['alarm_id'] = alarm_id self._id = alarm_id def _alarm(self): self.conn = pecan.request.storage_conn auth_project = acl.get_limited_to_project(pecan.request.headers) alarms = list(self.conn.get_alarms(alarm_id=self._id, project=auth_project)) if not alarms: raise EntityNotFound(_('Alarm'), self._id) return alarms[0] def _record_change(self, data, now, on_behalf_of=None, type=None): if not cfg.CONF.alarm.record_history: return type = type or storage.models.AlarmChange.RULE_CHANGE scrubbed_data = utils.stringify_timestamps(data) detail = json.dumps(scrubbed_data) user_id = pecan.request.headers.get('X-User-Id') project_id = pecan.request.headers.get('X-Project-Id') on_behalf_of = on_behalf_of or project_id payload = dict(event_id=str(uuid.uuid4()), alarm_id=self._id, type=type, detail=detail, user_id=user_id, project_id=project_id, on_behalf_of=on_behalf_of, timestamp=now) try: self.conn.record_alarm_change(payload) except NotImplementedError: pass # Revert to the pre-json'ed details ... payload['detail'] = scrubbed_data _send_notification(type, payload) @wsme_pecan.wsexpose(Alarm) def get(self): """Return this alarm. """ return Alarm.from_db_model(self._alarm()) @wsme_pecan.wsexpose(Alarm, body=Alarm) def put(self, data): """Modify this alarm. :param data: an alarm within the request body. 
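        Illustrative usage, following the controller layout in this module:
        ``PUT /v2/alarms/<alarm_id>`` with an alarm document such as the
        threshold example in the ``Alarm`` docstring. If ``user_id`` or
        ``project_id`` are omitted they are inherited from the stored alarm,
        and renaming the alarm to a name already used within the project is
        rejected with a 409.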
""" # Ensure alarm exists alarm_in = self._alarm() now = timeutils.utcnow() data.alarm_id = self._id user, project = acl.get_limited_to(pecan.request.headers) if user: data.user_id = user elif data.user_id == wtypes.Unset: data.user_id = alarm_in.user_id if project: data.project_id = project elif data.project_id == wtypes.Unset: data.project_id = alarm_in.project_id data.timestamp = now if alarm_in.state != data.state: data.state_timestamp = now else: data.state_timestamp = alarm_in.state_timestamp # make sure alarms are unique by name per project. if alarm_in.name != data.name: alarms = list(self.conn.get_alarms(name=data.name, project=data.project_id)) if alarms: raise ClientSideError( _("Alarm with name=%s exists") % data.name, status_code=409) # should check if there is any circle in the dependency, but for # efficiency reason, here only check alarm cannot depend on itself if data.type == 'combination': if self._id in data.combination_rule.alarm_ids: raise ClientSideError(_('Cannot specify alarm %s itself in ' 'combination rule') % self._id) old_alarm = Alarm.from_db_model(alarm_in).as_dict(storage.models.Alarm) updated_alarm = data.as_dict(storage.models.Alarm) try: alarm_in = storage.models.Alarm(**updated_alarm) except Exception: LOG.exception(_("Error while putting alarm: %s") % updated_alarm) raise ClientSideError(_("Alarm incorrect")) alarm = self.conn.update_alarm(alarm_in) change = dict((k, v) for k, v in updated_alarm.items() if v != old_alarm[k] and k not in ['timestamp', 'state_timestamp']) self._record_change(change, now, on_behalf_of=alarm.project_id) return Alarm.from_db_model(alarm) @wsme_pecan.wsexpose(None, status_code=204) def delete(self): """Delete this alarm. """ # ensure alarm exists before deleting alarm = self._alarm() self.conn.delete_alarm(alarm.alarm_id) change = Alarm.from_db_model(alarm).as_dict(storage.models.Alarm) self._record_change(change, timeutils.utcnow(), type=storage.models.AlarmChange.DELETION) # TODO(eglynn): add pagination marker to signature once overall # API support for pagination is finalized @wsme_pecan.wsexpose([AlarmChange], [Query]) def history(self, q=[]): """Assembles the alarm history requested. :param q: Filter rules for the changes to be described. """ # allow history to be returned for deleted alarms, but scope changes # returned to those carried out on behalf of the auth'd tenant, to # avoid inappropriate cross-tenant visibility of alarm history auth_project = acl.get_limited_to_project(pecan.request.headers) conn = pecan.request.storage_conn kwargs = _query_to_kwargs(q, conn.get_alarm_changes, ['on_behalf_of']) return [AlarmChange.from_db_model(ac) for ac in conn.get_alarm_changes(self._id, auth_project, **kwargs)] @wsme.validate(state_kind_enum) @wsme_pecan.wsexpose(state_kind_enum, body=state_kind_enum) def put_state(self, state): """Set the state of this alarm. :param state: an alarm state within the request body. """ # note(sileht): body are not validated by wsme # Workaround for https://bugs.launchpad.net/wsme/+bug/1227229 if state not in state_kind: raise ClientSideError(_("state invalid")) now = timeutils.utcnow() alarm = self._alarm() alarm.state = state alarm.state_timestamp = now alarm = self.conn.update_alarm(alarm) change = {'state': alarm.state} self._record_change(change, now, on_behalf_of=alarm.project_id, type=storage.models.AlarmChange.STATE_TRANSITION) return alarm.state @wsme_pecan.wsexpose(state_kind_enum) def get_state(self): """Get the state of this alarm. 
""" alarm = self._alarm() return alarm.state class AlarmsController(rest.RestController): """Manages operations on the alarms collection. """ @pecan.expose() def _lookup(self, alarm_id, *remainder): return AlarmController(alarm_id), remainder def _record_creation(self, conn, data, alarm_id, now): if not cfg.CONF.alarm.record_history: return type = storage.models.AlarmChange.CREATION scrubbed_data = utils.stringify_timestamps(data) detail = json.dumps(scrubbed_data) user_id = pecan.request.headers.get('X-User-Id') project_id = pecan.request.headers.get('X-Project-Id') payload = dict(event_id=str(uuid.uuid4()), alarm_id=alarm_id, type=type, detail=detail, user_id=user_id, project_id=project_id, on_behalf_of=project_id, timestamp=now) try: conn.record_alarm_change(payload) except NotImplementedError: pass # Revert to the pre-json'ed details ... payload['detail'] = scrubbed_data _send_notification(type, payload) @wsme_pecan.wsexpose(Alarm, body=Alarm, status_code=201) def post(self, data): """Create a new alarm. :param data: an alarm within the request body. """ conn = pecan.request.storage_conn now = timeutils.utcnow() data.alarm_id = str(uuid.uuid4()) user_limit, project_limit = acl.get_limited_to(pecan.request.headers) def _set_ownership(aspect, owner_limitation, header): attr = '%s_id' % aspect requested_owner = getattr(data, attr) explicit_owner = requested_owner != wtypes.Unset caller = pecan.request.headers.get(header) if (owner_limitation and explicit_owner and requested_owner != caller): raise ProjectNotAuthorized(requested_owner, aspect) actual_owner = (owner_limitation or requested_owner if explicit_owner else caller) setattr(data, attr, actual_owner) _set_ownership('user', user_limit, 'X-User-Id') _set_ownership('project', project_limit, 'X-Project-Id') data.timestamp = now data.state_timestamp = now change = data.as_dict(storage.models.Alarm) # make sure alarms are unique by name per project. alarms = list(conn.get_alarms(name=data.name, project=data.project_id)) if alarms: raise ClientSideError( _("Alarm with name='%s' exists") % data.name, status_code=409) try: alarm_in = storage.models.Alarm(**change) except Exception: LOG.exception(_("Error while posting alarm: %s") % change) raise ClientSideError(_("Alarm incorrect")) alarm = conn.create_alarm(alarm_in) self._record_creation(conn, change, alarm.alarm_id, now) return Alarm.from_db_model(alarm) @wsme_pecan.wsexpose([Alarm], [Query]) def get_all(self, q=[]): """Return all alarms, based on the query provided. :param q: Filter rules for the alarms to be returned. 
""" #Timestamp is not supported field for Simple Alarm queries kwargs = _query_to_kwargs(q, pecan.request.storage_conn.get_alarms, allow_timestamps=False) return [Alarm.from_db_model(m) for m in pecan.request.storage_conn.get_alarms(**kwargs)] class TraitDescription(_Base): """A description of a trait, with no associated value.""" type = wtypes.text "the data type, defaults to string" name = wtypes.text "the name of the trait" @classmethod def sample(cls): return cls(name='service', type='string' ) class EventQuery(Query): """Query arguments for Event Queries.""" _supported_types = ['integer', 'float', 'string', 'datetime'] type = wsme.wsattr(wtypes.text, default='string') "the type of the trait filter, defaults to string" def __repr__(self): # for logging calls return '' % (self.field, self.op, self._get_value_as_type(), self.type) class Trait(_Base): """A Trait associated with an event.""" name = wtypes.text "The name of the trait" value = wtypes.text "the value of the trait" type = wtypes.text "the type of the trait (string, integer, float or datetime)" @classmethod def sample(cls): return cls(name='service', type='string', value='compute.hostname' ) class Event(_Base): """A System event.""" message_id = wtypes.text "The message ID for the notification" event_type = wtypes.text "The type of the event" _traits = None def get_traits(self): return self._traits @staticmethod def _convert_storage_trait(t): """Helper method to convert a storage model into an API trait instance. If an API trait instance is passed in, just return it. """ if isinstance(t, Trait): return t value = (six.text_type(t.value) if not t.dtype == storage.models.Trait.DATETIME_TYPE else t.value.isoformat()) type = storage.models.Trait.get_name_by_type(t.dtype) return Trait(name=t.name, type=type, value=value) def set_traits(self, traits): self._traits = map(self._convert_storage_trait, traits) traits = wsme.wsproperty(wtypes.ArrayType(Trait), get_traits, set_traits) "Event specific properties" generated = datetime.datetime "The time the event occurred" @classmethod def sample(cls): return cls( event_type='compute.instance.update', generated='2013-11-11T20:00:00', message_id='94834db1-8f1b-404d-b2ec-c35901f1b7f0', traits={ 'request_id': 'req-4e2d67b8-31a4-48af-bb2f-9df72a353a72', 'service': 'conductor.tem-devstack-01', 'tenant_id': '7f13f2b17917463b9ee21aa92c4b36d6' } ) def requires_admin(func): @functools.wraps(func) def wrapped(*args, **kwargs): usr_limit, proj_limit = acl.get_limited_to(pecan.request.headers) # If User and Project are None, you have full access. if usr_limit and proj_limit: raise ProjectNotAuthorized(proj_limit) return func(*args, **kwargs) return wrapped def _event_query_to_event_filter(q): evt_model_filter = { 'event_type': None, 'message_id': None, 'start_time': None, 'end_time': None } traits_filter = [] for i in q: # FIXME(herndon): Support for operators other than # 'eq' will come later. if i.op != 'eq': error = _("operator %s not supported") % i.op raise ClientSideError(error) if i.field in evt_model_filter: evt_model_filter[i.field] = i.value else: traits_filter.append({"key": i.field, i.type: i._get_value_as_type()}) return storage.EventFilter(traits_filter=traits_filter, **evt_model_filter) class TraitsController(rest.RestController): """Works on Event Traits.""" @requires_admin @wsme_pecan.wsexpose([Trait], wtypes.text, wtypes.text) def get_one(self, event_type, trait_name): """Return all instances of a trait for an event type. 
:param event_type: Event type to filter traits by :param trait_name: Trait to return values for """ LOG.debug(_("Getting traits for %s") % event_type) return [Trait(name=t.name, type=t.get_type_name(), value=t.value) for t in pecan.request.storage_conn .get_traits(event_type, trait_name)] @requires_admin @wsme_pecan.wsexpose([TraitDescription], wtypes.text) def get_all(self, event_type): """Return all trait names for an event type. :param event_type: Event type to filter traits by """ get_trait_name = storage.models.Trait.get_name_by_type return [TraitDescription(name=t['name'], type=get_trait_name(t['data_type'])) for t in pecan.request.storage_conn .get_trait_types(event_type)] class EventTypesController(rest.RestController): """Works on Event Types in the system.""" traits = TraitsController() @pecan.expose() def get_one(self, event_type): pecan.abort(404) @requires_admin @wsme_pecan.wsexpose([unicode]) def get_all(self): """Get all event types. """ return list(pecan.request.storage_conn.get_event_types()) class EventsController(rest.RestController): """Works on Events.""" @requires_admin @wsme_pecan.wsexpose([Event], [EventQuery]) def get_all(self, q=[]): """Return all events matching the query filters. :param q: Filter arguments for which Events to return """ event_filter = _event_query_to_event_filter(q) return [Event(message_id=event.message_id, event_type=event.event_type, generated=event.generated, traits=event.traits) for event in pecan.request.storage_conn.get_events(event_filter)] @requires_admin @wsme_pecan.wsexpose(Event, wtypes.text) def get_one(self, message_id): """Return a single event with the given message id. :param message_id: Message ID of the Event to be returned """ event_filter = storage.EventFilter(message_id=message_id) events = pecan.request.storage_conn.get_events(event_filter) if not events: raise EntityNotFound(_("Event"), message_id) if len(events) > 1: LOG.error(_("More than one event with " "id %s returned from storage driver") % message_id) event = events[0] return Event(message_id=event.message_id, event_type=event.event_type, generated=event.generated, traits=event.traits) class QuerySamplesController(rest.RestController): """Provides complex query possibilities for samples """ @wsme_pecan.wsexpose([Sample], body=ComplexQuery) def post(self, body): """Define query for retrieving Sample data. :param body: Query rules for the samples to be returned. """ sample_name_mapping = {"resource": "resource_id", "meter": "counter_name", "type": "counter_type", "unit": "counter_unit", "volume": "counter_volume"} query = ValidatedComplexQuery(body, storage.models.Sample, sample_name_mapping, metadata_allowed=True) query.validate(visibility_field="project_id") conn = pecan.request.storage_conn return [Sample.from_db_model(s) for s in conn.query_samples(query.filter_expr, query.orderby, query.limit)] class QueryAlarmHistoryController(rest.RestController): """Provides complex query possibilites for alarm history """ @wsme_pecan.wsexpose([AlarmChange], body=ComplexQuery) def post(self, body): """Define query for retrieving AlarmChange data. :param body: Query rules for the alarm history to be returned. 
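        An illustrative body; ``filter`` and ``orderby`` are JSON-encoded
        strings (see ``ValidatedComplexQuery.validate``), and the concrete
        values and the ``desc`` direction are examples only::

            {"filter": "{\"=\": {\"type\": \"rule change\"}}",
             "orderby": "[{\"timestamp\": \"desc\"}]",
             "limit": 10}
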
""" query = ValidatedComplexQuery(body, storage.models.AlarmChange) query.validate(visibility_field="on_behalf_of") conn = pecan.request.storage_conn return [AlarmChange.from_db_model(s) for s in conn.query_alarm_history(query.filter_expr, query.orderby, query.limit)] class QueryAlarmsController(rest.RestController): """Provides complex query possibilities for alarms """ history = QueryAlarmHistoryController() @wsme_pecan.wsexpose([Alarm], body=ComplexQuery) def post(self, body): """Define query for retrieving Alarm data. :param body: Query rules for the alarms to be returned. """ query = ValidatedComplexQuery(body, storage.models.Alarm) query.validate(visibility_field="project_id") conn = pecan.request.storage_conn return [Alarm.from_db_model(s) for s in conn.query_alarms(query.filter_expr, query.orderby, query.limit)] class QueryController(rest.RestController): samples = QuerySamplesController() alarms = QueryAlarmsController() def _flatten_capabilities(capabilities): return dict((k, v) for k, v in utils.recursive_keypairs(capabilities)) class Capabilities(_Base): """A representation of the API capabilities, usually constrained by restrictions imposed by the storage driver. """ api = {wtypes.text: bool} "A flattened dictionary of API capabilities" @classmethod def sample(cls): return cls( api=_flatten_capabilities({ 'meters': {'pagination': True, 'query': {'simple': True, 'metadata': True, 'complex': False}}, 'resources': {'pagination': False, 'query': {'simple': True, 'metadata': True, 'complex': False}}, 'samples': {'pagination': True, 'groupby': True, 'query': {'simple': True, 'metadata': True, 'complex': True}}, 'statistics': {'pagination': True, 'groupby': True, 'query': {'simple': True, 'metadata': True, 'complex': False}, 'aggregation': {'standard': True, 'selectable': { 'max': True, 'min': True, 'sum': True, 'avg': True, 'count': True, 'stddev': True, 'cardinality': True, 'quartile': False}}}, 'alarms': {'query': {'simple': True, 'complex': True}, 'history': {'query': {'simple': True, 'complex': True}}}, 'events': {'query': {'simple': True}}, }) ) class CapabilitiesController(rest.RestController): """Manages capabilities queries. """ @wsme_pecan.wsexpose(Capabilities) def get(self): """Returns a flattened dictionary of API capabilities supported by the currently configured storage driver. """ # variation in API capabilities is effectively determined by # the lack of strict feature parity across storage drivers driver_capabilities = pecan.request.storage_conn.get_capabilities() return Capabilities(api=_flatten_capabilities(driver_capabilities)) class V2Controller(object): """Version 2 API controller root.""" resources = ResourcesController() meters = MetersController() samples = SamplesController() alarms = AlarmsController() event_types = EventTypesController() events = EventsController() query = QueryController() capabilities = CapabilitiesController() ceilometer-2014.1.5/ceilometer/api/controllers/root.py0000664000567000056700000000166212540642615024052 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012 New Dream Network, LLC (DreamHost) # # Author: Doug Hellmann # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan from ceilometer.api.controllers import v2 class RootController(object): v2 = v2.V2Controller() @pecan.expose(generic=True, template='index.html') def index(self): # FIXME: Return version information return dict() ceilometer-2014.1.5/ceilometer/api/controllers/__init__.py0000664000567000056700000000000012540642613024605 0ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/api/config.py0000664000567000056700000000203112540642615021755 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # Server Specific Configurations server = { 'port': '8777', 'host': '0.0.0.0' } # Pecan Application Configurations app = { 'root': 'ceilometer.api.controllers.root.RootController', 'modules': ['ceilometer.api'], 'static_root': '%(confdir)s/public', 'template_path': '%(confdir)s/ceilometer/api/templates', } # Custom Configurations must be in Python dictionary format:: # # foo = {'bar':'baz'} # # All configurations are accessible at:: # pecan.conf ceilometer-2014.1.5/ceilometer/api/app.wsgi0000664000567000056700000000201512540642615021613 0ustar jenkinsjenkins00000000000000# -*- mode: python -*- # -*- encoding: utf-8 -*- # # Copyright © 2013 New Dream Network, LLC (DreamHost) # # Author: Doug Hellmann # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Use this file for deploying the API under mod_wsgi. See http://pecan.readthedocs.org/en/latest/deployment.html for details. """ from ceilometer import service from ceilometer.api import app # Initialize the oslo configuration library and logging service.prepare_service([]) application = app.VersionSelectorApplication() ceilometer-2014.1.5/ceilometer/api/app.py0000664000567000056700000001103512540642615021274 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012 New Dream Network, LLC (DreamHost) # # Author: Doug Hellmann # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import os import socket from wsgiref import simple_server import netaddr from oslo.config import cfg import pecan from ceilometer.api import acl from ceilometer.api import config as api_config from ceilometer.api import hooks from ceilometer.api import middleware from ceilometer.openstack.common import log from ceilometer import storage LOG = log.getLogger(__name__) auth_opts = [ cfg.StrOpt('auth_strategy', default='keystone', help='The strategy to use for auth: noauth or keystone.'), cfg.BoolOpt('enable_v1_api', default=True, help='Deploy the deprecated v1 API.'), ] CONF = cfg.CONF CONF.register_opts(auth_opts) def get_pecan_config(): # Set up the pecan configuration filename = api_config.__file__.replace('.pyc', '.py') return pecan.configuration.conf_from_file(filename) def setup_app(pecan_config=None, extra_hooks=None): # FIXME: Replace DBHook with a hooks.TransactionHook app_hooks = [hooks.ConfigHook(), hooks.DBHook( storage.get_connection(cfg.CONF), ), hooks.PipelineHook(), hooks.TranslationHook()] if extra_hooks: app_hooks.extend(extra_hooks) if not pecan_config: pecan_config = get_pecan_config() pecan.configuration.set_config(dict(pecan_config), overwrite=True) app = pecan.make_app( pecan_config.app.root, static_root=pecan_config.app.static_root, template_path=pecan_config.app.template_path, debug=CONF.debug, force_canonical=getattr(pecan_config.app, 'force_canonical', True), hooks=app_hooks, wrap_app=middleware.ParsableErrorMiddleware, guess_content_type_from_ext=False ) if getattr(pecan_config.app, 'enable_acl', True): return acl.install(app, cfg.CONF) return app class VersionSelectorApplication(object): def __init__(self): pc = get_pecan_config() pc.app.debug = CONF.debug pc.app.enable_acl = (CONF.auth_strategy == 'keystone') if cfg.CONF.enable_v1_api: from ceilometer.api.v1 import app as v1app self.v1 = v1app.make_app(cfg.CONF, enable_acl=pc.app.enable_acl) else: def not_found(environ, start_response): start_response('404 Not Found', []) return [] self.v1 = not_found self.v2 = setup_app(pecan_config=pc) def __call__(self, environ, start_response): if environ['PATH_INFO'].startswith('/v1/'): return self.v1(environ, start_response) return self.v2(environ, start_response) def get_server_cls(host): """Return an appropriate WSGI server class base on provided host :param host: The listen host for the ceilometer API server. 
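    For example, an IPv6 literal such as ``::1`` yields a subclass with
    ``address_family = socket.AF_INET6`` (see the check below), while an
    IPv4 address keeps the default ``simple_server.WSGIServer``.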
""" server_cls = simple_server.WSGIServer if netaddr.valid_ipv6(host): # NOTE(dzyu) make sure use IPv6 sockets if host is in IPv6 pattern if getattr(server_cls, 'address_family') == socket.AF_INET: class server_cls(server_cls): address_family = socket.AF_INET6 return server_cls def build_server(): # Build the WSGI app root = VersionSelectorApplication() # Create the WSGI server and start it host, port = cfg.CONF.api.host, cfg.CONF.api.port server_cls = get_server_cls(host) srv = simple_server.make_server(host, port, root, server_cls) LOG.info(_('Starting server in PID %s') % os.getpid()) LOG.info(_("Configuration:")) cfg.CONF.log_opt_values(LOG, logging.INFO) if host == '0.0.0.0': LOG.info(_( 'serving on 0.0.0.0:%(sport)s, view at http://127.0.0.1:%(vport)s') % ({'sport': port, 'vport': port})) else: LOG.info(_("serving on http://%(host)s:%(port)s") % ( {'host': host, 'port': port})) return srv ceilometer-2014.1.5/ceilometer/api/__init__.py0000664000567000056700000000250712540642615022257 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012 New Dream Network, LLC (DreamHost) # # Author: Doug Hellmann # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo.config import cfg # Register options for the service API_SERVICE_OPTS = [ cfg.IntOpt('port', default=8777, deprecated_name='metering_api_port', deprecated_group='DEFAULT', help='The port for the ceilometer API server.', ), cfg.StrOpt('host', default='0.0.0.0', help='The listen IP for the ceilometer API server.', ), ] CONF = cfg.CONF opt_group = cfg.OptGroup(name='api', title='Options for the ceilometer-api service') CONF.register_group(opt_group) CONF.register_opts(API_SERVICE_OPTS, opt_group) ceilometer-2014.1.5/ceilometer/api/hooks.py0000664000567000056700000000454212540642615021644 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012 New Dream Network, LLC (DreamHost) # # Author: Doug Hellmann # Angus Salkeld # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import threading from oslo.config import cfg from pecan import hooks from ceilometer import pipeline class ConfigHook(hooks.PecanHook): """Attach the configuration object to the request so controllers can get to it. 
""" def before(self, state): state.request.cfg = cfg.CONF class DBHook(hooks.PecanHook): def __init__(self, storage_connection): self.storage_connection = storage_connection def before(self, state): state.request.storage_conn = self.storage_connection class PipelineHook(hooks.PecanHook): '''Create and attach a pipeline to the request so that new samples can be posted via the /v2/meters/ API. ''' pipeline_manager = None def __init__(self): if self.__class__.pipeline_manager is None: # this is done here as the cfg options are not available # when the file is imported. self.__class__.pipeline_manager = pipeline.setup_pipeline() def before(self, state): state.request.pipeline_manager = self.pipeline_manager class TranslationHook(hooks.PecanHook): def __init__(self): # Use thread local storage to make this thread safe in situations # where one pecan instance is being used to serve multiple request # threads. self.local_error = threading.local() self.local_error.translatable_error = None def before(self, state): self.local_error.translatable_error = None def after(self, state): if hasattr(state.response, 'translatable_error'): self.local_error.translatable_error = ( state.response.translatable_error) ceilometer-2014.1.5/ceilometer/api/acl.py0000664000567000056700000000417412540642615021261 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012 New Dream Network, LLC (DreamHost) # # Author: Doug Hellmann # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Access Control Lists (ACL's) control access the API server.""" from ceilometer.openstack.common import policy from keystoneclient.middleware import auth_token from oslo.config import cfg _ENFORCER = None OPT_GROUP_NAME = 'keystone_authtoken' def register_opts(conf): """Register keystoneclient middleware options """ conf.register_opts(auth_token.opts, group=OPT_GROUP_NAME) auth_token.CONF = conf register_opts(cfg.CONF) def install(app, conf): """Install ACL check on application.""" return auth_token.AuthProtocol(app, conf=dict(conf.get(OPT_GROUP_NAME))) def get_limited_to(headers): """Return the user and project the request should be limited to. :param headers: HTTP headers dictionary :return: A tuple of (user, project), set to None if there's no limit on one of these. """ global _ENFORCER if not _ENFORCER: _ENFORCER = policy.Enforcer() if not _ENFORCER.enforce('context_is_admin', {}, {'roles': headers.get('X-Roles', "").split(",")}): return headers.get('X-User-Id'), headers.get('X-Project-Id') return None, None def get_limited_to_project(headers): """Return the project the request should be limited to. :param headers: HTTP headers dictionary :return: A project, or None if there's no limit on it. """ return get_limited_to(headers)[1] ceilometer-2014.1.5/ceilometer/api/middleware.py0000664000567000056700000001255012540642615022634 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp. 
# Copyright © 2012 New Dream Network, LLC (DreamHost) # # Author: Doug Hellmann # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Middleware to replace the plain text message body of an error response with one formatted so the client can parse it. Based on pecan.middleware.errordocument """ import json from lxml import etree import webob from ceilometer.api import hooks from ceilometer.openstack.common import gettextutils from ceilometer.openstack.common.gettextutils import _ # noqa from ceilometer.openstack.common import log LOG = log.getLogger(__name__) class ParsableErrorMiddleware(object): """Replace error body with something the client can parse. """ @staticmethod def best_match_language(accept_language): """Determines best available locale from the Accept-Language header. :returns: the best language match or None if the 'Accept-Language' header was not available in the request. """ if not accept_language: return None all_languages = gettextutils.get_available_languages('ceilometer') return accept_language.best_match(all_languages) def __init__(self, app): self.app = app def __call__(self, environ, start_response): # Request for this state, modified by replace_start_response() # and used when an error is being reported. state = {} def replacement_start_response(status, headers, exc_info=None): """Overrides the default response to make errors parsable. """ try: status_code = int(status.split(' ')[0]) state['status_code'] = status_code except (ValueError, TypeError): # pragma: nocover raise Exception(( 'ErrorDocumentMiddleware received an invalid ' 'status %s' % status )) else: if (state['status_code'] / 100) not in (2, 3): # Remove some headers so we can replace them later # when we have the full error message and can # compute the length. headers = [(h, v) for (h, v) in headers if h not in ('Content-Length', 'Content-Type') ] # Save the headers in case we need to modify them. 
state['headers'] = headers return start_response(status, headers, exc_info) app_iter = self.app(environ, replacement_start_response) if (state['status_code'] / 100) not in (2, 3): req = webob.Request(environ) # Find the first TranslationHook in the array of hooks and use the # translatable_error object from it error = None for hook in self.app.hooks: if isinstance(hook, hooks.TranslationHook): error = hook.local_error.translatable_error break user_locale = self.best_match_language(req.accept_language) if (req.accept.best_match(['application/json', 'application/xml']) == 'application/xml'): try: # simple check xml is valid fault = etree.fromstring('\n'.join(app_iter)) # Add the translated error to the xml data if error is not None: for fault_string in fault.findall('faultstring'): fault_string.text = ( gettextutils.translate( error, user_locale)) body = ['' + etree.tostring(fault) + ''] except etree.XMLSyntaxError as err: LOG.error(_('Error parsing HTTP response: %s') % err) body = ['%s' % state['status_code'] + ''] state['headers'].append(('Content-Type', 'application/xml')) else: try: fault = json.loads('\n'.join(app_iter)) if error is not None and 'faultstring' in fault: fault['faultstring'] = ( gettextutils.translate( error, user_locale)) body = [json.dumps({'error_message': fault})] except ValueError as err: body = [json.dumps({'error_message': '\n'.join(app_iter)})] state['headers'].append(('Content-Type', 'application/json')) state['headers'].append(('Content-Length', str(len(body[0])))) else: body = app_iter return body ceilometer-2014.1.5/ceilometer/api/v1/0000775000567000056700000000000012540643223020464 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/api/v1/app.py0000664000567000056700000000364712540642615021634 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2012 New Dream Network, LLC (DreamHost) # # Author: Doug Hellmann # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Set up the API server application instance.""" import flask from oslo.config import cfg from ceilometer.api import acl from ceilometer.api.v1 import blueprint as v1_blueprint from ceilometer.openstack.common import jsonutils from ceilometer import storage class JSONEncoder(flask.json.JSONEncoder): @staticmethod def default(o): return jsonutils.to_primitive(o) def make_app(conf, enable_acl=True, attach_storage=True, sources_file='sources.json'): app = flask.Flask('ceilometer.api') app.register_blueprint(v1_blueprint.blueprint, url_prefix='/v1') app.json_encoder = JSONEncoder try: with open(sources_file, "r") as f: sources = jsonutils.load(f) except IOError: sources = {} @app.before_request def attach_config(): flask.request.cfg = conf flask.request.sources = sources if attach_storage: @app.before_request def attach_storage(): flask.request.storage_conn = \ storage.get_connection(conf) # Install the middleware wrapper if enable_acl: app.wsgi_app = acl.install(app.wsgi_app, conf) return app # For documentation app = make_app(cfg.CONF, enable_acl=False, attach_storage=False) ceilometer-2014.1.5/ceilometer/api/v1/static/0000775000567000056700000000000012540643223021753 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/api/v1/static/rickshaw.js0000664000567000056700000017406212540642615024142 0ustar jenkinsjenkins00000000000000var Rickshaw = { namespace: function(namespace, obj) { var parts = namespace.split('.'); var parent = Rickshaw; for(var i = 1, length = parts.length; i < length; i++) { currentPart = parts[i]; parent[currentPart] = parent[currentPart] || {}; parent = parent[currentPart]; } return parent; }, keys: function(obj) { var keys = []; for (var key in obj) keys.push(key); return keys; }, extend: function(destination, source) { for (var property in source) { destination[property] = source[property]; } return destination; } }; if (typeof module !== 'undefined' && module.exports) { var d3 = require('d3'); module.exports = Rickshaw; } /* Adapted from https://github.com/Jakobo/PTClass */ /* Copyright (c) 2005-2010 Sam Stephenson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /* Based on Alex Arnell's inheritance implementation. */ /** section: Language * class Class * * Manages Prototype's class-based OOP system. * * Refer to Prototype's web site for a [tutorial on classes and * inheritance](http://prototypejs.org/learn/class-inheritance). 
**/ (function(globalContext) { /* ------------------------------------ */ /* Import from object.js */ /* ------------------------------------ */ var _toString = Object.prototype.toString, NULL_TYPE = 'Null', UNDEFINED_TYPE = 'Undefined', BOOLEAN_TYPE = 'Boolean', NUMBER_TYPE = 'Number', STRING_TYPE = 'String', OBJECT_TYPE = 'Object', FUNCTION_CLASS = '[object Function]'; function isFunction(object) { return _toString.call(object) === FUNCTION_CLASS; } function extend(destination, source) { for (var property in source) if (source.hasOwnProperty(property)) // modify protect primitive slaughter destination[property] = source[property]; return destination; } function keys(object) { if (Type(object) !== OBJECT_TYPE) { throw new TypeError(); } var results = []; for (var property in object) { if (object.hasOwnProperty(property)) { results.push(property); } } return results; } function Type(o) { switch(o) { case null: return NULL_TYPE; case (void 0): return UNDEFINED_TYPE; } var type = typeof o; switch(type) { case 'boolean': return BOOLEAN_TYPE; case 'number': return NUMBER_TYPE; case 'string': return STRING_TYPE; } return OBJECT_TYPE; } function isUndefined(object) { return typeof object === "undefined"; } /* ------------------------------------ */ /* Import from Function.js */ /* ------------------------------------ */ var slice = Array.prototype.slice; function argumentNames(fn) { var names = fn.toString().match(/^[\s\(]*function[^(]*\(([^)]*)\)/)[1] .replace(/\/\/.*?[\r\n]|\/\*(?:.|[\r\n])*?\*\//g, '') .replace(/\s+/g, '').split(','); return names.length == 1 && !names[0] ? [] : names; } function wrap(fn, wrapper) { var __method = fn; return function() { var a = update([bind(__method, this)], arguments); return wrapper.apply(this, a); } } function update(array, args) { var arrayLength = array.length, length = args.length; while (length--) array[arrayLength + length] = args[length]; return array; } function merge(array, args) { array = slice.call(array, 0); return update(array, args); } function bind(fn, context) { if (arguments.length < 2 && isUndefined(arguments[0])) return this; var __method = fn, args = slice.call(arguments, 2); return function() { var a = merge(args, arguments); return __method.apply(context, a); } } /* ------------------------------------ */ /* Import from Prototype.js */ /* ------------------------------------ */ var emptyFunction = function(){}; var Class = (function() { // Some versions of JScript fail to enumerate over properties, names of which // correspond to non-enumerable properties in the prototype chain var IS_DONTENUM_BUGGY = (function(){ for (var p in { toString: 1 }) { // check actual property name, so that it works with augmented Object.prototype if (p === 'toString') return false; } return true; })(); function subclass() {}; function create() { var parent = null, properties = [].slice.apply(arguments); if (isFunction(properties[0])) parent = properties.shift(); function klass() { this.initialize.apply(this, arguments); } extend(klass, Class.Methods); klass.superclass = parent; klass.subclasses = []; if (parent) { subclass.prototype = parent.prototype; klass.prototype = new subclass; try { parent.subclasses.push(klass) } catch(e) {} } for (var i = 0, length = properties.length; i < length; i++) klass.addMethods(properties[i]); if (!klass.prototype.initialize) klass.prototype.initialize = emptyFunction; klass.prototype.constructor = klass; return klass; } function addMethods(source) { var ancestor = this.superclass && this.superclass.prototype, properties = 
keys(source); // IE6 doesn't enumerate `toString` and `valueOf` (among other built-in `Object.prototype`) properties, // Force copy if they're not Object.prototype ones. // Do not copy other Object.prototype.* for performance reasons if (IS_DONTENUM_BUGGY) { if (source.toString != Object.prototype.toString) properties.push("toString"); if (source.valueOf != Object.prototype.valueOf) properties.push("valueOf"); } for (var i = 0, length = properties.length; i < length; i++) { var property = properties[i], value = source[property]; if (ancestor && isFunction(value) && argumentNames(value)[0] == "$super") { var method = value; value = wrap((function(m) { return function() { return ancestor[m].apply(this, arguments); }; })(property), method); value.valueOf = bind(method.valueOf, method); value.toString = bind(method.toString, method); } this.prototype[property] = value; } return this; } return { create: create, Methods: { addMethods: addMethods } }; })(); if (globalContext.exports) { globalContext.exports.Class = Class; } else { globalContext.Class = Class; } })(Rickshaw); Rickshaw.namespace('Rickshaw.Compat.ClassList'); Rickshaw.Compat.ClassList = function() { /* adapted from http://purl.eligrey.com/github/classList.js/blob/master/classList.js */ if (typeof document !== "undefined" && !("classList" in document.createElement("a"))) { (function (view) { "use strict"; var classListProp = "classList" , protoProp = "prototype" , elemCtrProto = (view.HTMLElement || view.Element)[protoProp] , objCtr = Object , strTrim = String[protoProp].trim || function () { return this.replace(/^\s+|\s+$/g, ""); } , arrIndexOf = Array[protoProp].indexOf || function (item) { var i = 0 , len = this.length ; for (; i < len; i++) { if (i in this && this[i] === item) { return i; } } return -1; } // Vendors: please allow content code to instantiate DOMExceptions , DOMEx = function (type, message) { this.name = type; this.code = DOMException[type]; this.message = message; } , checkTokenAndGetIndex = function (classList, token) { if (token === "") { throw new DOMEx( "SYNTAX_ERR" , "An invalid or illegal string was specified" ); } if (/\s/.test(token)) { throw new DOMEx( "INVALID_CHARACTER_ERR" , "String contains an invalid character" ); } return arrIndexOf.call(classList, token); } , ClassList = function (elem) { var trimmedClasses = strTrim.call(elem.className) , classes = trimmedClasses ? trimmedClasses.split(/\s+/) : [] , i = 0 , len = classes.length ; for (; i < len; i++) { this.push(classes[i]); } this._updateClassName = function () { elem.className = this.toString(); }; } , classListProto = ClassList[protoProp] = [] , classListGetter = function () { return new ClassList(this); } ; // Most DOMException implementations don't allow calling DOMException's toString() // on non-DOMExceptions. Error's toString() is sufficient here. 
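      // Illustrative usage of the polyfilled API (mirrors native classList),
      // not part of the original source:
      //   el.classList.add('active');
      //   el.classList.toggle('active');   // removes it again
      //   el.classList.contains('active'); // => false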
DOMEx[protoProp] = Error[protoProp]; classListProto.item = function (i) { return this[i] || null; }; classListProto.contains = function (token) { token += ""; return checkTokenAndGetIndex(this, token) !== -1; }; classListProto.add = function (token) { token += ""; if (checkTokenAndGetIndex(this, token) === -1) { this.push(token); this._updateClassName(); } }; classListProto.remove = function (token) { token += ""; var index = checkTokenAndGetIndex(this, token); if (index !== -1) { this.splice(index, 1); this._updateClassName(); } }; classListProto.toggle = function (token) { token += ""; if (checkTokenAndGetIndex(this, token) === -1) { this.add(token); } else { this.remove(token); } }; classListProto.toString = function () { return this.join(" "); }; if (objCtr.defineProperty) { var classListPropDesc = { get: classListGetter , enumerable: true , configurable: true }; try { objCtr.defineProperty(elemCtrProto, classListProp, classListPropDesc); } catch (ex) { // IE 8 doesn't support enumerable:true if (ex.number === -0x7FF5EC54) { classListPropDesc.enumerable = false; objCtr.defineProperty(elemCtrProto, classListProp, classListPropDesc); } } } else if (objCtr[protoProp].__defineGetter__) { elemCtrProto.__defineGetter__(classListProp, classListGetter); } }(window)); } }; if ( (typeof RICKSHAW_NO_COMPAT !== "undefined" && !RICKSHAW_NO_COMPAT) || typeof RICKSHAW_NO_COMPAT === "undefined") { new Rickshaw.Compat.ClassList(); } Rickshaw.namespace('Rickshaw.Graph'); Rickshaw.Graph = function(args) { this.element = args.element; this.series = args.series; this.defaults = { interpolation: 'cardinal', offset: 'zero', min: undefined, max: undefined }; Rickshaw.keys(this.defaults).forEach( function(k) { this[k] = args[k] || this.defaults[k]; }, this ); this.window = {}; this.updateCallbacks = []; var self = this; this.initialize = function(args) { this.validateSeries(args.series); this.series.active = function() { return self.series.filter( function(s) { return !s.disabled } ) }; this.setSize({ width: args.width, height: args.height }); this.element.classList.add('rickshaw_graph'); this.vis = d3.select(this.element) .append("svg:svg") .attr('width', this.width) .attr('height', this.height); var renderers = [ Rickshaw.Graph.Renderer.Stack, Rickshaw.Graph.Renderer.Line, Rickshaw.Graph.Renderer.Bar, Rickshaw.Graph.Renderer.Area, Rickshaw.Graph.Renderer.ScatterPlot ]; renderers.forEach( function(r) { if (!r) return; self.registerRenderer(new r( { graph: self } )); } ); this.setRenderer(args.renderer || 'stack', args); this.discoverRange(); }; this.validateSeries = function(series) { if (!(series instanceof Array) && !(series instanceof Rickshaw.Series)) { var seriesSignature = Object.prototype.toString.apply(series); throw "series is not an array: " + seriesSignature; } var pointsCount; series.forEach( function(s) { if (!(s instanceof Object)) { throw "series element is not an object: " + s; } if (!(s.data)) { throw "series has no data: " + JSON.stringify(s); } if (!(s.data instanceof Array)) { throw "series data is not an array: " + JSON.stringify(s.data); } pointsCount = pointsCount || s.data.length; if (pointsCount && s.data.length != pointsCount) { throw "series cannot have differing numbers of points: " + pointsCount + " vs " + s.data.length + "; see Rickshaw.Series.zeroFill()"; } var dataTypeX = typeof s.data[0].x; var dataTypeY = typeof s.data[0].y; if (dataTypeX != 'number' || dataTypeY != 'number') { throw "x and y properties of points should be numbers instead of " + dataTypeX + " and " + 
dataTypeY; } } ); }; this.dataDomain = function() { // take from the first series var data = this.series[0].data; return [ data[0].x, data.slice(-1).shift().x ]; }; this.discoverRange = function() { var domain = this.renderer.domain(); this.x = d3.scale.linear().domain(domain.x).range([0, this.width]); this.y = d3.scale.linear().domain(domain.y).range([this.height, 0]); this.y.magnitude = d3.scale.linear() .domain([domain.y[0] - domain.y[0], domain.y[1] - domain.y[0]]) .range([0, this.height]); }; this.render = function() { var stackedData = this.stackData(); this.discoverRange(); this.renderer.render(); this.updateCallbacks.forEach( function(callback) { callback(); } ); }; this.update = this.render; this.stackData = function() { var data = this.series.active() .map( function(d) { return d.data } ) .map( function(d) { return d.filter( function(d) { return this._slice(d) }, this ) }, this); this.stackData.hooks.data.forEach( function(entry) { data = entry.f.apply(self, [data]); } ); var layout = d3.layout.stack(); layout.offset( self.offset ); var stackedData = layout(data); this.stackData.hooks.after.forEach( function(entry) { stackedData = entry.f.apply(self, [data]); } ); var i = 0; this.series.forEach( function(series) { if (series.disabled) return; series.stack = stackedData[i++]; } ); this.stackedData = stackedData; return stackedData; }; this.stackData.hooks = { data: [], after: [] }; this._slice = function(d) { if (this.window.xMin || this.window.xMax) { var isInRange = true; if (this.window.xMin && d.x < this.window.xMin) isInRange = false; if (this.window.xMax && d.x > this.window.xMax) isInRange = false; return isInRange; } return true; }; this.onUpdate = function(callback) { this.updateCallbacks.push(callback); }; this.registerRenderer = function(renderer) { this._renderers = this._renderers || {}; this._renderers[renderer.name] = renderer; }; this.configure = function(args) { if (args.width || args.height) { this.setSize(args); } Rickshaw.keys(this.defaults).forEach( function(k) { this[k] = k in args ? args[k] : k in this ? 
this[k] : this.defaults[k]; }, this ); this.setRenderer(args.renderer || this.renderer.name, args); }; this.setRenderer = function(name, args) { if (!this._renderers[name]) { throw "couldn't find renderer " + name; } this.renderer = this._renderers[name]; if (typeof args == 'object') { this.renderer.configure(args); } }; this.setSize = function(args) { args = args || {}; if (typeof window !== undefined) { var style = window.getComputedStyle(this.element, null); var elementWidth = parseInt(style.getPropertyValue('width')); var elementHeight = parseInt(style.getPropertyValue('height')); } this.width = args.width || elementWidth || 400; this.height = args.height || elementHeight || 250; this.vis && this.vis .attr('width', this.width) .attr('height', this.height); } this.initialize(args); }; Rickshaw.namespace('Rickshaw.Fixtures.Color'); Rickshaw.Fixtures.Color = function() { this.schemes = {}; this.schemes.spectrum14 = [ '#ecb796', '#dc8f70', '#b2a470', '#92875a', '#716c49', '#d2ed82', '#bbe468', '#a1d05d', '#e7cbe6', '#d8aad6', '#a888c2', '#9dc2d3', '#649eb9', '#387aa3' ].reverse(); this.schemes.spectrum2000 = [ '#57306f', '#514c76', '#646583', '#738394', '#6b9c7d', '#84b665', '#a7ca50', '#bfe746', '#e2f528', '#fff726', '#ecdd00', '#d4b11d', '#de8800', '#de4800', '#c91515', '#9a0000', '#7b0429', '#580839', '#31082b' ]; this.schemes.spectrum2001 = [ '#2f243f', '#3c2c55', '#4a3768', '#565270', '#6b6b7c', '#72957f', '#86ad6e', '#a1bc5e', '#b8d954', '#d3e04e', '#ccad2a', '#cc8412', '#c1521d', '#ad3821', '#8a1010', '#681717', '#531e1e', '#3d1818', '#320a1b' ]; this.schemes.classic9 = [ '#423d4f', '#4a6860', '#848f39', '#a2b73c', '#ddcb53', '#c5a32f', '#7d5836', '#963b20', '#7c2626', '#491d37', '#2f254a' ].reverse(); this.schemes.httpStatus = { 503: '#ea5029', 502: '#d23f14', 500: '#bf3613', 410: '#efacea', 409: '#e291dc', 403: '#f457e8', 408: '#e121d2', 401: '#b92dae', 405: '#f47ceb', 404: '#a82a9f', 400: '#b263c6', 301: '#6fa024', 302: '#87c32b', 307: '#a0d84c', 304: '#28b55c', 200: '#1a4f74', 206: '#27839f', 201: '#52adc9', 202: '#7c979f', 203: '#a5b8bd', 204: '#c1cdd1' }; this.schemes.colorwheel = [ '#b5b6a9', '#858772', '#785f43', '#96557e', '#4682b4', '#65b9ac', '#73c03a', '#cb513a' ].reverse(); this.schemes.cool = [ '#5e9d2f', '#73c03a', '#4682b4', '#7bc3b8', '#a9884e', '#c1b266', '#a47493', '#c09fb5' ]; this.schemes.munin = [ '#00cc00', '#0066b3', '#ff8000', '#ffcc00', '#330099', '#990099', '#ccff00', '#ff0000', '#808080', '#008f00', '#00487d', '#b35a00', '#b38f00', '#6b006b', '#8fb300', '#b30000', '#bebebe', '#80ff80', '#80c9ff', '#ffc080', '#ffe680', '#aa80ff', '#ee00cc', '#ff8080', '#666600', '#ffbfff', '#00ffcc', '#cc6699', '#999900' ]; }; Rickshaw.namespace('Rickshaw.Fixtures.RandomData'); Rickshaw.Fixtures.RandomData = function(timeInterval) { var addData; timeInterval = timeInterval || 1; var lastRandomValue = 200; var timeBase = Math.floor(new Date().getTime() / 1000); this.addData = function(data) { var randomValue = Math.random() * 100 + 15 + lastRandomValue; var index = data[0].length; var counter = 1; data.forEach( function(series) { var randomVariance = Math.random() * 20; var v = randomValue / 25 + counter++ + (Math.cos((index * counter * 11) / 960) + 2) * 15 + (Math.cos(index / 7) + 2) * 7 + (Math.cos(index / 17) + 2) * 1; series.push( { x: (index * timeInterval) + timeBase, y: v + randomVariance } ); } ); lastRandomValue = randomValue * .85; } }; Rickshaw.namespace('Rickshaw.Fixtures.Time'); Rickshaw.Fixtures.Time = function() { var tzOffset = new Date().getTimezoneOffset() 
* 60; var self = this; this.months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']; this.units = [ { name: 'decade', seconds: 86400 * 365.25 * 10, formatter: function(d) { return (parseInt(d.getUTCFullYear() / 10) * 10) } }, { name: 'year', seconds: 86400 * 365.25, formatter: function(d) { return d.getUTCFullYear() } }, { name: 'month', seconds: 86400 * 30.5, formatter: function(d) { return self.months[d.getUTCMonth()] } }, { name: 'week', seconds: 86400 * 7, formatter: function(d) { return self.formatDate(d) } }, { name: 'day', seconds: 86400, formatter: function(d) { return d.getUTCDate() } }, { name: '6 hour', seconds: 3600 * 6, formatter: function(d) { return self.formatTime(d) } }, { name: 'hour', seconds: 3600, formatter: function(d) { return self.formatTime(d) } }, { name: '15 minute', seconds: 60 * 15, formatter: function(d) { return self.formatTime(d) } }, { name: 'minute', seconds: 60, formatter: function(d) { return d.getUTCMinutes() } }, { name: '15 second', seconds: 15, formatter: function(d) { return d.getUTCSeconds() + 's' } }, { name: 'second', seconds: 1, formatter: function(d) { return d.getUTCSeconds() + 's' } } ]; this.unit = function(unitName) { return this.units.filter( function(unit) { return unitName == unit.name } ).shift(); }; this.formatDate = function(d) { return d.toUTCString().match(/, (\w+ \w+ \w+)/)[1]; }; this.formatTime = function(d) { return d.toUTCString().match(/(\d+:\d+):/)[1]; }; this.ceil = function(time, unit) { if (unit.name == 'month') { var nearFuture = new Date((time + unit.seconds - 1) * 1000); var rounded = new Date(0); rounded.setUTCFullYear(nearFuture.getUTCFullYear()); rounded.setUTCMonth(nearFuture.getUTCMonth()); rounded.setUTCDate(1); rounded.setUTCHours(0); rounded.setUTCMinutes(0); rounded.setUTCSeconds(0); rounded.setUTCMilliseconds(0); return rounded.getTime() / 1000; } if (unit.name == 'year') { var nearFuture = new Date((time + unit.seconds - 1) * 1000); var rounded = new Date(0); rounded.setUTCFullYear(nearFuture.getUTCFullYear()); rounded.setUTCMonth(0); rounded.setUTCDate(1); rounded.setUTCHours(0); rounded.setUTCMinutes(0); rounded.setUTCSeconds(0); rounded.setUTCMilliseconds(0); return rounded.getTime() / 1000; } return Math.ceil(time / unit.seconds) * unit.seconds; }; }; Rickshaw.namespace('Rickshaw.Fixtures.Number'); Rickshaw.Fixtures.Number.formatKMBT = function(y) { if (y >= 1000000000000) { return y / 1000000000000 + "T" } else if (y >= 1000000000) { return y / 1000000000 + "B" } else if (y >= 1000000) { return y / 1000000 + "M" } else if (y >= 1000) { return y / 1000 + "K" } else if (y < 1 && y > 0) { return y.toFixed(2) } else if (y == 0) { return '' } else { return y } }; Rickshaw.Fixtures.Number.formatBase1024KMGTP = function(y) { if (y >= 1125899906842624) { return y / 1125899906842624 + "P" } else if (y >= 1099511627776){ return y / 1099511627776 + "T" } else if (y >= 1073741824) { return y / 1073741824 + "G" } else if (y >= 1048576) { return y / 1048576 + "M" } else if (y >= 1024) { return y / 1024 + "K" } else if (y < 1 && y > 0) { return y.toFixed(2) } else if (y == 0) { return '' } else { return y } }; Rickshaw.namespace("Rickshaw.Color.Palette"); Rickshaw.Color.Palette = function(args) { var color = new Rickshaw.Fixtures.Color(); args = args || {}; this.schemes = {}; this.scheme = color.schemes[args.scheme] || args.scheme || color.schemes.colorwheel; this.runningIndex = 0; this.generatorIndex = 0; if (args.interpolatedStopCount) { var schemeCount = this.scheme.length - 1; 
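// Illustrative sketch (not part of the upstream library): interpolatedStopCount expands the
// chosen scheme by inserting evenly spaced d3.interpolateHsl stops between each adjacent pair
// of colors, as the loop below does. The option values and variable name are assumptions:
//   var palette = new Rickshaw.Color.Palette({ scheme: 'cool', interpolatedStopCount: 3 });
//   palette.color(); // first (possibly interpolated) color in the expanded scheme
//   palette.color(); // next color; once exhausted, interpolateColor() keeps generating more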
var i, j, scheme = []; for (i = 0; i < schemeCount; i++) { scheme.push(this.scheme[i]); var generator = d3.interpolateHsl(this.scheme[i], this.scheme[i + 1]); for (j = 1; j < args.interpolatedStopCount; j++) { scheme.push(generator((1 / args.interpolatedStopCount) * j)); } } scheme.push(this.scheme[this.scheme.length - 1]); this.scheme = scheme; } this.rotateCount = this.scheme.length; this.color = function(key) { return this.scheme[key] || this.scheme[this.runningIndex++] || this.interpolateColor() || '#808080'; }; this.interpolateColor = function() { if (!Array.isArray(this.scheme)) return; var color; if (this.generatorIndex == this.rotateCount * 2 - 1) { color = d3.interpolateHsl(this.scheme[this.generatorIndex], this.scheme[0])(0.5); this.generatorIndex = 0; this.rotateCount *= 2; } else { color = d3.interpolateHsl(this.scheme[this.generatorIndex], this.scheme[this.generatorIndex + 1])(0.5); this.generatorIndex++; } this.scheme.push(color); return color; }; }; Rickshaw.namespace('Rickshaw.Graph.Ajax'); Rickshaw.Graph.Ajax = Rickshaw.Class.create( { initialize: function(args) { this.dataURL = args.dataURL; this.onData = args.onData || function(d) { return d }; this.onComplete = args.onComplete || function() {}; this.onError = args.onError || function() {}; this.args = args; // pass through to Rickshaw.Graph this.request(); }, request: function() { $.ajax( { url: this.dataURL, dataType: 'json', success: this.success.bind(this), error: this.error.bind(this) } ); }, error: function() { console.log("error loading dataURL: " + this.dataURL); this.onError(this); }, success: function(data, status) { data = this.onData(data); this.args.series = this._splice({ data: data, series: this.args.series }); this.graph = new Rickshaw.Graph(this.args); this.graph.render(); this.onComplete(this); }, _splice: function(args) { var data = args.data; var series = args.series; if (!args.series) return data; series.forEach( function(s) { var seriesKey = s.key || s.name; if (!seriesKey) throw "series needs a key or a name"; data.forEach( function(d) { var dataKey = d.key || d.name; if (!dataKey) throw "data needs a key or a name"; if (seriesKey == dataKey) { var properties = ['color', 'name', 'data']; properties.forEach( function(p) { s[p] = s[p] || d[p]; } ); } } ); } ); return series; } } ); Rickshaw.namespace('Rickshaw.Graph.Annotate'); Rickshaw.Graph.Annotate = function(args) { var graph = this.graph = args.graph; this.elements = { timeline: args.element }; var self = this; this.data = {}; this.elements.timeline.classList.add('rickshaw_annotation_timeline'); this.add = function(time, content, end_time) { self.data[time] = self.data[time] || {'boxes': []}; self.data[time].boxes.push({content: content, end: end_time}); }; this.update = function() { Rickshaw.keys(self.data).forEach( function(time) { var annotation = self.data[time]; var left = self.graph.x(time); if (left < 0 || left > self.graph.x.range()[1]) { if (annotation.element) { annotation.line.classList.add('offscreen'); annotation.element.style.display = 'none'; } annotation.boxes.forEach( function(box) { if ( box.rangeElement ) box.rangeElement.classList.add('offscreen'); }); return; } if (!annotation.element) { var element = annotation.element = document.createElement('div'); element.classList.add('annotation'); this.elements.timeline.appendChild(element); element.addEventListener('click', function(e) { element.classList.toggle('active'); annotation.line.classList.toggle('active'); annotation.boxes.forEach( function(box) { if ( box.rangeElement ) 
box.rangeElement.classList.toggle('active'); }); }, false); } annotation.element.style.left = left + 'px'; annotation.element.style.display = 'block'; annotation.boxes.forEach( function(box) { var element = box.element; if (!element) { element = box.element = document.createElement('div'); element.classList.add('content'); element.innerHTML = box.content; annotation.element.appendChild(element); annotation.line = document.createElement('div'); annotation.line.classList.add('annotation_line'); self.graph.element.appendChild(annotation.line); if ( box.end ) { box.rangeElement = document.createElement('div'); box.rangeElement.classList.add('annotation_range'); self.graph.element.appendChild(box.rangeElement); } } if ( box.end ) { var annotationRangeStart = left; var annotationRangeEnd = Math.min( self.graph.x(box.end), self.graph.x.range()[1] ); // annotation makes more sense at end if ( annotationRangeStart > annotationRangeEnd ) { annotationRangeEnd = left; annotationRangeStart = Math.max( self.graph.x(box.end), self.graph.x.range()[0] ); } var annotationRangeWidth = annotationRangeEnd - annotationRangeStart; box.rangeElement.style.left = annotationRangeStart + 'px'; box.rangeElement.style.width = annotationRangeWidth + 'px' box.rangeElement.classList.remove('offscreen'); } annotation.line.classList.remove('offscreen'); annotation.line.style.left = left + 'px'; } ); }, this ); }; this.graph.onUpdate( function() { self.update() } ); }; Rickshaw.namespace('Rickshaw.Graph.Axis.Time'); Rickshaw.Graph.Axis.Time = function(args) { var self = this; this.graph = args.graph; this.elements = []; this.ticksTreatment = args.ticksTreatment || 'plain'; this.fixedTimeUnit = args.timeUnit; var time = new Rickshaw.Fixtures.Time(); this.appropriateTimeUnit = function() { var unit; var units = time.units; var domain = this.graph.x.domain(); var rangeSeconds = domain[1] - domain[0]; units.forEach( function(u) { if (Math.floor(rangeSeconds / u.seconds) >= 2) { unit = unit || u; } } ); return (unit || time.units[time.units.length - 1]); }; this.tickOffsets = function() { var domain = this.graph.x.domain(); var unit = this.fixedTimeUnit || this.appropriateTimeUnit(); var count = Math.ceil((domain[1] - domain[0]) / unit.seconds); var runningTick = domain[0]; var offsets = []; for (var i = 0; i < count; i++) { tickValue = time.ceil(runningTick, unit); runningTick = tickValue + unit.seconds / 2; offsets.push( { value: tickValue, unit: unit } ); } return offsets; }; this.render = function() { this.elements.forEach( function(e) { e.parentNode.removeChild(e); } ); this.elements = []; var offsets = this.tickOffsets(); offsets.forEach( function(o) { if (self.graph.x(o.value) > self.graph.x.range()[1]) return; var element = document.createElement('div'); element.style.left = self.graph.x(o.value) + 'px'; element.classList.add('x_tick'); element.classList.add(self.ticksTreatment); var title = document.createElement('div'); title.classList.add('title'); title.innerHTML = o.unit.formatter(new Date(o.value * 1000)); element.appendChild(title); self.graph.element.appendChild(element); self.elements.push(element); } ); }; this.graph.onUpdate( function() { self.render() } ); }; Rickshaw.namespace('Rickshaw.Graph.Axis.Y'); Rickshaw.Graph.Axis.Y = function(args) { var self = this; var berthRate = 0.10; this.initialize = function(args) { this.graph = args.graph; this.orientation = args.orientation || 'right'; var pixelsPerTick = args.pixelsPerTick || 75; this.ticks = args.ticks || Math.floor(this.graph.height / pixelsPerTick); 
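// Illustrative sketch (not part of the upstream library): a y-axis is normally attached to an
// existing graph, rendered either into the graph's own svg or into a separate element. The
// variable names and element id below are assumptions for illustration only:
//   var yAxis = new Rickshaw.Graph.Axis.Y({
//       graph: graph,                                 // an existing Rickshaw.Graph
//       orientation: 'left',
//       tickFormat: Rickshaw.Fixtures.Number.formatKMBT,
//       element: document.getElementById('y_axis')    // optional; omit to draw inside the graph
//   });
//   graph.render(); // the axis re-renders itself via the graph.onUpdate() callback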
this.tickSize = args.tickSize || 4; this.ticksTreatment = args.ticksTreatment || 'plain'; if (args.element) { this.element = args.element; this.vis = d3.select(args.element) .append("svg:svg") .attr('class', 'rickshaw_graph y_axis'); this.element = this.vis[0][0]; this.element.style.position = 'relative'; this.setSize({ width: args.width, height: args.height }); } else { this.vis = this.graph.vis; } this.graph.onUpdate( function() { self.render() } ); }; this.setSize = function(args) { args = args || {}; if (!this.element) return; if (typeof window !== 'undefined') { var style = window.getComputedStyle(this.element.parentNode, null); var elementWidth = parseInt(style.getPropertyValue('width')); if (!args.auto) { var elementHeight = parseInt(style.getPropertyValue('height')); } } this.width = args.width || elementWidth || this.graph.width * berthRate; this.height = args.height || elementHeight || this.graph.height; this.vis .attr('width', this.width) .attr('height', this.height * (1 + berthRate)); var berth = this.height * berthRate; this.element.style.top = -1 * berth + 'px'; }; this.render = function() { if (this.graph.height !== this._renderHeight) this.setSize({ auto: true }); var axis = d3.svg.axis().scale(this.graph.y).orient(this.orientation); axis.tickFormat( args.tickFormat || function(y) { return y } ); if (this.orientation == 'left') { var berth = this.height * berthRate; var transform = 'translate(' + this.width + ', ' + berth + ')'; } if (this.element) { this.vis.selectAll('*').remove(); } this.vis .append("svg:g") .attr("class", ["y_ticks", this.ticksTreatment].join(" ")) .attr("transform", transform) .call(axis.ticks(this.ticks).tickSubdivide(0).tickSize(this.tickSize)) var gridSize = (this.orientation == 'right' ? 1 : -1) * this.graph.width; this.graph.vis .append("svg:g") .attr("class", "y_grid") .call(axis.ticks(this.ticks).tickSubdivide(0).tickSize(gridSize)); this._renderHeight = this.graph.height; }; this.initialize(args); }; Rickshaw.namespace('Rickshaw.Graph.Behavior.Series.Highlight'); Rickshaw.Graph.Behavior.Series.Highlight = function(args) { this.graph = args.graph; this.legend = args.legend; var self = this; var colorSafe = {}; this.addHighlightEvents = function (l) { l.element.addEventListener( 'mouseover', function(e) { self.legend.lines.forEach( function(line) { if (l === line) return; colorSafe[line.series.name] = colorSafe[line.series.name] || line.series.color; line.series.color = d3.interpolateRgb(line.series.color, d3.rgb('#d8d8d8'))(0.8).toString(); } ); self.graph.update(); }, false ); l.element.addEventListener( 'mouseout', function(e) { self.legend.lines.forEach( function(line) { if (colorSafe[line.series.name]) { line.series.color = colorSafe[line.series.name]; } } ); self.graph.update(); }, false ); }; if (this.legend) { this.legend.lines.forEach( function(l) { self.addHighlightEvents(l); } ); } }; Rickshaw.namespace('Rickshaw.Graph.Behavior.Series.Order'); Rickshaw.Graph.Behavior.Series.Order = function(args) { this.graph = args.graph; this.legend = args.legend; var self = this; $(function() { $(self.legend.list).sortable( { containment: 'parent', tolerance: 'pointer', update: function( event, ui ) { var series = []; $(self.legend.list).find('li').each( function(index, item) { if (!item.series) return; series.push(item.series); } ); for (var i = self.graph.series.length - 1; i >= 0; i--) { self.graph.series[i] = series.shift(); } self.graph.update(); } } ); $(self.legend.list).disableSelection(); }); //hack to make jquery-ui sortable behave 
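// Illustrative sketch (not part of the upstream library): this behavior assumes jQuery UI is
// loaded and is wired to an existing legend, so that dragging legend entries reorders
// graph.series and triggers graph.update(). The variable names are assumptions:
//   var order = new Rickshaw.Graph.Behavior.Series.Order({
//       graph: graph,    // an existing Rickshaw.Graph
//       legend: legend   // an existing Rickshaw.Graph.Legend
//   });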
this.graph.onUpdate( function() { var h = window.getComputedStyle(self.legend.element).height; self.legend.element.style.height = h; } ); }; Rickshaw.namespace('Rickshaw.Graph.Behavior.Series.Toggle'); Rickshaw.Graph.Behavior.Series.Toggle = function(args) { this.graph = args.graph; this.legend = args.legend; var self = this; this.addAnchor = function(line) { var anchor = document.createElement('a'); anchor.innerHTML = '✔'; anchor.classList.add('action'); line.element.insertBefore(anchor, line.element.firstChild); anchor.onclick = function(e) { if (line.series.disabled) { line.series.enable(); line.element.classList.remove('disabled'); } else { line.series.disable(); line.element.classList.add('disabled'); } } var label = line.element.getElementsByTagName('span')[0]; label.onclick = function(e){ var disableAllOtherLines = line.series.disabled; if ( ! disableAllOtherLines ) { for ( var i = 0; i < self.legend.lines.length; i++ ) { var l = self.legend.lines[i]; if ( line.series === l.series ) { // noop } else if ( l.series.disabled ) { // noop } else { disableAllOtherLines = true; break; } } } // show all or none if ( disableAllOtherLines ) { // these must happen first or else we try ( and probably fail ) to make a no line graph line.series.enable(); line.element.classList.remove('disabled'); self.legend.lines.forEach(function(l){ if ( line.series === l.series ) { // noop } else { l.series.disable(); l.element.classList.add('disabled'); } }); } else { self.legend.lines.forEach(function(l){ l.series.enable(); l.element.classList.remove('disabled'); }); } }; }; if (this.legend) { $(this.legend.list).sortable( { start: function(event, ui) { ui.item.bind('no.onclick', function(event) { event.preventDefault(); } ); }, stop: function(event, ui) { setTimeout(function(){ ui.item.unbind('no.onclick'); }, 250); } }) this.legend.lines.forEach( function(l) { self.addAnchor(l); } ); } this._addBehavior = function() { this.graph.series.forEach( function(s) { s.disable = function() { if (self.graph.series.length <= 1) { throw('only one series left'); } s.disabled = true; self.graph.update(); }; s.enable = function() { s.disabled = false; self.graph.update(); }; } ); }; this._addBehavior(); this.updateBehaviour = function () { this._addBehavior() }; }; Rickshaw.namespace('Rickshaw.Graph.HoverDetail'); Rickshaw.Graph.HoverDetail = Rickshaw.Class.create({ initialize: function(args) { var graph = this.graph = args.graph; this.xFormatter = args.xFormatter || function(x) { return new Date( x * 1000 ).toUTCString(); }; this.yFormatter = args.yFormatter || function(y) { return y.toFixed(2); }; var element = this.element = document.createElement('div'); element.className = 'detail'; this.visible = true; graph.element.appendChild(element); this.lastEvent = null; this._addListeners(); this.onShow = args.onShow; this.onHide = args.onHide; this.onRender = args.onRender; this.formatter = args.formatter || this.formatter; }, formatter: function(series, x, y, formattedX, formattedY, d) { return series.name + ': ' + formattedY; }, update: function(e) { e = e || this.lastEvent; if (!e) return; this.lastEvent = e; if (!e.target.nodeName.match(/^(path|svg|rect)$/)) return; var graph = this.graph; var eventX = e.offsetX || e.layerX; var eventY = e.offsetY || e.layerY; var domainX = graph.x.invert(eventX); var stackedData = graph.stackedData; var topSeriesData = stackedData.slice(-1).shift(); var domainIndexScale = d3.scale.linear() .domain([topSeriesData[0].x, topSeriesData.slice(-1).shift().x]) .range([0, 
topSeriesData.length]); var approximateIndex = Math.floor(domainIndexScale(domainX)); var dataIndex = Math.min(approximateIndex || 0, stackedData[0].length - 1); for (var i = approximateIndex; i < stackedData[0].length - 1;) { if (!stackedData[0][i] || !stackedData[0][i + 1]) { break; } if (stackedData[0][i].x <= domainX && stackedData[0][i + 1].x > domainX) { dataIndex = i; break; } if (stackedData[0][i + 1] <= domainX) { i++ } else { i-- } } var domainX = stackedData[0][dataIndex].x; var formattedXValue = this.xFormatter(domainX); var graphX = graph.x(domainX); var order = 0; var detail = graph.series.active() .map( function(s) { return { order: order++, series: s, name: s.name, value: s.stack[dataIndex] } } ); var activeItem; var sortFn = function(a, b) { return (a.value.y0 + a.value.y) - (b.value.y0 + b.value.y); }; var domainMouseY = graph.y.magnitude.invert(graph.element.offsetHeight - eventY); detail.sort(sortFn).forEach( function(d) { d.formattedYValue = (this.yFormatter.constructor == Array) ? this.yFormatter[detail.indexOf(d)](d.value.y) : this.yFormatter(d.value.y); d.graphX = graphX; d.graphY = graph.y(d.value.y0 + d.value.y); if (domainMouseY > d.value.y0 && domainMouseY < d.value.y0 + d.value.y && !activeItem) { activeItem = d; d.active = true; } }, this ); this.element.innerHTML = ''; this.element.style.left = graph.x(domainX) + 'px'; if (this.visible) { this.render( { detail: detail, domainX: domainX, formattedXValue: formattedXValue, mouseX: eventX, mouseY: eventY } ); } }, hide: function() { this.visible = false; this.element.classList.add('inactive'); if (typeof this.onHide == 'function') { this.onHide(); } }, show: function() { this.visible = true; this.element.classList.remove('inactive'); if (typeof this.onShow == 'function') { this.onShow(); } }, render: function(args) { var detail = args.detail; var domainX = args.domainX; var mouseX = args.mouseX; var mouseY = args.mouseY; var formattedXValue = args.formattedXValue; var xLabel = document.createElement('div'); xLabel.className = 'x_label'; xLabel.innerHTML = formattedXValue; this.element.appendChild(xLabel); detail.forEach( function(d) { var item = document.createElement('div'); item.className = 'item'; item.innerHTML = this.formatter(d.series, domainX, d.value.y, formattedXValue, d.formattedYValue, d); item.style.top = this.graph.y(d.value.y0 + d.value.y) + 'px'; this.element.appendChild(item); var dot = document.createElement('div'); dot.className = 'dot'; dot.style.top = item.style.top; dot.style.borderColor = d.series.color; this.element.appendChild(dot); if (d.active) { item.className = 'item active'; dot.className = 'dot active'; } }, this ); this.show(); if (typeof this.onRender == 'function') { this.onRender(args); } }, _addListeners: function() { this.graph.element.addEventListener( 'mousemove', function(e) { this.visible = true; this.update(e) }.bind(this), false ); this.graph.onUpdate( function() { this.update() }.bind(this) ); this.graph.element.addEventListener( 'mouseout', function(e) { if (e.relatedTarget && !(e.relatedTarget.compareDocumentPosition(this.graph.element) & Node.DOCUMENT_POSITION_CONTAINS)) { this.hide(); } }.bind(this), false ); } }); Rickshaw.namespace('Rickshaw.Graph.JSONP'); Rickshaw.Graph.JSONP = Rickshaw.Class.create( Rickshaw.Graph.Ajax, { request: function() { $.ajax( { url: this.dataURL, dataType: 'jsonp', success: this.success.bind(this), error: this.error.bind(this) } ); } } ); Rickshaw.namespace('Rickshaw.Graph.Legend'); Rickshaw.Graph.Legend = function(args) { var element = 
this.element = args.element; var graph = this.graph = args.graph; var self = this; element.classList.add('rickshaw_legend'); var list = this.list = document.createElement('ul'); element.appendChild(list); var series = graph.series .map( function(s) { return s } ) .reverse(); this.lines = []; this.addLine = function (series) { var line = document.createElement('li'); line.className = 'line'; var swatch = document.createElement('div'); swatch.className = 'swatch'; swatch.style.backgroundColor = series.color; line.appendChild(swatch); var label = document.createElement('span'); label.className = 'label'; label.innerHTML = series.name; line.appendChild(label); list.appendChild(line); line.series = series; if (series.noLegend) { line.style.display = 'none'; } var _line = { element: line, series: series }; if (self.shelving) { self.shelving.addAnchor(_line); self.shelving.updateBehaviour(); } if (self.highlighter) { self.highlighter.addHighlightEvents(_line); } self.lines.push(_line); }; series.forEach( function(s) { self.addLine(s); } ); graph.onUpdate( function() {} ); }; Rickshaw.namespace('Rickshaw.Graph.RangeSlider'); Rickshaw.Graph.RangeSlider = function(args) { var element = this.element = args.element; var graph = this.graph = args.graph; $( function() { $(element).slider( { range: true, min: graph.dataDomain()[0], max: graph.dataDomain()[1], values: [ graph.dataDomain()[0], graph.dataDomain()[1] ], slide: function( event, ui ) { graph.window.xMin = ui.values[0]; graph.window.xMax = ui.values[1]; graph.update(); // if we're at an extreme, stick there if (graph.dataDomain()[0] == ui.values[0]) { graph.window.xMin = undefined; } if (graph.dataDomain()[1] == ui.values[1]) { graph.window.xMax = undefined; } } } ); } ); element[0].style.width = graph.width + 'px'; graph.onUpdate( function() { var values = $(element).slider('option', 'values'); $(element).slider('option', 'min', graph.dataDomain()[0]); $(element).slider('option', 'max', graph.dataDomain()[1]); if (graph.window.xMin == undefined) { values[0] = graph.dataDomain()[0]; } if (graph.window.xMax == undefined) { values[1] = graph.dataDomain()[1]; } $(element).slider('option', 'values', values); } ); }; Rickshaw.namespace("Rickshaw.Graph.Renderer"); Rickshaw.Graph.Renderer = Rickshaw.Class.create( { initialize: function(args) { this.graph = args.graph; this.tension = args.tension || this.tension; this.graph.unstacker = this.graph.unstacker || new Rickshaw.Graph.Unstacker( { graph: this.graph } ); this.configure(args); }, seriesPathFactory: function() { //implement in subclass }, seriesStrokeFactory: function() { // implement in subclass }, defaults: function() { return { tension: 0.8, strokeWidth: 2, unstack: true, padding: { top: 0.01, right: 0, bottom: 0.01, left: 0 }, stroke: false, fill: false }; }, domain: function() { var values = []; var stackedData = this.graph.stackedData || this.graph.stackData(); var topSeriesData = this.unstack ? stackedData : [ stackedData.slice(-1).shift() ]; topSeriesData.forEach( function(series) { series.forEach( function(d) { values.push( d.y + d.y0 ); } ); } ); var xMin = stackedData[0][0].x; var xMax = stackedData[0][ stackedData[0].length - 1 ].x; xMin -= (xMax - xMin) * this.padding.left; xMax += (xMax - xMin) * this.padding.right; var yMin = this.graph.min === 'auto' ? 
d3.min( values ) : this.graph.min || 0; var yMax = this.graph.max || d3.max( values ); if (this.graph.min === 'auto' || yMin < 0) { yMin -= (yMax - yMin) * this.padding.bottom; } if (this.graph.max === undefined) { yMax += (yMax - yMin) * this.padding.top; } return { x: [xMin, xMax], y: [yMin, yMax] }; }, render: function() { var graph = this.graph; graph.vis.selectAll('*').remove(); var nodes = graph.vis.selectAll("path") .data(this.graph.stackedData) .enter().append("svg:path") .attr("d", this.seriesPathFactory()); var i = 0; graph.series.forEach( function(series) { if (series.disabled) return; series.path = nodes[0][i++]; this._styleSeries(series); }, this ); }, _styleSeries: function(series) { var fill = this.fill ? series.color : 'none'; var stroke = this.stroke ? series.color : 'none'; series.path.setAttribute('fill', fill); series.path.setAttribute('stroke', stroke); series.path.setAttribute('stroke-width', this.strokeWidth); series.path.setAttribute('class', series.className); }, configure: function(args) { args = args || {}; Rickshaw.keys(this.defaults()).forEach( function(key) { if (!args.hasOwnProperty(key)) { this[key] = this[key] || this.graph[key] || this.defaults()[key]; return; } if (typeof this.defaults()[key] == 'object') { Rickshaw.keys(this.defaults()[key]).forEach( function(k) { this[key][k] = args[key][k] !== undefined ? args[key][k] : this[key][k] !== undefined ? this[key][k] : this.defaults()[key][k]; }, this ); } else { this[key] = args[key] !== undefined ? args[key] : this[key] !== undefined ? this[key] : this.graph[key] !== undefined ? this.graph[key] : this.defaults()[key]; } }, this ); }, setStrokeWidth: function(strokeWidth) { if (strokeWidth !== undefined) { this.strokeWidth = strokeWidth; } }, setTension: function(tension) { if (tension !== undefined) { this.tension = tension; } } } ); Rickshaw.namespace('Rickshaw.Graph.Renderer.Line'); Rickshaw.Graph.Renderer.Line = Rickshaw.Class.create( Rickshaw.Graph.Renderer, { name: 'line', defaults: function($super) { return Rickshaw.extend( $super(), { unstack: true, fill: false, stroke: true } ); }, seriesPathFactory: function() { var graph = this.graph; return d3.svg.line() .x( function(d) { return graph.x(d.x) } ) .y( function(d) { return graph.y(d.y) } ) .interpolate(this.graph.interpolation).tension(this.tension); } } ); Rickshaw.namespace('Rickshaw.Graph.Renderer.Stack'); Rickshaw.Graph.Renderer.Stack = Rickshaw.Class.create( Rickshaw.Graph.Renderer, { name: 'stack', defaults: function($super) { return Rickshaw.extend( $super(), { fill: true, stroke: false, unstack: false } ); }, seriesPathFactory: function() { var graph = this.graph; return d3.svg.area() .x( function(d) { return graph.x(d.x) } ) .y0( function(d) { return graph.y(d.y0) } ) .y1( function(d) { return graph.y(d.y + d.y0) } ) .interpolate(this.graph.interpolation).tension(this.tension); } } ); Rickshaw.namespace('Rickshaw.Graph.Renderer.Bar'); Rickshaw.Graph.Renderer.Bar = Rickshaw.Class.create( Rickshaw.Graph.Renderer, { name: 'bar', defaults: function($super) { var defaults = Rickshaw.extend( $super(), { gapSize: 0.05, unstack: false } ); delete defaults.tension; return defaults; }, initialize: function($super, args) { args = args || {}; this.gapSize = args.gapSize || this.gapSize; $super(args); }, domain: function($super) { var domain = $super(); var frequentInterval = this._frequentInterval(); domain.x[1] += parseInt(frequentInterval.magnitude); return domain; }, barWidth: function() { var stackedData = this.graph.stackedData || 
this.graph.stackData(); var data = stackedData.slice(-1).shift(); var frequentInterval = this._frequentInterval(); var barWidth = this.graph.x(data[0].x + frequentInterval.magnitude * (1 - this.gapSize)); return barWidth; }, render: function() { var graph = this.graph; graph.vis.selectAll('*').remove(); var barWidth = this.barWidth(); var barXOffset = 0; var activeSeriesCount = graph.series.filter( function(s) { return !s.disabled; } ).length; var seriesBarWidth = this.unstack ? barWidth / activeSeriesCount : barWidth; var transform = function(d) { // add a matrix transform for negative values var matrix = [ 1, 0, 0, (d.y < 0 ? -1 : 1), 0, (d.y < 0 ? graph.y.magnitude(Math.abs(d.y)) * 2 : 0) ]; return "matrix(" + matrix.join(',') + ")"; }; graph.series.forEach( function(series) { if (series.disabled) return; var nodes = graph.vis.selectAll("path") .data(series.stack) .enter().append("svg:rect") .attr("x", function(d) { return graph.x(d.x) + barXOffset }) .attr("y", function(d) { return (graph.y(d.y0 + Math.abs(d.y))) * (d.y < 0 ? -1 : 1 ) }) .attr("width", seriesBarWidth) .attr("height", function(d) { return graph.y.magnitude(Math.abs(d.y)) }) .attr("transform", transform); Array.prototype.forEach.call(nodes[0], function(n) { n.setAttribute('fill', series.color); } ); if (this.unstack) barXOffset += seriesBarWidth; }, this ); }, _frequentInterval: function() { var stackedData = this.graph.stackedData || this.graph.stackData(); var data = stackedData.slice(-1).shift(); var intervalCounts = {}; for (var i = 0; i < data.length - 1; i++) { var interval = data[i + 1].x - data[i].x; intervalCounts[interval] = intervalCounts[interval] || 0; intervalCounts[interval]++; } var frequentInterval = { count: 0 }; Rickshaw.keys(intervalCounts).forEach( function(i) { if (frequentInterval.count < intervalCounts[i]) { frequentInterval = { count: intervalCounts[i], magnitude: i }; } } ); this._frequentInterval = function() { return frequentInterval }; return frequentInterval; } } ); Rickshaw.namespace('Rickshaw.Graph.Renderer.Area'); Rickshaw.Graph.Renderer.Area = Rickshaw.Class.create( Rickshaw.Graph.Renderer, { name: 'area', defaults: function($super) { return Rickshaw.extend( $super(), { unstack: false, fill: false, stroke: false } ); }, seriesPathFactory: function() { var graph = this.graph; return d3.svg.area() .x( function(d) { return graph.x(d.x) } ) .y0( function(d) { return graph.y(d.y0) } ) .y1( function(d) { return graph.y(d.y + d.y0) } ) .interpolate(graph.interpolation).tension(this.tension); }, seriesStrokeFactory: function() { var graph = this.graph; return d3.svg.line() .x( function(d) { return graph.x(d.x) } ) .y( function(d) { return graph.y(d.y + d.y0) } ) .interpolate(graph.interpolation).tension(this.tension); }, render: function() { var graph = this.graph; graph.vis.selectAll('*').remove(); var nodes = graph.vis.selectAll("path") .data(this.graph.stackedData) .enter().insert("svg:g", 'g'); nodes.append("svg:path") .attr("d", this.seriesPathFactory()) .attr("class", 'area'); if (this.stroke) { nodes.append("svg:path") .attr("d", this.seriesStrokeFactory()) .attr("class", 'line'); } var i = 0; graph.series.forEach( function(series) { if (series.disabled) return; series.path = nodes[0][i++]; this._styleSeries(series); }, this ); }, _styleSeries: function(series) { if (!series.path) return; d3.select(series.path).select('.area') .attr('fill', series.color); if (this.stroke) { d3.select(series.path).select('.line') .attr('fill', 'none') .attr('stroke', series.stroke || 
d3.interpolateRgb(series.color, 'black')(0.125)) .attr('stroke-width', this.strokeWidth); } if (series.className) { series.path.setAttribute('class', series.className); } } } ); Rickshaw.namespace('Rickshaw.Graph.Renderer.ScatterPlot'); Rickshaw.Graph.Renderer.ScatterPlot = Rickshaw.Class.create( Rickshaw.Graph.Renderer, { name: 'scatterplot', defaults: function($super) { return Rickshaw.extend( $super(), { unstack: true, fill: true, stroke: false, padding:{ top: 0.01, right: 0.01, bottom: 0.01, left: 0.01 }, dotSize: 4 } ); }, initialize: function($super, args) { $super(args); }, render: function() { var graph = this.graph; graph.vis.selectAll('*').remove(); graph.series.forEach( function(series) { if (series.disabled) return; var nodes = graph.vis.selectAll("path") .data(series.stack) .enter().append("svg:circle") .attr("cx", function(d) { return graph.x(d.x) }) .attr("cy", function(d) { return graph.y(d.y) }) .attr("r", function(d) { return ("r" in d) ? d.r : graph.renderer.dotSize}); Array.prototype.forEach.call(nodes[0], function(n) { n.setAttribute('fill', series.color); } ); }, this ); } } ); Rickshaw.namespace('Rickshaw.Graph.Smoother'); Rickshaw.Graph.Smoother = function(args) { this.graph = args.graph; this.element = args.element; var self = this; this.aggregationScale = 1; if (this.element) { $( function() { $(self.element).slider( { min: 1, max: 100, slide: function( event, ui ) { self.setScale(ui.value); self.graph.update(); } } ); } ); } self.graph.stackData.hooks.data.push( { name: 'smoother', orderPosition: 50, f: function(data) { var aggregatedData = []; data.forEach( function(seriesData) { var aggregatedSeriesData = []; while (seriesData.length) { var avgX = 0, avgY = 0; var slice = seriesData.splice(0, self.aggregationScale); slice.forEach( function(d) { avgX += d.x / slice.length; avgY += d.y / slice.length; } ); aggregatedSeriesData.push( { x: avgX, y: avgY } ); } aggregatedData.push(aggregatedSeriesData); } ); return aggregatedData; } } ); this.setScale = function(scale) { if (scale < 1) { throw "scale out of range: " + scale; } this.aggregationScale = scale; this.graph.update(); } }; Rickshaw.namespace('Rickshaw.Graph.Unstacker'); Rickshaw.Graph.Unstacker = function(args) { this.graph = args.graph; var self = this; this.graph.stackData.hooks.after.push( { name: 'unstacker', f: function(data) { if (!self.graph.renderer.unstack) return data; data.forEach( function(seriesData) { seriesData.forEach( function(d) { d.y0 = 0; } ); } ); return data; } } ); }; Rickshaw.namespace('Rickshaw.Series'); Rickshaw.Series = Rickshaw.Class.create( Array, { initialize: function (data, palette, options) { options = options || {} this.palette = new Rickshaw.Color.Palette(palette); this.timeBase = typeof(options.timeBase) === 'undefined' ? Math.floor(new Date().getTime() / 1000) : options.timeBase; var timeInterval = typeof(options.timeInterval) == 'undefined' ? 
1000 : options.timeInterval; this.setTimeInterval(timeInterval); if (data && (typeof(data) == "object") && (data instanceof Array)) { data.forEach( function(item) { this.addItem(item) }, this ); } }, addItem: function(item) { if (typeof(item.name) === 'undefined') { throw('addItem() needs a name'); } item.color = (item.color || this.palette.color(item.name)); item.data = (item.data || []); // backfill, if necessary if ((item.data.length == 0) && this.length && (this.getIndex() > 0)) { this[0].data.forEach( function(plot) { item.data.push({ x: plot.x, y: 0 }); } ); } else if (item.data.length == 0) { item.data.push({ x: this.timeBase - (this.timeInterval || 0), y: 0 }); } this.push(item); if (this.legend) { this.legend.addLine(this.itemByName(item.name)); } }, addData: function(data) { var index = this.getIndex(); Rickshaw.keys(data).forEach( function(name) { if (! this.itemByName(name)) { this.addItem({ name: name }); } }, this ); this.forEach( function(item) { item.data.push({ x: (index * this.timeInterval || 1) + this.timeBase, y: (data[item.name] || 0) }); }, this ); }, getIndex: function () { return (this[0] && this[0].data && this[0].data.length) ? this[0].data.length : 0; }, itemByName: function(name) { for (var i = 0; i < this.length; i++) { if (this[i].name == name) return this[i]; } }, setTimeInterval: function(iv) { this.timeInterval = iv / 1000; }, setTimeBase: function (t) { this.timeBase = t; }, dump: function() { var data = { timeBase: this.timeBase, timeInterval: this.timeInterval, items: [] }; this.forEach( function(item) { var newItem = { color: item.color, name: item.name, data: [] }; item.data.forEach( function(plot) { newItem.data.push({ x: plot.x, y: plot.y }); } ); data.items.push(newItem); } ); return data; }, load: function(data) { if (data.timeInterval) { this.timeInterval = data.timeInterval; } if (data.timeBase) { this.timeBase = data.timeBase; } if (data.items) { data.items.forEach( function(item) { this.push(item); if (this.legend) { this.legend.addLine(this.itemByName(item.name)); } }, this ); } } } ); Rickshaw.Series.zeroFill = function(series) { var x; var i = 0; var data = series.map( function(s) { return s.data } ); while ( i < Math.max.apply(null, data.map( function(d) { return d.length } )) ) { x = Math.min.apply( null, data .filter(function(d) { return d[i] }) .map(function(d) { return d[i].x }) ); data.forEach( function(d) { if (!d[i] || d[i].x != x) { d.splice(i, 0, { x: x, y: 0 }); } } ); i++; } }; Rickshaw.namespace('Rickshaw.Series.FixedDuration'); Rickshaw.Series.FixedDuration = Rickshaw.Class.create(Rickshaw.Series, { initialize: function (data, palette, options) { var options = options || {} if (typeof(options.timeInterval) === 'undefined') { throw new Error('FixedDuration series requires timeInterval'); } if (typeof(options.maxDataPoints) === 'undefined') { throw new Error('FixedDuration series requires maxDataPoints'); } this.palette = new Rickshaw.Color.Palette(palette); this.timeBase = typeof(options.timeBase) === 'undefined' ? 
Math.floor(new Date().getTime() / 1000) : options.timeBase; this.setTimeInterval(options.timeInterval); if (this[0] && this[0].data && this[0].data.length) { this.currentSize = this[0].data.length; this.currentIndex = this[0].data.length; } else { this.currentSize = 0; this.currentIndex = 0; } this.maxDataPoints = options.maxDataPoints; if (data && (typeof(data) == "object") && (data instanceof Array)) { data.forEach( function (item) { this.addItem(item) }, this ); this.currentSize += 1; this.currentIndex += 1; } // reset timeBase for zero-filled values if needed this.timeBase -= (this.maxDataPoints - this.currentSize) * this.timeInterval; // zero-fill up to maxDataPoints size if we don't have that much data yet if ((typeof(this.maxDataPoints) !== 'undefined') && (this.currentSize < this.maxDataPoints)) { for (var i = this.maxDataPoints - this.currentSize - 1; i > 0; i--) { this.currentSize += 1; this.currentIndex += 1; this.forEach( function (item) { item.data.unshift({ x: ((i-1) * this.timeInterval || 1) + this.timeBase, y: 0, i: i }); }, this ); } } }, addData: function($super, data) { $super(data) this.currentSize += 1; this.currentIndex += 1; if (this.maxDataPoints !== undefined) { while (this.currentSize > this.maxDataPoints) { this.dropData(); } } }, dropData: function() { this.forEach(function(item) { item.data.splice(0, 1); } ); this.currentSize -= 1; }, getIndex: function () { return this.currentIndex; } } ); ceilometer-2014.1.5/ceilometer/api/v1/static/d3.v2.js0000664000567000056700000072407212540642615023165 0ustar jenkinsjenkins00000000000000(function() { function d3_class(ctor, properties) { try { for (var key in properties) { Object.defineProperty(ctor.prototype, key, { value: properties[key], enumerable: false }); } } catch (e) { ctor.prototype = properties; } } function d3_arrayCopy(pseudoarray) { var i = -1, n = pseudoarray.length, array = []; while (++i < n) array.push(pseudoarray[i]); return array; } function d3_arraySlice(pseudoarray) { return Array.prototype.slice.call(pseudoarray); } function d3_Map() {} function d3_identity(d) { return d; } function d3_this() { return this; } function d3_true() { return true; } function d3_functor(v) { return typeof v === "function" ? v : function() { return v; }; } function d3_rebind(target, source, method) { return function() { var value = method.apply(source, arguments); return arguments.length ? target : value; }; } function d3_number(x) { return x != null && !isNaN(x); } function d3_zipLength(d) { return d.length; } function d3_splitter(d) { return d == null; } function d3_collapse(s) { return s.trim().replace(/\s+/g, " "); } function d3_range_integerScale(x) { var k = 1; while (x * k % 1) k *= 10; return k; } function d3_dispatch() {} function d3_dispatch_event(dispatch) { function event() { var z = listeners, i = -1, n = z.length, l; while (++i < n) if (l = z[i].on) l.apply(this, arguments); return dispatch; } var listeners = [], listenerByName = new d3_Map; event.on = function(name, listener) { var l = listenerByName.get(name), i; if (arguments.length < 2) return l && l.on; if (l) { l.on = null; listeners = listeners.slice(0, i = listeners.indexOf(l)).concat(listeners.slice(i + 1)); listenerByName.remove(name); } if (listener) listeners.push(listenerByName.set(name, { on: listener })); return dispatch; }; return event; } function d3_format_precision(x, p) { return p - (x ? 
1 + Math.floor(Math.log(x + Math.pow(10, 1 + Math.floor(Math.log(x) / Math.LN10) - p)) / Math.LN10) : 1); } function d3_format_typeDefault(x) { return x + ""; } function d3_format_group(value) { var i = value.lastIndexOf("."), f = i >= 0 ? value.substring(i) : (i = value.length, ""), t = []; while (i > 0) t.push(value.substring(i -= 3, i + 3)); return t.reverse().join(",") + f; } function d3_formatPrefix(d, i) { var k = Math.pow(10, Math.abs(8 - i) * 3); return { scale: i > 8 ? function(d) { return d / k; } : function(d) { return d * k; }, symbol: d }; } function d3_ease_clamp(f) { return function(t) { return t <= 0 ? 0 : t >= 1 ? 1 : f(t); }; } function d3_ease_reverse(f) { return function(t) { return 1 - f(1 - t); }; } function d3_ease_reflect(f) { return function(t) { return .5 * (t < .5 ? f(2 * t) : 2 - f(2 - 2 * t)); }; } function d3_ease_identity(t) { return t; } function d3_ease_poly(e) { return function(t) { return Math.pow(t, e); }; } function d3_ease_sin(t) { return 1 - Math.cos(t * Math.PI / 2); } function d3_ease_exp(t) { return Math.pow(2, 10 * (t - 1)); } function d3_ease_circle(t) { return 1 - Math.sqrt(1 - t * t); } function d3_ease_elastic(a, p) { var s; if (arguments.length < 2) p = .45; if (arguments.length < 1) { a = 1; s = p / 4; } else s = p / (2 * Math.PI) * Math.asin(1 / a); return function(t) { return 1 + a * Math.pow(2, 10 * -t) * Math.sin((t - s) * 2 * Math.PI / p); }; } function d3_ease_back(s) { if (!s) s = 1.70158; return function(t) { return t * t * ((s + 1) * t - s); }; } function d3_ease_bounce(t) { return t < 1 / 2.75 ? 7.5625 * t * t : t < 2 / 2.75 ? 7.5625 * (t -= 1.5 / 2.75) * t + .75 : t < 2.5 / 2.75 ? 7.5625 * (t -= 2.25 / 2.75) * t + .9375 : 7.5625 * (t -= 2.625 / 2.75) * t + .984375; } function d3_eventCancel() { d3.event.stopPropagation(); d3.event.preventDefault(); } function d3_eventSource() { var e = d3.event, s; while (s = e.sourceEvent) e = s; return e; } function d3_eventDispatch(target) { var dispatch = new d3_dispatch, i = 0, n = arguments.length; while (++i < n) dispatch[arguments[i]] = d3_dispatch_event(dispatch); dispatch.of = function(thiz, argumentz) { return function(e1) { try { var e0 = e1.sourceEvent = d3.event; e1.target = target; d3.event = e1; dispatch[e1.type].apply(thiz, argumentz); } finally { d3.event = e0; } }; }; return dispatch; } function d3_transform(m) { var r0 = [ m.a, m.b ], r1 = [ m.c, m.d ], kx = d3_transformNormalize(r0), kz = d3_transformDot(r0, r1), ky = d3_transformNormalize(d3_transformCombine(r1, r0, -kz)) || 0; if (r0[0] * r1[1] < r1[0] * r0[1]) { r0[0] *= -1; r0[1] *= -1; kx *= -1; kz *= -1; } this.rotate = (kx ? Math.atan2(r0[1], r0[0]) : Math.atan2(-r1[0], r1[1])) * d3_transformDegrees; this.translate = [ m.e, m.f ]; this.scale = [ kx, ky ]; this.skew = ky ? Math.atan2(kz, ky) * d3_transformDegrees : 0; } function d3_transformDot(a, b) { return a[0] * b[0] + a[1] * b[1]; } function d3_transformNormalize(a) { var k = Math.sqrt(d3_transformDot(a, a)); if (k) { a[0] /= k; a[1] /= k; } return k; } function d3_transformCombine(a, b, k) { a[0] += k * b[0]; a[1] += k * b[1]; return a; } function d3_interpolateByName(name) { return name == "transform" ? d3.interpolateTransform : d3.interpolate; } function d3_uninterpolateNumber(a, b) { b = b - (a = +a) ? 1 / (b - a) : 0; return function(x) { return (x - a) * b; }; } function d3_uninterpolateClamp(a, b) { b = b - (a = +a) ? 
1 / (b - a) : 0; return function(x) { return Math.max(0, Math.min(1, (x - a) * b)); }; } function d3_Color() {} function d3_rgb(r, g, b) { return new d3_Rgb(r, g, b); } function d3_Rgb(r, g, b) { this.r = r; this.g = g; this.b = b; } function d3_rgb_hex(v) { return v < 16 ? "0" + Math.max(0, v).toString(16) : Math.min(255, v).toString(16); } function d3_rgb_parse(format, rgb, hsl) { var r = 0, g = 0, b = 0, m1, m2, name; m1 = /([a-z]+)\((.*)\)/i.exec(format); if (m1) { m2 = m1[2].split(","); switch (m1[1]) { case "hsl": { return hsl(parseFloat(m2[0]), parseFloat(m2[1]) / 100, parseFloat(m2[2]) / 100); } case "rgb": { return rgb(d3_rgb_parseNumber(m2[0]), d3_rgb_parseNumber(m2[1]), d3_rgb_parseNumber(m2[2])); } } } if (name = d3_rgb_names.get(format)) return rgb(name.r, name.g, name.b); if (format != null && format.charAt(0) === "#") { if (format.length === 4) { r = format.charAt(1); r += r; g = format.charAt(2); g += g; b = format.charAt(3); b += b; } else if (format.length === 7) { r = format.substring(1, 3); g = format.substring(3, 5); b = format.substring(5, 7); } r = parseInt(r, 16); g = parseInt(g, 16); b = parseInt(b, 16); } return rgb(r, g, b); } function d3_rgb_hsl(r, g, b) { var min = Math.min(r /= 255, g /= 255, b /= 255), max = Math.max(r, g, b), d = max - min, h, s, l = (max + min) / 2; if (d) { s = l < .5 ? d / (max + min) : d / (2 - max - min); if (r == max) h = (g - b) / d + (g < b ? 6 : 0); else if (g == max) h = (b - r) / d + 2; else h = (r - g) / d + 4; h *= 60; } else { s = h = 0; } return d3_hsl(h, s, l); } function d3_rgb_lab(r, g, b) { r = d3_rgb_xyz(r); g = d3_rgb_xyz(g); b = d3_rgb_xyz(b); var x = d3_xyz_lab((.4124564 * r + .3575761 * g + .1804375 * b) / d3_lab_X), y = d3_xyz_lab((.2126729 * r + .7151522 * g + .072175 * b) / d3_lab_Y), z = d3_xyz_lab((.0193339 * r + .119192 * g + .9503041 * b) / d3_lab_Z); return d3_lab(116 * y - 16, 500 * (x - y), 200 * (y - z)); } function d3_rgb_xyz(r) { return (r /= 255) <= .04045 ? r / 12.92 : Math.pow((r + .055) / 1.055, 2.4); } function d3_rgb_parseNumber(c) { var f = parseFloat(c); return c.charAt(c.length - 1) === "%" ? Math.round(f * 2.55) : f; } function d3_hsl(h, s, l) { return new d3_Hsl(h, s, l); } function d3_Hsl(h, s, l) { this.h = h; this.s = s; this.l = l; } function d3_hsl_rgb(h, s, l) { function v(h) { if (h > 360) h -= 360; else if (h < 0) h += 360; if (h < 60) return m1 + (m2 - m1) * h / 60; if (h < 180) return m2; if (h < 240) return m1 + (m2 - m1) * (240 - h) / 60; return m1; } function vv(h) { return Math.round(v(h) * 255); } var m1, m2; h = h % 360; if (h < 0) h += 360; s = s < 0 ? 0 : s > 1 ? 1 : s; l = l < 0 ? 0 : l > 1 ? 1 : l; m2 = l <= .5 ? 
l * (1 + s) : l + s - l * s; m1 = 2 * l - m2; return d3_rgb(vv(h + 120), vv(h), vv(h - 120)); } function d3_hcl(h, c, l) { return new d3_Hcl(h, c, l); } function d3_Hcl(h, c, l) { this.h = h; this.c = c; this.l = l; } function d3_hcl_lab(h, c, l) { return d3_lab(l, Math.cos(h *= Math.PI / 180) * c, Math.sin(h) * c); } function d3_lab(l, a, b) { return new d3_Lab(l, a, b); } function d3_Lab(l, a, b) { this.l = l; this.a = a; this.b = b; } function d3_lab_rgb(l, a, b) { var y = (l + 16) / 116, x = y + a / 500, z = y - b / 200; x = d3_lab_xyz(x) * d3_lab_X; y = d3_lab_xyz(y) * d3_lab_Y; z = d3_lab_xyz(z) * d3_lab_Z; return d3_rgb(d3_xyz_rgb(3.2404542 * x - 1.5371385 * y - .4985314 * z), d3_xyz_rgb(-.969266 * x + 1.8760108 * y + .041556 * z), d3_xyz_rgb(.0556434 * x - .2040259 * y + 1.0572252 * z)); } function d3_lab_hcl(l, a, b) { return d3_hcl(Math.atan2(b, a) / Math.PI * 180, Math.sqrt(a * a + b * b), l); } function d3_lab_xyz(x) { return x > .206893034 ? x * x * x : (x - 4 / 29) / 7.787037; } function d3_xyz_lab(x) { return x > .008856 ? Math.pow(x, 1 / 3) : 7.787037 * x + 4 / 29; } function d3_xyz_rgb(r) { return Math.round(255 * (r <= .00304 ? 12.92 * r : 1.055 * Math.pow(r, 1 / 2.4) - .055)); } function d3_selection(groups) { d3_arraySubclass(groups, d3_selectionPrototype); return groups; } function d3_selection_selector(selector) { return function() { return d3_select(selector, this); }; } function d3_selection_selectorAll(selector) { return function() { return d3_selectAll(selector, this); }; } function d3_selection_attr(name, value) { function attrNull() { this.removeAttribute(name); } function attrNullNS() { this.removeAttributeNS(name.space, name.local); } function attrConstant() { this.setAttribute(name, value); } function attrConstantNS() { this.setAttributeNS(name.space, name.local, value); } function attrFunction() { var x = value.apply(this, arguments); if (x == null) this.removeAttribute(name); else this.setAttribute(name, x); } function attrFunctionNS() { var x = value.apply(this, arguments); if (x == null) this.removeAttributeNS(name.space, name.local); else this.setAttributeNS(name.space, name.local, x); } name = d3.ns.qualify(name); return value == null ? name.local ? attrNullNS : attrNull : typeof value === "function" ? name.local ? attrFunctionNS : attrFunction : name.local ? attrConstantNS : attrConstant; } function d3_selection_classedRe(name) { return new RegExp("(?:^|\\s+)" + d3.requote(name) + "(?:\\s+|$)", "g"); } function d3_selection_classed(name, value) { function classedConstant() { var i = -1; while (++i < n) name[i](this, value); } function classedFunction() { var i = -1, x = value.apply(this, arguments); while (++i < n) name[i](this, x); } name = name.trim().split(/\s+/).map(d3_selection_classedName); var n = name.length; return typeof value === "function" ? classedFunction : classedConstant; } function d3_selection_classedName(name) { var re = d3_selection_classedRe(name); return function(node, value) { if (c = node.classList) return value ? c.add(name) : c.remove(name); var c = node.className, cb = c.baseVal != null, cv = cb ? 
c.baseVal : c; if (value) { re.lastIndex = 0; if (!re.test(cv)) { cv = d3_collapse(cv + " " + name); if (cb) c.baseVal = cv; else node.className = cv; } } else if (cv) { cv = d3_collapse(cv.replace(re, " ")); if (cb) c.baseVal = cv; else node.className = cv; } }; } function d3_selection_style(name, value, priority) { function styleNull() { this.style.removeProperty(name); } function styleConstant() { this.style.setProperty(name, value, priority); } function styleFunction() { var x = value.apply(this, arguments); if (x == null) this.style.removeProperty(name); else this.style.setProperty(name, x, priority); } return value == null ? styleNull : typeof value === "function" ? styleFunction : styleConstant; } function d3_selection_property(name, value) { function propertyNull() { delete this[name]; } function propertyConstant() { this[name] = value; } function propertyFunction() { var x = value.apply(this, arguments); if (x == null) delete this[name]; else this[name] = x; } return value == null ? propertyNull : typeof value === "function" ? propertyFunction : propertyConstant; } function d3_selection_dataNode(data) { return { __data__: data }; } function d3_selection_filter(selector) { return function() { return d3_selectMatches(this, selector); }; } function d3_selection_sortComparator(comparator) { if (!arguments.length) comparator = d3.ascending; return function(a, b) { return comparator(a && a.__data__, b && b.__data__); }; } function d3_selection_on(type, listener, capture) { function onRemove() { var wrapper = this[name]; if (wrapper) { this.removeEventListener(type, wrapper, wrapper.$); delete this[name]; } } function onAdd() { function wrapper(e) { var o = d3.event; d3.event = e; args[0] = node.__data__; try { listener.apply(node, args); } finally { d3.event = o; } } var node = this, args = arguments; onRemove.call(this); this.addEventListener(type, this[name] = wrapper, wrapper.$ = capture); wrapper._ = listener; } var name = "__on" + type, i = type.indexOf("."); if (i > 0) type = type.substring(0, i); return listener ? onAdd : onRemove; } function d3_selection_each(groups, callback) { for (var j = 0, m = groups.length; j < m; j++) { for (var group = groups[j], i = 0, n = group.length, node; i < n; i++) { if (node = group[i]) callback(node, i, j); } } return groups; } function d3_selection_enter(selection) { d3_arraySubclass(selection, d3_selection_enterPrototype); return selection; } function d3_transition(groups, id, time) { d3_arraySubclass(groups, d3_transitionPrototype); var tweens = new d3_Map, event = d3.dispatch("start", "end"), ease = d3_transitionEase; groups.id = id; groups.time = time; groups.tween = function(name, tween) { if (arguments.length < 2) return tweens.get(name); if (tween == null) tweens.remove(name); else tweens.set(name, tween); return groups; }; groups.ease = function(value) { if (!arguments.length) return ease; ease = typeof value === "function" ? 
value : d3.ease.apply(d3, arguments); return groups; }; groups.each = function(type, listener) { if (arguments.length < 2) return d3_transition_each.call(groups, type); event.on(type, listener); return groups; }; d3.timer(function(elapsed) { return d3_selection_each(groups, function(node, i, j) { function start(elapsed) { if (lock.active > id) return stop(); lock.active = id; tweens.forEach(function(key, value) { if (value = value.call(node, d, i)) { tweened.push(value); } }); event.start.call(node, d, i); if (!tick(elapsed)) d3.timer(tick, 0, time); return 1; } function tick(elapsed) { if (lock.active !== id) return stop(); var t = (elapsed - delay) / duration, e = ease(t), n = tweened.length; while (n > 0) { tweened[--n].call(node, e); } if (t >= 1) { stop(); d3_transitionId = id; event.end.call(node, d, i); d3_transitionId = 0; return 1; } } function stop() { if (!--lock.count) delete node.__transition__; return 1; } var tweened = [], delay = node.delay, duration = node.duration, lock = (node = node.node).__transition__ || (node.__transition__ = { active: 0, count: 0 }), d = node.__data__; ++lock.count; delay <= elapsed ? start(elapsed) : d3.timer(start, delay, time); }); }, 0, time); return groups; } function d3_transition_each(callback) { var id = d3_transitionId, ease = d3_transitionEase, delay = d3_transitionDelay, duration = d3_transitionDuration; d3_transitionId = this.id; d3_transitionEase = this.ease(); d3_selection_each(this, function(node, i, j) { d3_transitionDelay = node.delay; d3_transitionDuration = node.duration; callback.call(node = node.node, node.__data__, i, j); }); d3_transitionId = id; d3_transitionEase = ease; d3_transitionDelay = delay; d3_transitionDuration = duration; return this; } function d3_tweenNull(d, i, a) { return a != "" && d3_tweenRemove; } function d3_tweenByName(b, name) { return d3.tween(b, d3_interpolateByName(name)); } function d3_timer_step() { var elapsed, now = Date.now(), t1 = d3_timer_queue; while (t1) { elapsed = now - t1.then; if (elapsed >= t1.delay) t1.flush = t1.callback(elapsed); t1 = t1.next; } var delay = d3_timer_flush() - now; if (delay > 24) { if (isFinite(delay)) { clearTimeout(d3_timer_timeout); d3_timer_timeout = setTimeout(d3_timer_step, delay); } d3_timer_interval = 0; } else { d3_timer_interval = 1; d3_timer_frame(d3_timer_step); } } function d3_timer_flush() { var t0 = null, t1 = d3_timer_queue, then = Infinity; while (t1) { if (t1.flush) { delete d3_timer_byId[t1.callback.id]; t1 = t0 ? t0.next = t1.next : d3_timer_queue = t1.next; } else { then = Math.min(then, t1.then + t1.delay); t1 = (t0 = t1).next; } } return then; } function d3_mousePoint(container, e) { var svg = container.ownerSVGElement || container; if (svg.createSVGPoint) { var point = svg.createSVGPoint(); if (d3_mouse_bug44083 < 0 && (window.scrollX || window.scrollY)) { svg = d3.select(document.body).append("svg").style("position", "absolute").style("top", 0).style("left", 0); var ctm = svg[0][0].getScreenCTM(); d3_mouse_bug44083 = !(ctm.f || ctm.e); svg.remove(); } if (d3_mouse_bug44083) { point.x = e.pageX; point.y = e.pageY; } else { point.x = e.clientX; point.y = e.clientY; } point = point.matrixTransform(container.getScreenCTM().inverse()); return [ point.x, point.y ]; } var rect = container.getBoundingClientRect(); return [ e.clientX - rect.left - container.clientLeft, e.clientY - rect.top - container.clientTop ]; } function d3_noop() {} function d3_scaleExtent(domain) { var start = domain[0], stop = domain[domain.length - 1]; return start < stop ? 
[ start, stop ] : [ stop, start ]; } function d3_scaleRange(scale) { return scale.rangeExtent ? scale.rangeExtent() : d3_scaleExtent(scale.range()); } function d3_scale_nice(domain, nice) { var i0 = 0, i1 = domain.length - 1, x0 = domain[i0], x1 = domain[i1], dx; if (x1 < x0) { dx = i0, i0 = i1, i1 = dx; dx = x0, x0 = x1, x1 = dx; } if (nice = nice(x1 - x0)) { domain[i0] = nice.floor(x0); domain[i1] = nice.ceil(x1); } return domain; } function d3_scale_niceDefault() { return Math; } function d3_scale_linear(domain, range, interpolate, clamp) { function rescale() { var linear = Math.min(domain.length, range.length) > 2 ? d3_scale_polylinear : d3_scale_bilinear, uninterpolate = clamp ? d3_uninterpolateClamp : d3_uninterpolateNumber; output = linear(domain, range, uninterpolate, interpolate); input = linear(range, domain, uninterpolate, d3.interpolate); return scale; } function scale(x) { return output(x); } var output, input; scale.invert = function(y) { return input(y); }; scale.domain = function(x) { if (!arguments.length) return domain; domain = x.map(Number); return rescale(); }; scale.range = function(x) { if (!arguments.length) return range; range = x; return rescale(); }; scale.rangeRound = function(x) { return scale.range(x).interpolate(d3.interpolateRound); }; scale.clamp = function(x) { if (!arguments.length) return clamp; clamp = x; return rescale(); }; scale.interpolate = function(x) { if (!arguments.length) return interpolate; interpolate = x; return rescale(); }; scale.ticks = function(m) { return d3_scale_linearTicks(domain, m); }; scale.tickFormat = function(m) { return d3_scale_linearTickFormat(domain, m); }; scale.nice = function() { d3_scale_nice(domain, d3_scale_linearNice); return rescale(); }; scale.copy = function() { return d3_scale_linear(domain, range, interpolate, clamp); }; return rescale(); } function d3_scale_linearRebind(scale, linear) { return d3.rebind(scale, linear, "range", "rangeRound", "interpolate", "clamp"); } function d3_scale_linearNice(dx) { dx = Math.pow(10, Math.round(Math.log(dx) / Math.LN10) - 1); return dx && { floor: function(x) { return Math.floor(x / dx) * dx; }, ceil: function(x) { return Math.ceil(x / dx) * dx; } }; } function d3_scale_linearTickRange(domain, m) { var extent = d3_scaleExtent(domain), span = extent[1] - extent[0], step = Math.pow(10, Math.floor(Math.log(span / m) / Math.LN10)), err = m / span * step; if (err <= .15) step *= 10; else if (err <= .35) step *= 5; else if (err <= .75) step *= 2; extent[0] = Math.ceil(extent[0] / step) * step; extent[1] = Math.floor(extent[1] / step) * step + step * .5; extent[2] = step; return extent; } function d3_scale_linearTicks(domain, m) { return d3.range.apply(d3, d3_scale_linearTickRange(domain, m)); } function d3_scale_linearTickFormat(domain, m) { return d3.format(",." 
+ Math.max(0, -Math.floor(Math.log(d3_scale_linearTickRange(domain, m)[2]) / Math.LN10 + .01)) + "f"); } function d3_scale_bilinear(domain, range, uninterpolate, interpolate) { var u = uninterpolate(domain[0], domain[1]), i = interpolate(range[0], range[1]); return function(x) { return i(u(x)); }; } function d3_scale_polylinear(domain, range, uninterpolate, interpolate) { var u = [], i = [], j = 0, k = Math.min(domain.length, range.length) - 1; if (domain[k] < domain[0]) { domain = domain.slice().reverse(); range = range.slice().reverse(); } while (++j <= k) { u.push(uninterpolate(domain[j - 1], domain[j])); i.push(interpolate(range[j - 1], range[j])); } return function(x) { var j = d3.bisect(domain, x, 1, k) - 1; return i[j](u[j](x)); }; } function d3_scale_log(linear, log) { function scale(x) { return linear(log(x)); } var pow = log.pow; scale.invert = function(x) { return pow(linear.invert(x)); }; scale.domain = function(x) { if (!arguments.length) return linear.domain().map(pow); log = x[0] < 0 ? d3_scale_logn : d3_scale_logp; pow = log.pow; linear.domain(x.map(log)); return scale; }; scale.nice = function() { linear.domain(d3_scale_nice(linear.domain(), d3_scale_niceDefault)); return scale; }; scale.ticks = function() { var extent = d3_scaleExtent(linear.domain()), ticks = []; if (extent.every(isFinite)) { var i = Math.floor(extent[0]), j = Math.ceil(extent[1]), u = pow(extent[0]), v = pow(extent[1]); if (log === d3_scale_logn) { ticks.push(pow(i)); for (; i++ < j; ) for (var k = 9; k > 0; k--) ticks.push(pow(i) * k); } else { for (; i < j; i++) for (var k = 1; k < 10; k++) ticks.push(pow(i) * k); ticks.push(pow(i)); } for (i = 0; ticks[i] < u; i++) {} for (j = ticks.length; ticks[j - 1] > v; j--) {} ticks = ticks.slice(i, j); } return ticks; }; scale.tickFormat = function(n, format) { if (arguments.length < 2) format = d3_scale_logFormat; if (arguments.length < 1) return format; var k = Math.max(.1, n / scale.ticks().length), f = log === d3_scale_logn ? (e = -1e-12, Math.floor) : (e = 1e-12, Math.ceil), e; return function(d) { return d / pow(f(log(d) + e)) <= k ? format(d) : ""; }; }; scale.copy = function() { return d3_scale_log(linear.copy(), log); }; return d3_scale_linearRebind(scale, linear); } function d3_scale_logp(x) { return Math.log(x < 0 ? 0 : x) / Math.LN10; } function d3_scale_logn(x) { return -Math.log(x > 0 ? 0 : -x) / Math.LN10; } function d3_scale_pow(linear, exponent) { function scale(x) { return linear(powp(x)); } var powp = d3_scale_powPow(exponent), powb = d3_scale_powPow(1 / exponent); scale.invert = function(x) { return powb(linear.invert(x)); }; scale.domain = function(x) { if (!arguments.length) return linear.domain().map(powb); linear.domain(x.map(powp)); return scale; }; scale.ticks = function(m) { return d3_scale_linearTicks(scale.domain(), m); }; scale.tickFormat = function(m) { return d3_scale_linearTickFormat(scale.domain(), m); }; scale.nice = function() { return scale.domain(d3_scale_nice(scale.domain(), d3_scale_linearNice)); }; scale.exponent = function(x) { if (!arguments.length) return exponent; var domain = scale.domain(); powp = d3_scale_powPow(exponent = x); powb = d3_scale_powPow(1 / exponent); return scale.domain(domain); }; scale.copy = function() { return d3_scale_pow(linear.copy(), exponent); }; return d3_scale_linearRebind(scale, linear); } function d3_scale_powPow(e) { return function(x) { return x < 0 ? 
-Math.pow(-x, e) : Math.pow(x, e); }; } function d3_scale_ordinal(domain, ranger) { function scale(x) { return range[((index.get(x) || index.set(x, domain.push(x))) - 1) % range.length]; } function steps(start, step) { return d3.range(domain.length).map(function(i) { return start + step * i; }); } var index, range, rangeBand; scale.domain = function(x) { if (!arguments.length) return domain; domain = []; index = new d3_Map; var i = -1, n = x.length, xi; while (++i < n) if (!index.has(xi = x[i])) index.set(xi, domain.push(xi)); return scale[ranger.t].apply(scale, ranger.a); }; scale.range = function(x) { if (!arguments.length) return range; range = x; rangeBand = 0; ranger = { t: "range", a: arguments }; return scale; }; scale.rangePoints = function(x, padding) { if (arguments.length < 2) padding = 0; var start = x[0], stop = x[1], step = (stop - start) / (Math.max(1, domain.length - 1) + padding); range = steps(domain.length < 2 ? (start + stop) / 2 : start + step * padding / 2, step); rangeBand = 0; ranger = { t: "rangePoints", a: arguments }; return scale; }; scale.rangeBands = function(x, padding, outerPadding) { if (arguments.length < 2) padding = 0; if (arguments.length < 3) outerPadding = padding; var reverse = x[1] < x[0], start = x[reverse - 0], stop = x[1 - reverse], step = (stop - start) / (domain.length - padding + 2 * outerPadding); range = steps(start + step * outerPadding, step); if (reverse) range.reverse(); rangeBand = step * (1 - padding); ranger = { t: "rangeBands", a: arguments }; return scale; }; scale.rangeRoundBands = function(x, padding, outerPadding) { if (arguments.length < 2) padding = 0; if (arguments.length < 3) outerPadding = padding; var reverse = x[1] < x[0], start = x[reverse - 0], stop = x[1 - reverse], step = Math.floor((stop - start) / (domain.length - padding + 2 * outerPadding)), error = stop - start - (domain.length - padding) * step; range = steps(start + Math.round(error / 2), step); if (reverse) range.reverse(); rangeBand = Math.round(step * (1 - padding)); ranger = { t: "rangeRoundBands", a: arguments }; return scale; }; scale.rangeBand = function() { return rangeBand; }; scale.rangeExtent = function() { return d3_scaleExtent(ranger.a[0]); }; scale.copy = function() { return d3_scale_ordinal(domain, ranger); }; return scale.domain(domain); } function d3_scale_quantile(domain, range) { function rescale() { var k = 0, n = domain.length, q = range.length; thresholds = []; while (++k < q) thresholds[k - 1] = d3.quantile(domain, k / q); return scale; } function scale(x) { if (isNaN(x = +x)) return NaN; return range[d3.bisect(thresholds, x)]; } var thresholds; scale.domain = function(x) { if (!arguments.length) return domain; domain = x.filter(function(d) { return !isNaN(d); }).sort(d3.ascending); return rescale(); }; scale.range = function(x) { if (!arguments.length) return range; range = x; return rescale(); }; scale.quantiles = function() { return thresholds; }; scale.copy = function() { return d3_scale_quantile(domain, range); }; return rescale(); } function d3_scale_quantize(x0, x1, range) { function scale(x) { return range[Math.max(0, Math.min(i, Math.floor(kx * (x - x0))))]; } function rescale() { kx = range.length / (x1 - x0); i = range.length - 1; return scale; } var kx, i; scale.domain = function(x) { if (!arguments.length) return [ x0, x1 ]; x0 = +x[0]; x1 = +x[x.length - 1]; return rescale(); }; scale.range = function(x) { if (!arguments.length) return range; range = x; return rescale(); }; scale.copy = function() { return 
d3_scale_quantize(x0, x1, range); }; return rescale(); } function d3_scale_threshold(domain, range) { function scale(x) { return range[d3.bisect(domain, x)]; } scale.domain = function(_) { if (!arguments.length) return domain; domain = _; return scale; }; scale.range = function(_) { if (!arguments.length) return range; range = _; return scale; }; scale.copy = function() { return d3_scale_threshold(domain, range); }; return scale; } function d3_scale_identity(domain) { function identity(x) { return +x; } identity.invert = identity; identity.domain = identity.range = function(x) { if (!arguments.length) return domain; domain = x.map(identity); return identity; }; identity.ticks = function(m) { return d3_scale_linearTicks(domain, m); }; identity.tickFormat = function(m) { return d3_scale_linearTickFormat(domain, m); }; identity.copy = function() { return d3_scale_identity(domain); }; return identity; } function d3_svg_arcInnerRadius(d) { return d.innerRadius; } function d3_svg_arcOuterRadius(d) { return d.outerRadius; } function d3_svg_arcStartAngle(d) { return d.startAngle; } function d3_svg_arcEndAngle(d) { return d.endAngle; } function d3_svg_line(projection) { function line(data) { function segment() { segments.push("M", interpolate(projection(points), tension)); } var segments = [], points = [], i = -1, n = data.length, d, fx = d3_functor(x), fy = d3_functor(y); while (++i < n) { if (defined.call(this, d = data[i], i)) { points.push([ +fx.call(this, d, i), +fy.call(this, d, i) ]); } else if (points.length) { segment(); points = []; } } if (points.length) segment(); return segments.length ? segments.join("") : null; } var x = d3_svg_lineX, y = d3_svg_lineY, defined = d3_true, interpolate = d3_svg_lineLinear, interpolateKey = interpolate.key, tension = .7; line.x = function(_) { if (!arguments.length) return x; x = _; return line; }; line.y = function(_) { if (!arguments.length) return y; y = _; return line; }; line.defined = function(_) { if (!arguments.length) return defined; defined = _; return line; }; line.interpolate = function(_) { if (!arguments.length) return interpolateKey; if (typeof _ === "function") interpolateKey = interpolate = _; else interpolateKey = (interpolate = d3_svg_lineInterpolators.get(_) || d3_svg_lineLinear).key; return line; }; line.tension = function(_) { if (!arguments.length) return tension; tension = _; return line; }; return line; } function d3_svg_lineX(d) { return d[0]; } function d3_svg_lineY(d) { return d[1]; } function d3_svg_lineLinear(points) { return points.join("L"); } function d3_svg_lineLinearClosed(points) { return d3_svg_lineLinear(points) + "Z"; } function d3_svg_lineStepBefore(points) { var i = 0, n = points.length, p = points[0], path = [ p[0], ",", p[1] ]; while (++i < n) path.push("V", (p = points[i])[1], "H", p[0]); return path.join(""); } function d3_svg_lineStepAfter(points) { var i = 0, n = points.length, p = points[0], path = [ p[0], ",", p[1] ]; while (++i < n) path.push("H", (p = points[i])[0], "V", p[1]); return path.join(""); } function d3_svg_lineCardinalOpen(points, tension) { return points.length < 4 ? d3_svg_lineLinear(points) : points[1] + d3_svg_lineHermite(points.slice(1, points.length - 1), d3_svg_lineCardinalTangents(points, tension)); } function d3_svg_lineCardinalClosed(points, tension) { return points.length < 3 ? 
d3_svg_lineLinear(points) : points[0] + d3_svg_lineHermite((points.push(points[0]), points), d3_svg_lineCardinalTangents([ points[points.length - 2] ].concat(points, [ points[1] ]), tension)); } function d3_svg_lineCardinal(points, tension, closed) { return points.length < 3 ? d3_svg_lineLinear(points) : points[0] + d3_svg_lineHermite(points, d3_svg_lineCardinalTangents(points, tension)); } function d3_svg_lineHermite(points, tangents) { if (tangents.length < 1 || points.length != tangents.length && points.length != tangents.length + 2) { return d3_svg_lineLinear(points); } var quad = points.length != tangents.length, path = "", p0 = points[0], p = points[1], t0 = tangents[0], t = t0, pi = 1; if (quad) { path += "Q" + (p[0] - t0[0] * 2 / 3) + "," + (p[1] - t0[1] * 2 / 3) + "," + p[0] + "," + p[1]; p0 = points[1]; pi = 2; } if (tangents.length > 1) { t = tangents[1]; p = points[pi]; pi++; path += "C" + (p0[0] + t0[0]) + "," + (p0[1] + t0[1]) + "," + (p[0] - t[0]) + "," + (p[1] - t[1]) + "," + p[0] + "," + p[1]; for (var i = 2; i < tangents.length; i++, pi++) { p = points[pi]; t = tangents[i]; path += "S" + (p[0] - t[0]) + "," + (p[1] - t[1]) + "," + p[0] + "," + p[1]; } } if (quad) { var lp = points[pi]; path += "Q" + (p[0] + t[0] * 2 / 3) + "," + (p[1] + t[1] * 2 / 3) + "," + lp[0] + "," + lp[1]; } return path; } function d3_svg_lineCardinalTangents(points, tension) { var tangents = [], a = (1 - tension) / 2, p0, p1 = points[0], p2 = points[1], i = 1, n = points.length; while (++i < n) { p0 = p1; p1 = p2; p2 = points[i]; tangents.push([ a * (p2[0] - p0[0]), a * (p2[1] - p0[1]) ]); } return tangents; } function d3_svg_lineBasis(points) { if (points.length < 3) return d3_svg_lineLinear(points); var i = 1, n = points.length, pi = points[0], x0 = pi[0], y0 = pi[1], px = [ x0, x0, x0, (pi = points[1])[0] ], py = [ y0, y0, y0, pi[1] ], path = [ x0, ",", y0 ]; d3_svg_lineBasisBezier(path, px, py); while (++i < n) { pi = points[i]; px.shift(); px.push(pi[0]); py.shift(); py.push(pi[1]); d3_svg_lineBasisBezier(path, px, py); } i = -1; while (++i < 2) { px.shift(); px.push(pi[0]); py.shift(); py.push(pi[1]); d3_svg_lineBasisBezier(path, px, py); } return path.join(""); } function d3_svg_lineBasisOpen(points) { if (points.length < 4) return d3_svg_lineLinear(points); var path = [], i = -1, n = points.length, pi, px = [ 0 ], py = [ 0 ]; while (++i < 3) { pi = points[i]; px.push(pi[0]); py.push(pi[1]); } path.push(d3_svg_lineDot4(d3_svg_lineBasisBezier3, px) + "," + d3_svg_lineDot4(d3_svg_lineBasisBezier3, py)); --i; while (++i < n) { pi = points[i]; px.shift(); px.push(pi[0]); py.shift(); py.push(pi[1]); d3_svg_lineBasisBezier(path, px, py); } return path.join(""); } function d3_svg_lineBasisClosed(points) { var path, i = -1, n = points.length, m = n + 4, pi, px = [], py = []; while (++i < 4) { pi = points[i % n]; px.push(pi[0]); py.push(pi[1]); } path = [ d3_svg_lineDot4(d3_svg_lineBasisBezier3, px), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, py) ]; --i; while (++i < m) { pi = points[i % n]; px.shift(); px.push(pi[0]); py.shift(); py.push(pi[1]); d3_svg_lineBasisBezier(path, px, py); } return path.join(""); } function d3_svg_lineBundle(points, tension) { var n = points.length - 1; if (n) { var x0 = points[0][0], y0 = points[0][1], dx = points[n][0] - x0, dy = points[n][1] - y0, i = -1, p, t; while (++i <= n) { p = points[i]; t = i / n; p[0] = tension * p[0] + (1 - tension) * (x0 + t * dx); p[1] = tension * p[1] + (1 - tension) * (y0 + t * dy); } } return d3_svg_lineBasis(points); } function 
d3_svg_lineDot4(a, b) { return a[0] * b[0] + a[1] * b[1] + a[2] * b[2] + a[3] * b[3]; } function d3_svg_lineBasisBezier(path, x, y) { path.push("C", d3_svg_lineDot4(d3_svg_lineBasisBezier1, x), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier1, y), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier2, x), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier2, y), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, x), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, y)); } function d3_svg_lineSlope(p0, p1) { return (p1[1] - p0[1]) / (p1[0] - p0[0]); } function d3_svg_lineFiniteDifferences(points) { var i = 0, j = points.length - 1, m = [], p0 = points[0], p1 = points[1], d = m[0] = d3_svg_lineSlope(p0, p1); while (++i < j) { m[i] = (d + (d = d3_svg_lineSlope(p0 = p1, p1 = points[i + 1]))) / 2; } m[i] = d; return m; } function d3_svg_lineMonotoneTangents(points) { var tangents = [], d, a, b, s, m = d3_svg_lineFiniteDifferences(points), i = -1, j = points.length - 1; while (++i < j) { d = d3_svg_lineSlope(points[i], points[i + 1]); if (Math.abs(d) < 1e-6) { m[i] = m[i + 1] = 0; } else { a = m[i] / d; b = m[i + 1] / d; s = a * a + b * b; if (s > 9) { s = d * 3 / Math.sqrt(s); m[i] = s * a; m[i + 1] = s * b; } } } i = -1; while (++i <= j) { s = (points[Math.min(j, i + 1)][0] - points[Math.max(0, i - 1)][0]) / (6 * (1 + m[i] * m[i])); tangents.push([ s || 0, m[i] * s || 0 ]); } return tangents; } function d3_svg_lineMonotone(points) { return points.length < 3 ? d3_svg_lineLinear(points) : points[0] + d3_svg_lineHermite(points, d3_svg_lineMonotoneTangents(points)); } function d3_svg_lineRadial(points) { var point, i = -1, n = points.length, r, a; while (++i < n) { point = points[i]; r = point[0]; a = point[1] + d3_svg_arcOffset; point[0] = r * Math.cos(a); point[1] = r * Math.sin(a); } return points; } function d3_svg_area(projection) { function area(data) { function segment() { segments.push("M", interpolate(projection(points1), tension), L, interpolateReverse(projection(points0.reverse()), tension), "Z"); } var segments = [], points0 = [], points1 = [], i = -1, n = data.length, d, fx0 = d3_functor(x0), fy0 = d3_functor(y0), fx1 = x0 === x1 ? function() { return x; } : d3_functor(x1), fy1 = y0 === y1 ? function() { return y; } : d3_functor(y1), x, y; while (++i < n) { if (defined.call(this, d = data[i], i)) { points0.push([ x = +fx0.call(this, d, i), y = +fy0.call(this, d, i) ]); points1.push([ +fx1.call(this, d, i), +fy1.call(this, d, i) ]); } else if (points0.length) { segment(); points0 = []; points1 = []; } } if (points0.length) segment(); return segments.length ? 
segments.join("") : null; } var x0 = d3_svg_lineX, x1 = d3_svg_lineX, y0 = 0, y1 = d3_svg_lineY, defined = d3_true, interpolate = d3_svg_lineLinear, interpolateKey = interpolate.key, interpolateReverse = interpolate, L = "L", tension = .7; area.x = function(_) { if (!arguments.length) return x1; x0 = x1 = _; return area; }; area.x0 = function(_) { if (!arguments.length) return x0; x0 = _; return area; }; area.x1 = function(_) { if (!arguments.length) return x1; x1 = _; return area; }; area.y = function(_) { if (!arguments.length) return y1; y0 = y1 = _; return area; }; area.y0 = function(_) { if (!arguments.length) return y0; y0 = _; return area; }; area.y1 = function(_) { if (!arguments.length) return y1; y1 = _; return area; }; area.defined = function(_) { if (!arguments.length) return defined; defined = _; return area; }; area.interpolate = function(_) { if (!arguments.length) return interpolateKey; if (typeof _ === "function") interpolateKey = interpolate = _; else interpolateKey = (interpolate = d3_svg_lineInterpolators.get(_) || d3_svg_lineLinear).key; interpolateReverse = interpolate.reverse || interpolate; L = interpolate.closed ? "M" : "L"; return area; }; area.tension = function(_) { if (!arguments.length) return tension; tension = _; return area; }; return area; } function d3_svg_chordSource(d) { return d.source; } function d3_svg_chordTarget(d) { return d.target; } function d3_svg_chordRadius(d) { return d.radius; } function d3_svg_chordStartAngle(d) { return d.startAngle; } function d3_svg_chordEndAngle(d) { return d.endAngle; } function d3_svg_diagonalProjection(d) { return [ d.x, d.y ]; } function d3_svg_diagonalRadialProjection(projection) { return function() { var d = projection.apply(this, arguments), r = d[0], a = d[1] + d3_svg_arcOffset; return [ r * Math.cos(a), r * Math.sin(a) ]; }; } function d3_svg_symbolSize() { return 64; } function d3_svg_symbolType() { return "circle"; } function d3_svg_symbolCircle(size) { var r = Math.sqrt(size / Math.PI); return "M0," + r + "A" + r + "," + r + " 0 1,1 0," + -r + "A" + r + "," + r + " 0 1,1 0," + r + "Z"; } function d3_svg_axisX(selection, x) { selection.attr("transform", function(d) { return "translate(" + x(d) + ",0)"; }); } function d3_svg_axisY(selection, y) { selection.attr("transform", function(d) { return "translate(0," + y(d) + ")"; }); } function d3_svg_axisSubdivide(scale, ticks, m) { subticks = []; if (m && ticks.length > 1) { var extent = d3_scaleExtent(scale.domain()), subticks, i = -1, n = ticks.length, d = (ticks[1] - ticks[0]) / ++m, j, v; while (++i < n) { for (j = m; --j > 0; ) { if ((v = +ticks[i] - j * d) >= extent[0]) { subticks.push(v); } } } for (--i, j = 0; ++j < m && (v = +ticks[i] + j * d) < extent[1]; ) { subticks.push(v); } } return subticks; } function d3_behavior_zoomDelta() { if (!d3_behavior_zoomDiv) { d3_behavior_zoomDiv = d3.select("body").append("div").style("visibility", "hidden").style("top", 0).style("height", 0).style("width", 0).style("overflow-y", "scroll").append("div").style("height", "2000px").node().parentNode; } var e = d3.event, delta; try { d3_behavior_zoomDiv.scrollTop = 1e3; d3_behavior_zoomDiv.dispatchEvent(e); delta = 1e3 - d3_behavior_zoomDiv.scrollTop; } catch (error) { delta = e.wheelDelta || -e.detail * 5; } return delta; } function d3_layout_bundlePath(link) { var start = link.source, end = link.target, lca = d3_layout_bundleLeastCommonAncestor(start, end), points = [ start ]; while (start !== lca) { start = start.parent; points.push(start); } var k = points.length; 
while (end !== lca) { points.splice(k, 0, end); end = end.parent; } return points; } function d3_layout_bundleAncestors(node) { var ancestors = [], parent = node.parent; while (parent != null) { ancestors.push(node); node = parent; parent = parent.parent; } ancestors.push(node); return ancestors; } function d3_layout_bundleLeastCommonAncestor(a, b) { if (a === b) return a; var aNodes = d3_layout_bundleAncestors(a), bNodes = d3_layout_bundleAncestors(b), aNode = aNodes.pop(), bNode = bNodes.pop(), sharedNode = null; while (aNode === bNode) { sharedNode = aNode; aNode = aNodes.pop(); bNode = bNodes.pop(); } return sharedNode; } function d3_layout_forceDragstart(d) { d.fixed |= 2; } function d3_layout_forceDragend(d) { d.fixed &= 1; } function d3_layout_forceMouseover(d) { d.fixed |= 4; } function d3_layout_forceMouseout(d) { d.fixed &= 3; } function d3_layout_forceAccumulate(quad, alpha, charges) { var cx = 0, cy = 0; quad.charge = 0; if (!quad.leaf) { var nodes = quad.nodes, n = nodes.length, i = -1, c; while (++i < n) { c = nodes[i]; if (c == null) continue; d3_layout_forceAccumulate(c, alpha, charges); quad.charge += c.charge; cx += c.charge * c.cx; cy += c.charge * c.cy; } } if (quad.point) { if (!quad.leaf) { quad.point.x += Math.random() - .5; quad.point.y += Math.random() - .5; } var k = alpha * charges[quad.point.index]; quad.charge += quad.pointCharge = k; cx += k * quad.point.x; cy += k * quad.point.y; } quad.cx = cx / quad.charge; quad.cy = cy / quad.charge; } function d3_layout_forceLinkDistance(link) { return 20; } function d3_layout_forceLinkStrength(link) { return 1; } function d3_layout_stackX(d) { return d.x; } function d3_layout_stackY(d) { return d.y; } function d3_layout_stackOut(d, y0, y) { d.y0 = y0; d.y = y; } function d3_layout_stackOrderDefault(data) { return d3.range(data.length); } function d3_layout_stackOffsetZero(data) { var j = -1, m = data[0].length, y0 = []; while (++j < m) y0[j] = 0; return y0; } function d3_layout_stackMaxIndex(array) { var i = 1, j = 0, v = array[0][1], k, n = array.length; for (; i < n; ++i) { if ((k = array[i][1]) > v) { j = i; v = k; } } return j; } function d3_layout_stackReduceSum(d) { return d.reduce(d3_layout_stackSum, 0); } function d3_layout_stackSum(p, d) { return p + d[1]; } function d3_layout_histogramBinSturges(range, values) { return d3_layout_histogramBinFixed(range, Math.ceil(Math.log(values.length) / Math.LN2 + 1)); } function d3_layout_histogramBinFixed(range, n) { var x = -1, b = +range[0], m = (range[1] - b) / n, f = []; while (++x <= n) f[x] = m * x + b; return f; } function d3_layout_histogramRange(values) { return [ d3.min(values), d3.max(values) ]; } function d3_layout_hierarchyRebind(object, hierarchy) { d3.rebind(object, hierarchy, "sort", "children", "value"); object.links = d3_layout_hierarchyLinks; object.nodes = function(d) { d3_layout_hierarchyInline = true; return (object.nodes = object)(d); }; return object; } function d3_layout_hierarchyChildren(d) { return d.children; } function d3_layout_hierarchyValue(d) { return d.value; } function d3_layout_hierarchySort(a, b) { return b.value - a.value; } function d3_layout_hierarchyLinks(nodes) { return d3.merge(nodes.map(function(parent) { return (parent.children || []).map(function(child) { return { source: parent, target: child }; }); })); } function d3_layout_packSort(a, b) { return a.value - b.value; } function d3_layout_packInsert(a, b) { var c = a._pack_next; a._pack_next = b; b._pack_prev = a; b._pack_next = c; c._pack_prev = b; } function 
d3_layout_packSplice(a, b) { a._pack_next = b; b._pack_prev = a; } function d3_layout_packIntersects(a, b) { var dx = b.x - a.x, dy = b.y - a.y, dr = a.r + b.r; return dr * dr - dx * dx - dy * dy > .001; } function d3_layout_packSiblings(node) { function bound(node) { xMin = Math.min(node.x - node.r, xMin); xMax = Math.max(node.x + node.r, xMax); yMin = Math.min(node.y - node.r, yMin); yMax = Math.max(node.y + node.r, yMax); } if (!(nodes = node.children) || !(n = nodes.length)) return; var nodes, xMin = Infinity, xMax = -Infinity, yMin = Infinity, yMax = -Infinity, a, b, c, i, j, k, n; nodes.forEach(d3_layout_packLink); a = nodes[0]; a.x = -a.r; a.y = 0; bound(a); if (n > 1) { b = nodes[1]; b.x = b.r; b.y = 0; bound(b); if (n > 2) { c = nodes[2]; d3_layout_packPlace(a, b, c); bound(c); d3_layout_packInsert(a, c); a._pack_prev = c; d3_layout_packInsert(c, b); b = a._pack_next; for (i = 3; i < n; i++) { d3_layout_packPlace(a, b, c = nodes[i]); var isect = 0, s1 = 1, s2 = 1; for (j = b._pack_next; j !== b; j = j._pack_next, s1++) { if (d3_layout_packIntersects(j, c)) { isect = 1; break; } } if (isect == 1) { for (k = a._pack_prev; k !== j._pack_prev; k = k._pack_prev, s2++) { if (d3_layout_packIntersects(k, c)) { break; } } } if (isect) { if (s1 < s2 || s1 == s2 && b.r < a.r) d3_layout_packSplice(a, b = j); else d3_layout_packSplice(a = k, b); i--; } else { d3_layout_packInsert(a, c); b = c; bound(c); } } } } var cx = (xMin + xMax) / 2, cy = (yMin + yMax) / 2, cr = 0; for (i = 0; i < n; i++) { c = nodes[i]; c.x -= cx; c.y -= cy; cr = Math.max(cr, c.r + Math.sqrt(c.x * c.x + c.y * c.y)); } node.r = cr; nodes.forEach(d3_layout_packUnlink); } function d3_layout_packLink(node) { node._pack_next = node._pack_prev = node; } function d3_layout_packUnlink(node) { delete node._pack_next; delete node._pack_prev; } function d3_layout_packTransform(node, x, y, k) { var children = node.children; node.x = x += k * node.x; node.y = y += k * node.y; node.r *= k; if (children) { var i = -1, n = children.length; while (++i < n) d3_layout_packTransform(children[i], x, y, k); } } function d3_layout_packPlace(a, b, c) { var db = a.r + c.r, dx = b.x - a.x, dy = b.y - a.y; if (db && (dx || dy)) { var da = b.r + c.r, dc = dx * dx + dy * dy; da *= da; db *= db; var x = .5 + (db - da) / (2 * dc), y = Math.sqrt(Math.max(0, 2 * da * (db + dc) - (db -= dc) * db - da * da)) / (2 * dc); c.x = a.x + x * dx + y * dy; c.y = a.y + x * dy - y * dx; } else { c.x = a.x + db; c.y = a.y; } } function d3_layout_clusterY(children) { return 1 + d3.max(children, function(child) { return child.y; }); } function d3_layout_clusterX(children) { return children.reduce(function(x, child) { return x + child.x; }, 0) / children.length; } function d3_layout_clusterLeft(node) { var children = node.children; return children && children.length ? d3_layout_clusterLeft(children[0]) : node; } function d3_layout_clusterRight(node) { var children = node.children, n; return children && (n = children.length) ? d3_layout_clusterRight(children[n - 1]) : node; } function d3_layout_treeSeparation(a, b) { return a.parent == b.parent ? 1 : 2; } function d3_layout_treeLeft(node) { var children = node.children; return children && children.length ? children[0] : node._tree.thread; } function d3_layout_treeRight(node) { var children = node.children, n; return children && (n = children.length) ? 
children[n - 1] : node._tree.thread; } function d3_layout_treeSearch(node, compare) { var children = node.children; if (children && (n = children.length)) { var child, n, i = -1; while (++i < n) { if (compare(child = d3_layout_treeSearch(children[i], compare), node) > 0) { node = child; } } } return node; } function d3_layout_treeRightmost(a, b) { return a.x - b.x; } function d3_layout_treeLeftmost(a, b) { return b.x - a.x; } function d3_layout_treeDeepest(a, b) { return a.depth - b.depth; } function d3_layout_treeVisitAfter(node, callback) { function visit(node, previousSibling) { var children = node.children; if (children && (n = children.length)) { var child, previousChild = null, i = -1, n; while (++i < n) { child = children[i]; visit(child, previousChild); previousChild = child; } } callback(node, previousSibling); } visit(node, null); } function d3_layout_treeShift(node) { var shift = 0, change = 0, children = node.children, i = children.length, child; while (--i >= 0) { child = children[i]._tree; child.prelim += shift; child.mod += shift; shift += child.shift + (change += child.change); } } function d3_layout_treeMove(ancestor, node, shift) { ancestor = ancestor._tree; node = node._tree; var change = shift / (node.number - ancestor.number); ancestor.change += change; node.change -= change; node.shift += shift; node.prelim += shift; node.mod += shift; } function d3_layout_treeAncestor(vim, node, ancestor) { return vim._tree.ancestor.parent == node.parent ? vim._tree.ancestor : ancestor; } function d3_layout_treemapPadNull(node) { return { x: node.x, y: node.y, dx: node.dx, dy: node.dy }; } function d3_layout_treemapPad(node, padding) { var x = node.x + padding[3], y = node.y + padding[0], dx = node.dx - padding[1] - padding[3], dy = node.dy - padding[0] - padding[2]; if (dx < 0) { x += dx / 2; dx = 0; } if (dy < 0) { y += dy / 2; dy = 0; } return { x: x, y: y, dx: dx, dy: dy }; } function d3_dsv(delimiter, mimeType) { function dsv(url, callback) { d3.text(url, mimeType, function(text) { callback(text && dsv.parse(text)); }); } function formatRow(row) { return row.map(formatValue).join(delimiter); } function formatValue(text) { return reFormat.test(text) ? 
'"' + text.replace(/\"/g, '""') + '"' : text; } var reParse = new RegExp("\r\n|[" + delimiter + "\r\n]", "g"), reFormat = new RegExp('["' + delimiter + "\n]"), delimiterCode = delimiter.charCodeAt(0); dsv.parse = function(text) { var header; return dsv.parseRows(text, function(row, i) { if (i) { var o = {}, j = -1, m = header.length; while (++j < m) o[header[j]] = row[j]; return o; } else { header = row; return null; } }); }; dsv.parseRows = function(text, f) { function token() { if (reParse.lastIndex >= text.length) return EOF; if (eol) { eol = false; return EOL; } var j = reParse.lastIndex; if (text.charCodeAt(j) === 34) { var i = j; while (i++ < text.length) { if (text.charCodeAt(i) === 34) { if (text.charCodeAt(i + 1) !== 34) break; i++; } } reParse.lastIndex = i + 2; var c = text.charCodeAt(i + 1); if (c === 13) { eol = true; if (text.charCodeAt(i + 2) === 10) reParse.lastIndex++; } else if (c === 10) { eol = true; } return text.substring(j + 1, i).replace(/""/g, '"'); } var m = reParse.exec(text); if (m) { eol = m[0].charCodeAt(0) !== delimiterCode; return text.substring(j, m.index); } reParse.lastIndex = text.length; return text.substring(j); } var EOL = {}, EOF = {}, rows = [], n = 0, t, eol; reParse.lastIndex = 0; while ((t = token()) !== EOF) { var a = []; while (t !== EOL && t !== EOF) { a.push(t); t = token(); } if (f && !(a = f(a, n++))) continue; rows.push(a); } return rows; }; dsv.format = function(rows) { return rows.map(formatRow).join("\n"); }; return dsv; } function d3_geo_type(types, defaultValue) { return function(object) { return object && types.hasOwnProperty(object.type) ? types[object.type](object) : defaultValue; }; } function d3_path_circle(radius) { return "m0," + radius + "a" + radius + "," + radius + " 0 1,1 0," + -2 * radius + "a" + radius + "," + radius + " 0 1,1 0," + +2 * radius + "z"; } function d3_geo_bounds(o, f) { if (d3_geo_boundsTypes.hasOwnProperty(o.type)) d3_geo_boundsTypes[o.type](o, f); } function d3_geo_boundsFeature(o, f) { d3_geo_bounds(o.geometry, f); } function d3_geo_boundsFeatureCollection(o, f) { for (var a = o.features, i = 0, n = a.length; i < n; i++) { d3_geo_bounds(a[i].geometry, f); } } function d3_geo_boundsGeometryCollection(o, f) { for (var a = o.geometries, i = 0, n = a.length; i < n; i++) { d3_geo_bounds(a[i], f); } } function d3_geo_boundsLineString(o, f) { for (var a = o.coordinates, i = 0, n = a.length; i < n; i++) { f.apply(null, a[i]); } } function d3_geo_boundsMultiLineString(o, f) { for (var a = o.coordinates, i = 0, n = a.length; i < n; i++) { for (var b = a[i], j = 0, m = b.length; j < m; j++) { f.apply(null, b[j]); } } } function d3_geo_boundsMultiPolygon(o, f) { for (var a = o.coordinates, i = 0, n = a.length; i < n; i++) { for (var b = a[i][0], j = 0, m = b.length; j < m; j++) { f.apply(null, b[j]); } } } function d3_geo_boundsPoint(o, f) { f.apply(null, o.coordinates); } function d3_geo_boundsPolygon(o, f) { for (var a = o.coordinates[0], i = 0, n = a.length; i < n; i++) { f.apply(null, a[i]); } } function d3_geo_greatArcSource(d) { return d.source; } function d3_geo_greatArcTarget(d) { return d.target; } function d3_geo_greatArcInterpolator() { function interpolate(t) { var B = Math.sin(t *= d) * k, A = Math.sin(d - t) * k, x = A * kx0 + B * kx1, y = A * ky0 + B * ky1, z = A * sy0 + B * sy1; return [ Math.atan2(y, x) / d3_geo_radians, Math.atan2(z, Math.sqrt(x * x + y * y)) / d3_geo_radians ]; } var x0, y0, cy0, sy0, kx0, ky0, x1, y1, cy1, sy1, kx1, ky1, d, k; interpolate.distance = function() { if (d == null) k = 
1 / Math.sin(d = Math.acos(Math.max(-1, Math.min(1, sy0 * sy1 + cy0 * cy1 * Math.cos(x1 - x0))))); return d; }; interpolate.source = function(_) { var cx0 = Math.cos(x0 = _[0] * d3_geo_radians), sx0 = Math.sin(x0); cy0 = Math.cos(y0 = _[1] * d3_geo_radians); sy0 = Math.sin(y0); kx0 = cy0 * cx0; ky0 = cy0 * sx0; d = null; return interpolate; }; interpolate.target = function(_) { var cx1 = Math.cos(x1 = _[0] * d3_geo_radians), sx1 = Math.sin(x1); cy1 = Math.cos(y1 = _[1] * d3_geo_radians); sy1 = Math.sin(y1); kx1 = cy1 * cx1; ky1 = cy1 * sx1; d = null; return interpolate; }; return interpolate; } function d3_geo_greatArcInterpolate(a, b) { var i = d3_geo_greatArcInterpolator().source(a).target(b); i.distance(); return i; } function d3_geom_contourStart(grid) { var x = 0, y = 0; while (true) { if (grid(x, y)) { return [ x, y ]; } if (x === 0) { x = y + 1; y = 0; } else { x = x - 1; y = y + 1; } } } function d3_geom_hullCCW(i1, i2, i3, v) { var t, a, b, c, d, e, f; t = v[i1]; a = t[0]; b = t[1]; t = v[i2]; c = t[0]; d = t[1]; t = v[i3]; e = t[0]; f = t[1]; return (f - b) * (c - a) - (d - b) * (e - a) > 0; } function d3_geom_polygonInside(p, a, b) { return (b[0] - a[0]) * (p[1] - a[1]) < (b[1] - a[1]) * (p[0] - a[0]); } function d3_geom_polygonIntersect(c, d, a, b) { var x1 = c[0], x2 = d[0], x3 = a[0], x4 = b[0], y1 = c[1], y2 = d[1], y3 = a[1], y4 = b[1], x13 = x1 - x3, x21 = x2 - x1, x43 = x4 - x3, y13 = y1 - y3, y21 = y2 - y1, y43 = y4 - y3, ua = (x43 * y13 - y43 * x13) / (y43 * x21 - x43 * y21); return [ x1 + ua * x21, y1 + ua * y21 ]; } function d3_voronoi_tessellate(vertices, callback) { var Sites = { list: vertices.map(function(v, i) { return { index: i, x: v[0], y: v[1] }; }).sort(function(a, b) { return a.y < b.y ? -1 : a.y > b.y ? 1 : a.x < b.x ? -1 : a.x > b.x ? 1 : 0; }), bottomSite: null }; var EdgeList = { list: [], leftEnd: null, rightEnd: null, init: function() { EdgeList.leftEnd = EdgeList.createHalfEdge(null, "l"); EdgeList.rightEnd = EdgeList.createHalfEdge(null, "l"); EdgeList.leftEnd.r = EdgeList.rightEnd; EdgeList.rightEnd.l = EdgeList.leftEnd; EdgeList.list.unshift(EdgeList.leftEnd, EdgeList.rightEnd); }, createHalfEdge: function(edge, side) { return { edge: edge, side: side, vertex: null, l: null, r: null }; }, insert: function(lb, he) { he.l = lb; he.r = lb.r; lb.r.l = he; lb.r = he; }, leftBound: function(p) { var he = EdgeList.leftEnd; do { he = he.r; } while (he != EdgeList.rightEnd && Geom.rightOf(he, p)); he = he.l; return he; }, del: function(he) { he.l.r = he.r; he.r.l = he.l; he.edge = null; }, right: function(he) { return he.r; }, left: function(he) { return he.l; }, leftRegion: function(he) { return he.edge == null ? Sites.bottomSite : he.edge.region[he.side]; }, rightRegion: function(he) { return he.edge == null ? Sites.bottomSite : he.edge.region[d3_voronoi_opposite[he.side]]; } }; var Geom = { bisect: function(s1, s2) { var newEdge = { region: { l: s1, r: s2 }, ep: { l: null, r: null } }; var dx = s2.x - s1.x, dy = s2.y - s1.y, adx = dx > 0 ? dx : -dx, ady = dy > 0 ? 
dy : -dy; newEdge.c = s1.x * dx + s1.y * dy + (dx * dx + dy * dy) * .5; if (adx > ady) { newEdge.a = 1; newEdge.b = dy / dx; newEdge.c /= dx; } else { newEdge.b = 1; newEdge.a = dx / dy; newEdge.c /= dy; } return newEdge; }, intersect: function(el1, el2) { var e1 = el1.edge, e2 = el2.edge; if (!e1 || !e2 || e1.region.r == e2.region.r) { return null; } var d = e1.a * e2.b - e1.b * e2.a; if (Math.abs(d) < 1e-10) { return null; } var xint = (e1.c * e2.b - e2.c * e1.b) / d, yint = (e2.c * e1.a - e1.c * e2.a) / d, e1r = e1.region.r, e2r = e2.region.r, el, e; if (e1r.y < e2r.y || e1r.y == e2r.y && e1r.x < e2r.x) { el = el1; e = e1; } else { el = el2; e = e2; } var rightOfSite = xint >= e.region.r.x; if (rightOfSite && el.side === "l" || !rightOfSite && el.side === "r") { return null; } return { x: xint, y: yint }; }, rightOf: function(he, p) { var e = he.edge, topsite = e.region.r, rightOfSite = p.x > topsite.x; if (rightOfSite && he.side === "l") { return 1; } if (!rightOfSite && he.side === "r") { return 0; } if (e.a === 1) { var dyp = p.y - topsite.y, dxp = p.x - topsite.x, fast = 0, above = 0; if (!rightOfSite && e.b < 0 || rightOfSite && e.b >= 0) { above = fast = dyp >= e.b * dxp; } else { above = p.x + p.y * e.b > e.c; if (e.b < 0) { above = !above; } if (!above) { fast = 1; } } if (!fast) { var dxs = topsite.x - e.region.l.x; above = e.b * (dxp * dxp - dyp * dyp) < dxs * dyp * (1 + 2 * dxp / dxs + e.b * e.b); if (e.b < 0) { above = !above; } } } else { var yl = e.c - e.a * p.x, t1 = p.y - yl, t2 = p.x - topsite.x, t3 = yl - topsite.y; above = t1 * t1 > t2 * t2 + t3 * t3; } return he.side === "l" ? above : !above; }, endPoint: function(edge, side, site) { edge.ep[side] = site; if (!edge.ep[d3_voronoi_opposite[side]]) return; callback(edge); }, distance: function(s, t) { var dx = s.x - t.x, dy = s.y - t.y; return Math.sqrt(dx * dx + dy * dy); } }; var EventQueue = { list: [], insert: function(he, site, offset) { he.vertex = site; he.ystar = site.y + offset; for (var i = 0, list = EventQueue.list, l = list.length; i < l; i++) { var next = list[i]; if (he.ystar > next.ystar || he.ystar == next.ystar && site.x > next.vertex.x) { continue; } else { break; } } list.splice(i, 0, he); }, del: function(he) { for (var i = 0, ls = EventQueue.list, l = ls.length; i < l && ls[i] != he; ++i) {} ls.splice(i, 1); }, empty: function() { return EventQueue.list.length === 0; }, nextEvent: function(he) { for (var i = 0, ls = EventQueue.list, l = ls.length; i < l; ++i) { if (ls[i] == he) return ls[i + 1]; } return null; }, min: function() { var elem = EventQueue.list[0]; return { x: elem.vertex.x, y: elem.ystar }; }, extractMin: function() { return EventQueue.list.shift(); } }; EdgeList.init(); Sites.bottomSite = Sites.list.shift(); var newSite = Sites.list.shift(), newIntStar; var lbnd, rbnd, llbnd, rrbnd, bisector; var bot, top, temp, p, v; var e, pm; while (true) { if (!EventQueue.empty()) { newIntStar = EventQueue.min(); } if (newSite && (EventQueue.empty() || newSite.y < newIntStar.y || newSite.y == newIntStar.y && newSite.x < newIntStar.x)) { lbnd = EdgeList.leftBound(newSite); rbnd = EdgeList.right(lbnd); bot = EdgeList.rightRegion(lbnd); e = Geom.bisect(bot, newSite); bisector = EdgeList.createHalfEdge(e, "l"); EdgeList.insert(lbnd, bisector); p = Geom.intersect(lbnd, bisector); if (p) { EventQueue.del(lbnd); EventQueue.insert(lbnd, p, Geom.distance(p, newSite)); } lbnd = bisector; bisector = EdgeList.createHalfEdge(e, "r"); EdgeList.insert(lbnd, bisector); p = Geom.intersect(bisector, rbnd); if (p) { 
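// Descriptive note: the intersection with the right boundary is queued as a potential
// Voronoi vertex event, prioritized by its distance from the newly added site.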
EventQueue.insert(bisector, p, Geom.distance(p, newSite)); } newSite = Sites.list.shift(); } else if (!EventQueue.empty()) { lbnd = EventQueue.extractMin(); llbnd = EdgeList.left(lbnd); rbnd = EdgeList.right(lbnd); rrbnd = EdgeList.right(rbnd); bot = EdgeList.leftRegion(lbnd); top = EdgeList.rightRegion(rbnd); v = lbnd.vertex; Geom.endPoint(lbnd.edge, lbnd.side, v); Geom.endPoint(rbnd.edge, rbnd.side, v); EdgeList.del(lbnd); EventQueue.del(rbnd); EdgeList.del(rbnd); pm = "l"; if (bot.y > top.y) { temp = bot; bot = top; top = temp; pm = "r"; } e = Geom.bisect(bot, top); bisector = EdgeList.createHalfEdge(e, pm); EdgeList.insert(llbnd, bisector); Geom.endPoint(e, d3_voronoi_opposite[pm], v); p = Geom.intersect(llbnd, bisector); if (p) { EventQueue.del(llbnd); EventQueue.insert(llbnd, p, Geom.distance(p, bot)); } p = Geom.intersect(bisector, rrbnd); if (p) { EventQueue.insert(bisector, p, Geom.distance(p, bot)); } } else { break; } } for (lbnd = EdgeList.right(EdgeList.leftEnd); lbnd != EdgeList.rightEnd; lbnd = EdgeList.right(lbnd)) { callback(lbnd.edge); } } function d3_geom_quadtreeNode() { return { leaf: true, nodes: [], point: null }; } function d3_geom_quadtreeVisit(f, node, x1, y1, x2, y2) { if (!f(node, x1, y1, x2, y2)) { var sx = (x1 + x2) * .5, sy = (y1 + y2) * .5, children = node.nodes; if (children[0]) d3_geom_quadtreeVisit(f, children[0], x1, y1, sx, sy); if (children[1]) d3_geom_quadtreeVisit(f, children[1], sx, y1, x2, sy); if (children[2]) d3_geom_quadtreeVisit(f, children[2], x1, sy, sx, y2); if (children[3]) d3_geom_quadtreeVisit(f, children[3], sx, sy, x2, y2); } } function d3_geom_quadtreePoint(p) { return { x: p[0], y: p[1] }; } function d3_time_utc() { this._ = new Date(arguments.length > 1 ? Date.UTC.apply(this, arguments) : arguments[0]); } function d3_time_formatAbbreviate(name) { return name.substring(0, 3); } function d3_time_parse(date, template, string, j) { var c, p, i = 0, n = template.length, m = string.length; while (i < n) { if (j >= m) return -1; c = template.charCodeAt(i++); if (c == 37) { p = d3_time_parsers[template.charAt(i++)]; if (!p || (j = p(date, string, j)) < 0) return -1; } else if (c != string.charCodeAt(j++)) { return -1; } } return j; } function d3_time_formatRe(names) { return new RegExp("^(?:" + names.map(d3.requote).join("|") + ")", "i"); } function d3_time_formatLookup(names) { var map = new d3_Map, i = -1, n = names.length; while (++i < n) map.set(names[i].toLowerCase(), i); return map; } function d3_time_parseWeekdayAbbrev(date, string, i) { d3_time_dayAbbrevRe.lastIndex = 0; var n = d3_time_dayAbbrevRe.exec(string.substring(i)); return n ? i += n[0].length : -1; } function d3_time_parseWeekday(date, string, i) { d3_time_dayRe.lastIndex = 0; var n = d3_time_dayRe.exec(string.substring(i)); return n ? i += n[0].length : -1; } function d3_time_parseMonthAbbrev(date, string, i) { d3_time_monthAbbrevRe.lastIndex = 0; var n = d3_time_monthAbbrevRe.exec(string.substring(i)); return n ? (date.m = d3_time_monthAbbrevLookup.get(n[0].toLowerCase()), i += n[0].length) : -1; } function d3_time_parseMonth(date, string, i) { d3_time_monthRe.lastIndex = 0; var n = d3_time_monthRe.exec(string.substring(i)); return n ? 
(date.m = d3_time_monthLookup.get(n[0].toLowerCase()), i += n[0].length) : -1; } function d3_time_parseLocaleFull(date, string, i) { return d3_time_parse(date, d3_time_formats.c.toString(), string, i); } function d3_time_parseLocaleDate(date, string, i) { return d3_time_parse(date, d3_time_formats.x.toString(), string, i); } function d3_time_parseLocaleTime(date, string, i) { return d3_time_parse(date, d3_time_formats.X.toString(), string, i); } function d3_time_parseFullYear(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 4)); return n ? (date.y = +n[0], i += n[0].length) : -1; } function d3_time_parseYear(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 2)); return n ? (date.y = d3_time_expandYear(+n[0]), i += n[0].length) : -1; } function d3_time_expandYear(d) { return d + (d > 68 ? 1900 : 2e3); } function d3_time_parseMonthNumber(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 2)); return n ? (date.m = n[0] - 1, i += n[0].length) : -1; } function d3_time_parseDay(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 2)); return n ? (date.d = +n[0], i += n[0].length) : -1; } function d3_time_parseHour24(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 2)); return n ? (date.H = +n[0], i += n[0].length) : -1; } function d3_time_parseMinutes(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 2)); return n ? (date.M = +n[0], i += n[0].length) : -1; } function d3_time_parseSeconds(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 2)); return n ? (date.S = +n[0], i += n[0].length) : -1; } function d3_time_parseMilliseconds(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 3)); return n ? (date.L = +n[0], i += n[0].length) : -1; } function d3_time_parseAmPm(date, string, i) { var n = d3_time_amPmLookup.get(string.substring(i, i += 2).toLowerCase()); return n == null ? -1 : (date.p = n, i); } function d3_time_zone(d) { var z = d.getTimezoneOffset(), zs = z > 0 ? "-" : "+", zh = ~~(Math.abs(z) / 60), zm = Math.abs(z) % 60; return zs + d3_time_zfill2(zh) + d3_time_zfill2(zm); } function d3_time_formatIsoNative(date) { return date.toISOString(); } function d3_time_interval(local, step, number) { function round(date) { var d0 = local(date), d1 = offset(d0, 1); return date - d0 < d1 - date ? 
d0 : d1; } function ceil(date) { step(date = local(new d3_time(date - 1)), 1); return date; } function offset(date, k) { step(date = new d3_time(+date), k); return date; } function range(t0, t1, dt) { var time = ceil(t0), times = []; if (dt > 1) { while (time < t1) { if (!(number(time) % dt)) times.push(new Date(+time)); step(time, 1); } } else { while (time < t1) times.push(new Date(+time)), step(time, 1); } return times; } function range_utc(t0, t1, dt) { try { d3_time = d3_time_utc; var utc = new d3_time_utc; utc._ = t0; return range(utc, t1, dt); } finally { d3_time = Date; } } local.floor = local; local.round = round; local.ceil = ceil; local.offset = offset; local.range = range; var utc = local.utc = d3_time_interval_utc(local); utc.floor = utc; utc.round = d3_time_interval_utc(round); utc.ceil = d3_time_interval_utc(ceil); utc.offset = d3_time_interval_utc(offset); utc.range = range_utc; return local; } function d3_time_interval_utc(method) { return function(date, k) { try { d3_time = d3_time_utc; var utc = new d3_time_utc; utc._ = date; return method(utc, k)._; } finally { d3_time = Date; } }; } function d3_time_scale(linear, methods, format) { function scale(x) { return linear(x); } scale.invert = function(x) { return d3_time_scaleDate(linear.invert(x)); }; scale.domain = function(x) { if (!arguments.length) return linear.domain().map(d3_time_scaleDate); linear.domain(x); return scale; }; scale.nice = function(m) { return scale.domain(d3_scale_nice(scale.domain(), function() { return m; })); }; scale.ticks = function(m, k) { var extent = d3_time_scaleExtent(scale.domain()); if (typeof m !== "function") { var span = extent[1] - extent[0], target = span / m, i = d3.bisect(d3_time_scaleSteps, target); if (i == d3_time_scaleSteps.length) return methods.year(extent, m); if (!i) return linear.ticks(m).map(d3_time_scaleDate); if (Math.log(target / d3_time_scaleSteps[i - 1]) < Math.log(d3_time_scaleSteps[i] / target)) --i; m = methods[i]; k = m[1]; m = m[0].range; } return m(extent[0], new Date(+extent[1] + 1), k); }; scale.tickFormat = function() { return format; }; scale.copy = function() { return d3_time_scale(linear.copy(), methods, format); }; return d3.rebind(scale, linear, "range", "rangeRound", "interpolate", "clamp"); } function d3_time_scaleExtent(domain) { var start = domain[0], stop = domain[domain.length - 1]; return start < stop ? 
[ start, stop ] : [ stop, start ]; } function d3_time_scaleDate(t) { return new Date(t); } function d3_time_scaleFormat(formats) { return function(date) { var i = formats.length - 1, f = formats[i]; while (!f[1](date)) f = formats[--i]; return f[0](date); }; } function d3_time_scaleSetYear(y) { var d = new Date(y, 0, 1); d.setFullYear(y); return d; } function d3_time_scaleGetYear(d) { var y = d.getFullYear(), d0 = d3_time_scaleSetYear(y), d1 = d3_time_scaleSetYear(y + 1); return y + (d - d0) / (d1 - d0); } function d3_time_scaleUTCSetYear(y) { var d = new Date(Date.UTC(y, 0, 1)); d.setUTCFullYear(y); return d; } function d3_time_scaleUTCGetYear(d) { var y = d.getUTCFullYear(), d0 = d3_time_scaleUTCSetYear(y), d1 = d3_time_scaleUTCSetYear(y + 1); return y + (d - d0) / (d1 - d0); } if (!Date.now) Date.now = function() { return +(new Date); }; try { document.createElement("div").style.setProperty("opacity", 0, ""); } catch (error) { var d3_style_prototype = CSSStyleDeclaration.prototype, d3_style_setProperty = d3_style_prototype.setProperty; d3_style_prototype.setProperty = function(name, value, priority) { d3_style_setProperty.call(this, name, value + "", priority); }; } d3 = { version: "2.10.3" }; var d3_array = d3_arraySlice; try { d3_array(document.documentElement.childNodes)[0].nodeType; } catch (e) { d3_array = d3_arrayCopy; } var d3_arraySubclass = [].__proto__ ? function(array, prototype) { array.__proto__ = prototype; } : function(array, prototype) { for (var property in prototype) array[property] = prototype[property]; }; d3.map = function(object) { var map = new d3_Map; for (var key in object) map.set(key, object[key]); return map; }; d3_class(d3_Map, { has: function(key) { return d3_map_prefix + key in this; }, get: function(key) { return this[d3_map_prefix + key]; }, set: function(key, value) { return this[d3_map_prefix + key] = value; }, remove: function(key) { key = d3_map_prefix + key; return key in this && delete this[key]; }, keys: function() { var keys = []; this.forEach(function(key) { keys.push(key); }); return keys; }, values: function() { var values = []; this.forEach(function(key, value) { values.push(value); }); return values; }, entries: function() { var entries = []; this.forEach(function(key, value) { entries.push({ key: key, value: value }); }); return entries; }, forEach: function(f) { for (var key in this) { if (key.charCodeAt(0) === d3_map_prefixCode) { f.call(this, key.substring(1), this[key]); } } } }); var d3_map_prefix = "\0", d3_map_prefixCode = d3_map_prefix.charCodeAt(0); d3.functor = d3_functor; d3.rebind = function(target, source) { var i = 1, n = arguments.length, method; while (++i < n) target[method = arguments[i]] = d3_rebind(target, source, source[method]); return target; }; d3.ascending = function(a, b) { return a < b ? -1 : a > b ? 1 : a >= b ? 0 : NaN; }; d3.descending = function(a, b) { return b < a ? -1 : b > a ? 1 : b >= a ? 0 : NaN; }; d3.mean = function(array, f) { var n = array.length, a, m = 0, i = -1, j = 0; if (arguments.length === 1) { while (++i < n) if (d3_number(a = array[i])) m += (a - m) / ++j; } else { while (++i < n) if (d3_number(a = f.call(array, array[i], i))) m += (a - m) / ++j; } return j ? m : undefined; }; d3.median = function(array, f) { if (arguments.length > 1) array = array.map(f); array = array.filter(d3_number); return array.length ? 
d3.quantile(array.sort(d3.ascending), .5) : undefined; }; d3.min = function(array, f) { var i = -1, n = array.length, a, b; if (arguments.length === 1) { while (++i < n && ((a = array[i]) == null || a != a)) a = undefined; while (++i < n) if ((b = array[i]) != null && a > b) a = b; } else { while (++i < n && ((a = f.call(array, array[i], i)) == null || a != a)) a = undefined; while (++i < n) if ((b = f.call(array, array[i], i)) != null && a > b) a = b; } return a; }; d3.max = function(array, f) { var i = -1, n = array.length, a, b; if (arguments.length === 1) { while (++i < n && ((a = array[i]) == null || a != a)) a = undefined; while (++i < n) if ((b = array[i]) != null && b > a) a = b; } else { while (++i < n && ((a = f.call(array, array[i], i)) == null || a != a)) a = undefined; while (++i < n) if ((b = f.call(array, array[i], i)) != null && b > a) a = b; } return a; }; d3.extent = function(array, f) { var i = -1, n = array.length, a, b, c; if (arguments.length === 1) { while (++i < n && ((a = c = array[i]) == null || a != a)) a = c = undefined; while (++i < n) if ((b = array[i]) != null) { if (a > b) a = b; if (c < b) c = b; } } else { while (++i < n && ((a = c = f.call(array, array[i], i)) == null || a != a)) a = undefined; while (++i < n) if ((b = f.call(array, array[i], i)) != null) { if (a > b) a = b; if (c < b) c = b; } } return [ a, c ]; }; d3.random = { normal: function(µ, σ) { var n = arguments.length; if (n < 2) σ = 1; if (n < 1) µ = 0; return function() { var x, y, r; do { x = Math.random() * 2 - 1; y = Math.random() * 2 - 1; r = x * x + y * y; } while (!r || r > 1); return µ + σ * x * Math.sqrt(-2 * Math.log(r) / r); }; }, logNormal: function(µ, σ) { var n = arguments.length; if (n < 2) σ = 1; if (n < 1) µ = 0; var random = d3.random.normal(); return function() { return Math.exp(µ + σ * random()); }; }, irwinHall: function(m) { return function() { for (var s = 0, j = 0; j < m; j++) s += Math.random(); return s / m; }; } }; d3.sum = function(array, f) { var s = 0, n = array.length, a, i = -1; if (arguments.length === 1) { while (++i < n) if (!isNaN(a = +array[i])) s += a; } else { while (++i < n) if (!isNaN(a = +f.call(array, array[i], i))) s += a; } return s; }; d3.quantile = function(values, p) { var H = (values.length - 1) * p + 1, h = Math.floor(H), v = values[h - 1], e = H - h; return e ? 
v + e * (values[h] - v) : v; }; d3.transpose = function(matrix) { return d3.zip.apply(d3, matrix); }; d3.zip = function() { if (!(n = arguments.length)) return []; for (var i = -1, m = d3.min(arguments, d3_zipLength), zips = new Array(m); ++i < m; ) { for (var j = -1, n, zip = zips[i] = new Array(n); ++j < n; ) { zip[j] = arguments[j][i]; } } return zips; }; d3.bisector = function(f) { return { left: function(a, x, lo, hi) { if (arguments.length < 3) lo = 0; if (arguments.length < 4) hi = a.length; while (lo < hi) { var mid = lo + hi >>> 1; if (f.call(a, a[mid], mid) < x) lo = mid + 1; else hi = mid; } return lo; }, right: function(a, x, lo, hi) { if (arguments.length < 3) lo = 0; if (arguments.length < 4) hi = a.length; while (lo < hi) { var mid = lo + hi >>> 1; if (x < f.call(a, a[mid], mid)) hi = mid; else lo = mid + 1; } return lo; } }; }; var d3_bisector = d3.bisector(function(d) { return d; }); d3.bisectLeft = d3_bisector.left; d3.bisect = d3.bisectRight = d3_bisector.right; d3.first = function(array, f) { var i = 0, n = array.length, a = array[0], b; if (arguments.length === 1) f = d3.ascending; while (++i < n) { if (f.call(array, a, b = array[i]) > 0) { a = b; } } return a; }; d3.last = function(array, f) { var i = 0, n = array.length, a = array[0], b; if (arguments.length === 1) f = d3.ascending; while (++i < n) { if (f.call(array, a, b = array[i]) <= 0) { a = b; } } return a; }; d3.nest = function() { function map(array, depth) { if (depth >= keys.length) return rollup ? rollup.call(nest, array) : sortValues ? array.sort(sortValues) : array; var i = -1, n = array.length, key = keys[depth++], keyValue, object, valuesByKey = new d3_Map, values, o = {}; while (++i < n) { if (values = valuesByKey.get(keyValue = key(object = array[i]))) { values.push(object); } else { valuesByKey.set(keyValue, [ object ]); } } valuesByKey.forEach(function(keyValue, values) { o[keyValue] = map(values, depth); }); return o; } function entries(map, depth) { if (depth >= keys.length) return map; var a = [], sortKey = sortKeys[depth++], key; for (key in map) { a.push({ key: key, values: entries(map[key], depth) }); } if (sortKey) a.sort(function(a, b) { return sortKey(a.key, b.key); }); return a; } var nest = {}, keys = [], sortKeys = [], sortValues, rollup; nest.map = function(array) { return map(array, 0); }; nest.entries = function(array) { return entries(map(array, 0), 0); }; nest.key = function(d) { keys.push(d); return nest; }; nest.sortKeys = function(order) { sortKeys[keys.length - 1] = order; return nest; }; nest.sortValues = function(order) { sortValues = order; return nest; }; nest.rollup = function(f) { rollup = f; return nest; }; return nest; }; d3.keys = function(map) { var keys = []; for (var key in map) keys.push(key); return keys; }; d3.values = function(map) { var values = []; for (var key in map) values.push(map[key]); return values; }; d3.entries = function(map) { var entries = []; for (var key in map) entries.push({ key: key, value: map[key] }); return entries; }; d3.permute = function(array, indexes) { var permutes = [], i = -1, n = indexes.length; while (++i < n) permutes[i] = array[indexes[i]]; return permutes; }; d3.merge = function(arrays) { return Array.prototype.concat.apply([], arrays); }; d3.split = function(array, f) { var arrays = [], values = [], value, i = -1, n = array.length; if (arguments.length < 2) f = d3_splitter; while (++i < n) { if (f.call(values, value = array[i], i)) { values = []; } else { if (!values.length) arrays.push(values); values.push(value); } } return 
arrays; }; d3.range = function(start, stop, step) { if (arguments.length < 3) { step = 1; if (arguments.length < 2) { stop = start; start = 0; } } if ((stop - start) / step === Infinity) throw new Error("infinite range"); var range = [], k = d3_range_integerScale(Math.abs(step)), i = -1, j; start *= k, stop *= k, step *= k; if (step < 0) while ((j = start + step * ++i) > stop) range.push(j / k); else while ((j = start + step * ++i) < stop) range.push(j / k); return range; }; d3.requote = function(s) { return s.replace(d3_requote_re, "\\$&"); }; var d3_requote_re = /[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g; d3.round = function(x, n) { return n ? Math.round(x * (n = Math.pow(10, n))) / n : Math.round(x); }; d3.xhr = function(url, mime, callback) { var req = new XMLHttpRequest; if (arguments.length < 3) callback = mime, mime = null; else if (mime && req.overrideMimeType) req.overrideMimeType(mime); req.open("GET", url, true); if (mime) req.setRequestHeader("Accept", mime); req.onreadystatechange = function() { if (req.readyState === 4) { var s = req.status; callback(!s && req.response || s >= 200 && s < 300 || s === 304 ? req : null); } }; req.send(null); }; d3.text = function(url, mime, callback) { function ready(req) { callback(req && req.responseText); } if (arguments.length < 3) { callback = mime; mime = null; } d3.xhr(url, mime, ready); }; d3.json = function(url, callback) { d3.text(url, "application/json", function(text) { callback(text ? JSON.parse(text) : null); }); }; d3.html = function(url, callback) { d3.text(url, "text/html", function(text) { if (text != null) { var range = document.createRange(); range.selectNode(document.body); text = range.createContextualFragment(text); } callback(text); }); }; d3.xml = function(url, mime, callback) { function ready(req) { callback(req && req.responseXML); } if (arguments.length < 3) { callback = mime; mime = null; } d3.xhr(url, mime, ready); }; var d3_nsPrefix = { svg: "http://www.w3.org/2000/svg", xhtml: "http://www.w3.org/1999/xhtml", xlink: "http://www.w3.org/1999/xlink", xml: "http://www.w3.org/XML/1998/namespace", xmlns: "http://www.w3.org/2000/xmlns/" }; d3.ns = { prefix: d3_nsPrefix, qualify: function(name) { var i = name.indexOf(":"), prefix = name; if (i >= 0) { prefix = name.substring(0, i); name = name.substring(i + 1); } return d3_nsPrefix.hasOwnProperty(prefix) ? { space: d3_nsPrefix[prefix], local: name } : name; } }; d3.dispatch = function() { var dispatch = new d3_dispatch, i = -1, n = arguments.length; while (++i < n) dispatch[arguments[i]] = d3_dispatch_event(dispatch); return dispatch; }; d3_dispatch.prototype.on = function(type, listener) { var i = type.indexOf("."), name = ""; if (i > 0) { name = type.substring(i + 1); type = type.substring(0, i); } return arguments.length < 2 ? 
this[type].on(name) : this[type].on(name, listener); }; d3.format = function(specifier) { var match = d3_format_re.exec(specifier), fill = match[1] || " ", sign = match[3] || "", zfill = match[5], width = +match[6], comma = match[7], precision = match[8], type = match[9], scale = 1, suffix = "", integer = false; if (precision) precision = +precision.substring(1); if (zfill) { fill = "0"; if (comma) width -= Math.floor((width - 1) / 4); } switch (type) { case "n": comma = true; type = "g"; break; case "%": scale = 100; suffix = "%"; type = "f"; break; case "p": scale = 100; suffix = "%"; type = "r"; break; case "d": integer = true; precision = 0; break; case "s": scale = -1; type = "r"; break; } if (type == "r" && !precision) type = "g"; type = d3_format_types.get(type) || d3_format_typeDefault; return function(value) { if (integer && value % 1) return ""; var negative = value < 0 && (value = -value) ? "-" : sign; if (scale < 0) { var prefix = d3.formatPrefix(value, precision); value = prefix.scale(value); suffix = prefix.symbol; } else { value *= scale; } value = type(value, precision); if (zfill) { var length = value.length + negative.length; if (length < width) value = (new Array(width - length + 1)).join(fill) + value; if (comma) value = d3_format_group(value); value = negative + value; } else { if (comma) value = d3_format_group(value); value = negative + value; var length = value.length; if (length < width) value = (new Array(width - length + 1)).join(fill) + value; } return value + suffix; }; }; var d3_format_re = /(?:([^{])?([<>=^]))?([+\- ])?(#)?(0)?([0-9]+)?(,)?(\.[0-9]+)?([a-zA-Z%])?/; var d3_format_types = d3.map({ g: function(x, p) { return x.toPrecision(p); }, e: function(x, p) { return x.toExponential(p); }, f: function(x, p) { return x.toFixed(p); }, r: function(x, p) { return d3.round(x, p = d3_format_precision(x, p)).toFixed(Math.max(0, Math.min(20, p))); } }); var d3_formatPrefixes = [ "y", "z", "a", "f", "p", "n", "μ", "m", "", "k", "M", "G", "T", "P", "E", "Z", "Y" ].map(d3_formatPrefix); d3.formatPrefix = function(value, precision) { var i = 0; if (value) { if (value < 0) value *= -1; if (precision) value = d3.round(value, d3_format_precision(value, precision)); i = 1 + Math.floor(1e-12 + Math.log(value) / Math.LN10); i = Math.max(-24, Math.min(24, Math.floor((i <= 0 ? i + 1 : i - 1) / 3) * 3)); } return d3_formatPrefixes[8 + i / 3]; }; var d3_ease_quad = d3_ease_poly(2), d3_ease_cubic = d3_ease_poly(3), d3_ease_default = function() { return d3_ease_identity; }; var d3_ease = d3.map({ linear: d3_ease_default, poly: d3_ease_poly, quad: function() { return d3_ease_quad; }, cubic: function() { return d3_ease_cubic; }, sin: function() { return d3_ease_sin; }, exp: function() { return d3_ease_exp; }, circle: function() { return d3_ease_circle; }, elastic: d3_ease_elastic, back: d3_ease_back, bounce: function() { return d3_ease_bounce; } }); var d3_ease_mode = d3.map({ "in": d3_ease_identity, out: d3_ease_reverse, "in-out": d3_ease_reflect, "out-in": function(f) { return d3_ease_reflect(d3_ease_reverse(f)); } }); d3.ease = function(name) { var i = name.indexOf("-"), t = i >= 0 ? name.substring(0, i) : name, m = i >= 0 ? 
name.substring(i + 1) : "in"; t = d3_ease.get(t) || d3_ease_default; m = d3_ease_mode.get(m) || d3_ease_identity; return d3_ease_clamp(m(t.apply(null, Array.prototype.slice.call(arguments, 1)))); }; d3.event = null; d3.transform = function(string) { var g = document.createElementNS(d3.ns.prefix.svg, "g"); return (d3.transform = function(string) { g.setAttribute("transform", string); var t = g.transform.baseVal.consolidate(); return new d3_transform(t ? t.matrix : d3_transformIdentity); })(string); }; d3_transform.prototype.toString = function() { return "translate(" + this.translate + ")rotate(" + this.rotate + ")skewX(" + this.skew + ")scale(" + this.scale + ")"; }; var d3_transformDegrees = 180 / Math.PI, d3_transformIdentity = { a: 1, b: 0, c: 0, d: 1, e: 0, f: 0 }; d3.interpolate = function(a, b) { var i = d3.interpolators.length, f; while (--i >= 0 && !(f = d3.interpolators[i](a, b))) ; return f; }; d3.interpolateNumber = function(a, b) { b -= a; return function(t) { return a + b * t; }; }; d3.interpolateRound = function(a, b) { b -= a; return function(t) { return Math.round(a + b * t); }; }; d3.interpolateString = function(a, b) { var m, i, j, s0 = 0, s1 = 0, s = [], q = [], n, o; d3_interpolate_number.lastIndex = 0; for (i = 0; m = d3_interpolate_number.exec(b); ++i) { if (m.index) s.push(b.substring(s0, s1 = m.index)); q.push({ i: s.length, x: m[0] }); s.push(null); s0 = d3_interpolate_number.lastIndex; } if (s0 < b.length) s.push(b.substring(s0)); for (i = 0, n = q.length; (m = d3_interpolate_number.exec(a)) && i < n; ++i) { o = q[i]; if (o.x == m[0]) { if (o.i) { if (s[o.i + 1] == null) { s[o.i - 1] += o.x; s.splice(o.i, 1); for (j = i + 1; j < n; ++j) q[j].i--; } else { s[o.i - 1] += o.x + s[o.i + 1]; s.splice(o.i, 2); for (j = i + 1; j < n; ++j) q[j].i -= 2; } } else { if (s[o.i + 1] == null) { s[o.i] = o.x; } else { s[o.i] = o.x + s[o.i + 1]; s.splice(o.i + 1, 1); for (j = i + 1; j < n; ++j) q[j].i--; } } q.splice(i, 1); n--; i--; } else { o.x = d3.interpolateNumber(parseFloat(m[0]), parseFloat(o.x)); } } while (i < n) { o = q.pop(); if (s[o.i + 1] == null) { s[o.i] = o.x; } else { s[o.i] = o.x + s[o.i + 1]; s.splice(o.i + 1, 1); } n--; } if (s.length === 1) { return s[0] == null ? 
q[0].x : function() { return b; }; } return function(t) { for (i = 0; i < n; ++i) s[(o = q[i]).i] = o.x(t); return s.join(""); }; }; d3.interpolateTransform = function(a, b) { var s = [], q = [], n, A = d3.transform(a), B = d3.transform(b), ta = A.translate, tb = B.translate, ra = A.rotate, rb = B.rotate, wa = A.skew, wb = B.skew, ka = A.scale, kb = B.scale; if (ta[0] != tb[0] || ta[1] != tb[1]) { s.push("translate(", null, ",", null, ")"); q.push({ i: 1, x: d3.interpolateNumber(ta[0], tb[0]) }, { i: 3, x: d3.interpolateNumber(ta[1], tb[1]) }); } else if (tb[0] || tb[1]) { s.push("translate(" + tb + ")"); } else { s.push(""); } if (ra != rb) { if (ra - rb > 180) rb += 360; else if (rb - ra > 180) ra += 360; q.push({ i: s.push(s.pop() + "rotate(", null, ")") - 2, x: d3.interpolateNumber(ra, rb) }); } else if (rb) { s.push(s.pop() + "rotate(" + rb + ")"); } if (wa != wb) { q.push({ i: s.push(s.pop() + "skewX(", null, ")") - 2, x: d3.interpolateNumber(wa, wb) }); } else if (wb) { s.push(s.pop() + "skewX(" + wb + ")"); } if (ka[0] != kb[0] || ka[1] != kb[1]) { n = s.push(s.pop() + "scale(", null, ",", null, ")"); q.push({ i: n - 4, x: d3.interpolateNumber(ka[0], kb[0]) }, { i: n - 2, x: d3.interpolateNumber(ka[1], kb[1]) }); } else if (kb[0] != 1 || kb[1] != 1) { s.push(s.pop() + "scale(" + kb + ")"); } n = q.length; return function(t) { var i = -1, o; while (++i < n) s[(o = q[i]).i] = o.x(t); return s.join(""); }; }; d3.interpolateRgb = function(a, b) { a = d3.rgb(a); b = d3.rgb(b); var ar = a.r, ag = a.g, ab = a.b, br = b.r - ar, bg = b.g - ag, bb = b.b - ab; return function(t) { return "#" + d3_rgb_hex(Math.round(ar + br * t)) + d3_rgb_hex(Math.round(ag + bg * t)) + d3_rgb_hex(Math.round(ab + bb * t)); }; }; d3.interpolateHsl = function(a, b) { a = d3.hsl(a); b = d3.hsl(b); var h0 = a.h, s0 = a.s, l0 = a.l, h1 = b.h - h0, s1 = b.s - s0, l1 = b.l - l0; if (h1 > 180) h1 -= 360; else if (h1 < -180) h1 += 360; return function(t) { return d3_hsl_rgb(h0 + h1 * t, s0 + s1 * t, l0 + l1 * t) + ""; }; }; d3.interpolateLab = function(a, b) { a = d3.lab(a); b = d3.lab(b); var al = a.l, aa = a.a, ab = a.b, bl = b.l - al, ba = b.a - aa, bb = b.b - ab; return function(t) { return d3_lab_rgb(al + bl * t, aa + ba * t, ab + bb * t) + ""; }; }; d3.interpolateHcl = function(a, b) { a = d3.hcl(a); b = d3.hcl(b); var ah = a.h, ac = a.c, al = a.l, bh = b.h - ah, bc = b.c - ac, bl = b.l - al; if (bh > 180) bh -= 360; else if (bh < -180) bh += 360; return function(t) { return d3_hcl_lab(ah + bh * t, ac + bc * t, al + bl * t) + ""; }; }; d3.interpolateArray = function(a, b) { var x = [], c = [], na = a.length, nb = b.length, n0 = Math.min(a.length, b.length), i; for (i = 0; i < n0; ++i) x.push(d3.interpolate(a[i], b[i])); for (; i < na; ++i) c[i] = a[i]; for (; i < nb; ++i) c[i] = b[i]; return function(t) { for (i = 0; i < n0; ++i) c[i] = x[i](t); return c; }; }; d3.interpolateObject = function(a, b) { var i = {}, c = {}, k; for (k in a) { if (k in b) { i[k] = d3_interpolateByName(k)(a[k], b[k]); } else { c[k] = a[k]; } } for (k in b) { if (!(k in a)) { c[k] = b[k]; } } return function(t) { for (k in i) c[k] = i[k](t); return c; }; }; var d3_interpolate_number = /[-+]?(?:\d+\.?\d*|\.?\d+)(?:[eE][-+]?\d+)?/g; d3.interpolators = [ d3.interpolateObject, function(a, b) { return b instanceof Array && d3.interpolateArray(a, b); }, function(a, b) { return (typeof a === "string" || typeof b === "string") && d3.interpolateString(a + "", b + ""); }, function(a, b) { return (typeof b === "string" ? 
d3_rgb_names.has(b) || /^(#|rgb\(|hsl\()/.test(b) : b instanceof d3_Color) && d3.interpolateRgb(a, b); }, function(a, b) { return !isNaN(a = +a) && !isNaN(b = +b) && d3.interpolateNumber(a, b); } ]; d3_Color.prototype.toString = function() { return this.rgb() + ""; }; d3.rgb = function(r, g, b) { return arguments.length === 1 ? r instanceof d3_Rgb ? d3_rgb(r.r, r.g, r.b) : d3_rgb_parse("" + r, d3_rgb, d3_hsl_rgb) : d3_rgb(~~r, ~~g, ~~b); }; var d3_rgbPrototype = d3_Rgb.prototype = new d3_Color; d3_rgbPrototype.brighter = function(k) { k = Math.pow(.7, arguments.length ? k : 1); var r = this.r, g = this.g, b = this.b, i = 30; if (!r && !g && !b) return d3_rgb(i, i, i); if (r && r < i) r = i; if (g && g < i) g = i; if (b && b < i) b = i; return d3_rgb(Math.min(255, Math.floor(r / k)), Math.min(255, Math.floor(g / k)), Math.min(255, Math.floor(b / k))); }; d3_rgbPrototype.darker = function(k) { k = Math.pow(.7, arguments.length ? k : 1); return d3_rgb(Math.floor(k * this.r), Math.floor(k * this.g), Math.floor(k * this.b)); }; d3_rgbPrototype.hsl = function() { return d3_rgb_hsl(this.r, this.g, this.b); }; d3_rgbPrototype.toString = function() { return "#" + d3_rgb_hex(this.r) + d3_rgb_hex(this.g) + d3_rgb_hex(this.b); }; var d3_rgb_names = d3.map({ aliceblue: "#f0f8ff", antiquewhite: "#faebd7", aqua: "#00ffff", aquamarine: "#7fffd4", azure: "#f0ffff", beige: "#f5f5dc", bisque: "#ffe4c4", black: "#000000", blanchedalmond: "#ffebcd", blue: "#0000ff", blueviolet: "#8a2be2", brown: "#a52a2a", burlywood: "#deb887", cadetblue: "#5f9ea0", chartreuse: "#7fff00", chocolate: "#d2691e", coral: "#ff7f50", cornflowerblue: "#6495ed", cornsilk: "#fff8dc", crimson: "#dc143c", cyan: "#00ffff", darkblue: "#00008b", darkcyan: "#008b8b", darkgoldenrod: "#b8860b", darkgray: "#a9a9a9", darkgreen: "#006400", darkgrey: "#a9a9a9", darkkhaki: "#bdb76b", darkmagenta: "#8b008b", darkolivegreen: "#556b2f", darkorange: "#ff8c00", darkorchid: "#9932cc", darkred: "#8b0000", darksalmon: "#e9967a", darkseagreen: "#8fbc8f", darkslateblue: "#483d8b", darkslategray: "#2f4f4f", darkslategrey: "#2f4f4f", darkturquoise: "#00ced1", darkviolet: "#9400d3", deeppink: "#ff1493", deepskyblue: "#00bfff", dimgray: "#696969", dimgrey: "#696969", dodgerblue: "#1e90ff", firebrick: "#b22222", floralwhite: "#fffaf0", forestgreen: "#228b22", fuchsia: "#ff00ff", gainsboro: "#dcdcdc", ghostwhite: "#f8f8ff", gold: "#ffd700", goldenrod: "#daa520", gray: "#808080", green: "#008000", greenyellow: "#adff2f", grey: "#808080", honeydew: "#f0fff0", hotpink: "#ff69b4", indianred: "#cd5c5c", indigo: "#4b0082", ivory: "#fffff0", khaki: "#f0e68c", lavender: "#e6e6fa", lavenderblush: "#fff0f5", lawngreen: "#7cfc00", lemonchiffon: "#fffacd", lightblue: "#add8e6", lightcoral: "#f08080", lightcyan: "#e0ffff", lightgoldenrodyellow: "#fafad2", lightgray: "#d3d3d3", lightgreen: "#90ee90", lightgrey: "#d3d3d3", lightpink: "#ffb6c1", lightsalmon: "#ffa07a", lightseagreen: "#20b2aa", lightskyblue: "#87cefa", lightslategray: "#778899", lightslategrey: "#778899", lightsteelblue: "#b0c4de", lightyellow: "#ffffe0", lime: "#00ff00", limegreen: "#32cd32", linen: "#faf0e6", magenta: "#ff00ff", maroon: "#800000", mediumaquamarine: "#66cdaa", mediumblue: "#0000cd", mediumorchid: "#ba55d3", mediumpurple: "#9370db", mediumseagreen: "#3cb371", mediumslateblue: "#7b68ee", mediumspringgreen: "#00fa9a", mediumturquoise: "#48d1cc", mediumvioletred: "#c71585", midnightblue: "#191970", mintcream: "#f5fffa", mistyrose: "#ffe4e1", moccasin: "#ffe4b5", navajowhite: "#ffdead", navy: 
"#000080", oldlace: "#fdf5e6", olive: "#808000", olivedrab: "#6b8e23", orange: "#ffa500", orangered: "#ff4500", orchid: "#da70d6", palegoldenrod: "#eee8aa", palegreen: "#98fb98", paleturquoise: "#afeeee", palevioletred: "#db7093", papayawhip: "#ffefd5", peachpuff: "#ffdab9", peru: "#cd853f", pink: "#ffc0cb", plum: "#dda0dd", powderblue: "#b0e0e6", purple: "#800080", red: "#ff0000", rosybrown: "#bc8f8f", royalblue: "#4169e1", saddlebrown: "#8b4513", salmon: "#fa8072", sandybrown: "#f4a460", seagreen: "#2e8b57", seashell: "#fff5ee", sienna: "#a0522d", silver: "#c0c0c0", skyblue: "#87ceeb", slateblue: "#6a5acd", slategray: "#708090", slategrey: "#708090", snow: "#fffafa", springgreen: "#00ff7f", steelblue: "#4682b4", tan: "#d2b48c", teal: "#008080", thistle: "#d8bfd8", tomato: "#ff6347", turquoise: "#40e0d0", violet: "#ee82ee", wheat: "#f5deb3", white: "#ffffff", whitesmoke: "#f5f5f5", yellow: "#ffff00", yellowgreen: "#9acd32" }); d3_rgb_names.forEach(function(key, value) { d3_rgb_names.set(key, d3_rgb_parse(value, d3_rgb, d3_hsl_rgb)); }); d3.hsl = function(h, s, l) { return arguments.length === 1 ? h instanceof d3_Hsl ? d3_hsl(h.h, h.s, h.l) : d3_rgb_parse("" + h, d3_rgb_hsl, d3_hsl) : d3_hsl(+h, +s, +l); }; var d3_hslPrototype = d3_Hsl.prototype = new d3_Color; d3_hslPrototype.brighter = function(k) { k = Math.pow(.7, arguments.length ? k : 1); return d3_hsl(this.h, this.s, this.l / k); }; d3_hslPrototype.darker = function(k) { k = Math.pow(.7, arguments.length ? k : 1); return d3_hsl(this.h, this.s, k * this.l); }; d3_hslPrototype.rgb = function() { return d3_hsl_rgb(this.h, this.s, this.l); }; d3.hcl = function(h, c, l) { return arguments.length === 1 ? h instanceof d3_Hcl ? d3_hcl(h.h, h.c, h.l) : h instanceof d3_Lab ? d3_lab_hcl(h.l, h.a, h.b) : d3_lab_hcl((h = d3_rgb_lab((h = d3.rgb(h)).r, h.g, h.b)).l, h.a, h.b) : d3_hcl(+h, +c, +l); }; var d3_hclPrototype = d3_Hcl.prototype = new d3_Color; d3_hclPrototype.brighter = function(k) { return d3_hcl(this.h, this.c, Math.min(100, this.l + d3_lab_K * (arguments.length ? k : 1))); }; d3_hclPrototype.darker = function(k) { return d3_hcl(this.h, this.c, Math.max(0, this.l - d3_lab_K * (arguments.length ? k : 1))); }; d3_hclPrototype.rgb = function() { return d3_hcl_lab(this.h, this.c, this.l).rgb(); }; d3.lab = function(l, a, b) { return arguments.length === 1 ? l instanceof d3_Lab ? d3_lab(l.l, l.a, l.b) : l instanceof d3_Hcl ? d3_hcl_lab(l.l, l.c, l.h) : d3_rgb_lab((l = d3.rgb(l)).r, l.g, l.b) : d3_lab(+l, +a, +b); }; var d3_lab_K = 18; var d3_lab_X = .95047, d3_lab_Y = 1, d3_lab_Z = 1.08883; var d3_labPrototype = d3_Lab.prototype = new d3_Color; d3_labPrototype.brighter = function(k) { return d3_lab(Math.min(100, this.l + d3_lab_K * (arguments.length ? k : 1)), this.a, this.b); }; d3_labPrototype.darker = function(k) { return d3_lab(Math.max(0, this.l - d3_lab_K * (arguments.length ? 
k : 1)), this.a, this.b); }; d3_labPrototype.rgb = function() { return d3_lab_rgb(this.l, this.a, this.b); }; var d3_select = function(s, n) { return n.querySelector(s); }, d3_selectAll = function(s, n) { return n.querySelectorAll(s); }, d3_selectRoot = document.documentElement, d3_selectMatcher = d3_selectRoot.matchesSelector || d3_selectRoot.webkitMatchesSelector || d3_selectRoot.mozMatchesSelector || d3_selectRoot.msMatchesSelector || d3_selectRoot.oMatchesSelector, d3_selectMatches = function(n, s) { return d3_selectMatcher.call(n, s); }; if (typeof Sizzle === "function") { d3_select = function(s, n) { return Sizzle(s, n)[0] || null; }; d3_selectAll = function(s, n) { return Sizzle.uniqueSort(Sizzle(s, n)); }; d3_selectMatches = Sizzle.matchesSelector; } var d3_selectionPrototype = []; d3.selection = function() { return d3_selectionRoot; }; d3.selection.prototype = d3_selectionPrototype; d3_selectionPrototype.select = function(selector) { var subgroups = [], subgroup, subnode, group, node; if (typeof selector !== "function") selector = d3_selection_selector(selector); for (var j = -1, m = this.length; ++j < m; ) { subgroups.push(subgroup = []); subgroup.parentNode = (group = this[j]).parentNode; for (var i = -1, n = group.length; ++i < n; ) { if (node = group[i]) { subgroup.push(subnode = selector.call(node, node.__data__, i)); if (subnode && "__data__" in node) subnode.__data__ = node.__data__; } else { subgroup.push(null); } } } return d3_selection(subgroups); }; d3_selectionPrototype.selectAll = function(selector) { var subgroups = [], subgroup, node; if (typeof selector !== "function") selector = d3_selection_selectorAll(selector); for (var j = -1, m = this.length; ++j < m; ) { for (var group = this[j], i = -1, n = group.length; ++i < n; ) { if (node = group[i]) { subgroups.push(subgroup = d3_array(selector.call(node, node.__data__, i))); subgroup.parentNode = node; } } } return d3_selection(subgroups); }; d3_selectionPrototype.attr = function(name, value) { if (arguments.length < 2) { if (typeof name === "string") { var node = this.node(); name = d3.ns.qualify(name); return name.local ? 
node.getAttributeNS(name.space, name.local) : node.getAttribute(name); } for (value in name) this.each(d3_selection_attr(value, name[value])); return this; } return this.each(d3_selection_attr(name, value)); }; d3_selectionPrototype.classed = function(name, value) { if (arguments.length < 2) { if (typeof name === "string") { var node = this.node(), n = (name = name.trim().split(/^|\s+/g)).length, i = -1; if (value = node.classList) { while (++i < n) if (!value.contains(name[i])) return false; } else { value = node.className; if (value.baseVal != null) value = value.baseVal; while (++i < n) if (!d3_selection_classedRe(name[i]).test(value)) return false; } return true; } for (value in name) this.each(d3_selection_classed(value, name[value])); return this; } return this.each(d3_selection_classed(name, value)); }; d3_selectionPrototype.style = function(name, value, priority) { var n = arguments.length; if (n < 3) { if (typeof name !== "string") { if (n < 2) value = ""; for (priority in name) this.each(d3_selection_style(priority, name[priority], value)); return this; } if (n < 2) return window.getComputedStyle(this.node(), null).getPropertyValue(name); priority = ""; } return this.each(d3_selection_style(name, value, priority)); }; d3_selectionPrototype.property = function(name, value) { if (arguments.length < 2) { if (typeof name === "string") return this.node()[name]; for (value in name) this.each(d3_selection_property(value, name[value])); return this; } return this.each(d3_selection_property(name, value)); }; d3_selectionPrototype.text = function(value) { return arguments.length < 1 ? this.node().textContent : this.each(typeof value === "function" ? function() { var v = value.apply(this, arguments); this.textContent = v == null ? "" : v; } : value == null ? function() { this.textContent = ""; } : function() { this.textContent = value; }); }; d3_selectionPrototype.html = function(value) { return arguments.length < 1 ? this.node().innerHTML : this.each(typeof value === "function" ? function() { var v = value.apply(this, arguments); this.innerHTML = v == null ? "" : v; } : value == null ? function() { this.innerHTML = ""; } : function() { this.innerHTML = value; }); }; d3_selectionPrototype.append = function(name) { function append() { return this.appendChild(document.createElementNS(this.namespaceURI, name)); } function appendNS() { return this.appendChild(document.createElementNS(name.space, name.local)); } name = d3.ns.qualify(name); return this.select(name.local ? appendNS : append); }; d3_selectionPrototype.insert = function(name, before) { function insert() { return this.insertBefore(document.createElementNS(this.namespaceURI, name), d3_select(before, this)); } function insertNS() { return this.insertBefore(document.createElementNS(name.space, name.local), d3_select(before, this)); } name = d3.ns.qualify(name); return this.select(name.local ? 
insertNS : insert); }; d3_selectionPrototype.remove = function() { return this.each(function() { var parent = this.parentNode; if (parent) parent.removeChild(this); }); }; d3_selectionPrototype.data = function(value, key) { function bind(group, groupData) { var i, n = group.length, m = groupData.length, n0 = Math.min(n, m), n1 = Math.max(n, m), updateNodes = [], enterNodes = [], exitNodes = [], node, nodeData; if (key) { var nodeByKeyValue = new d3_Map, keyValues = [], keyValue, j = groupData.length; for (i = -1; ++i < n; ) { keyValue = key.call(node = group[i], node.__data__, i); if (nodeByKeyValue.has(keyValue)) { exitNodes[j++] = node; } else { nodeByKeyValue.set(keyValue, node); } keyValues.push(keyValue); } for (i = -1; ++i < m; ) { keyValue = key.call(groupData, nodeData = groupData[i], i); if (nodeByKeyValue.has(keyValue)) { updateNodes[i] = node = nodeByKeyValue.get(keyValue); node.__data__ = nodeData; enterNodes[i] = exitNodes[i] = null; } else { enterNodes[i] = d3_selection_dataNode(nodeData); updateNodes[i] = exitNodes[i] = null; } nodeByKeyValue.remove(keyValue); } for (i = -1; ++i < n; ) { if (nodeByKeyValue.has(keyValues[i])) { exitNodes[i] = group[i]; } } } else { for (i = -1; ++i < n0; ) { node = group[i]; nodeData = groupData[i]; if (node) { node.__data__ = nodeData; updateNodes[i] = node; enterNodes[i] = exitNodes[i] = null; } else { enterNodes[i] = d3_selection_dataNode(nodeData); updateNodes[i] = exitNodes[i] = null; } } for (; i < m; ++i) { enterNodes[i] = d3_selection_dataNode(groupData[i]); updateNodes[i] = exitNodes[i] = null; } for (; i < n1; ++i) { exitNodes[i] = group[i]; enterNodes[i] = updateNodes[i] = null; } } enterNodes.update = updateNodes; enterNodes.parentNode = updateNodes.parentNode = exitNodes.parentNode = group.parentNode; enter.push(enterNodes); update.push(updateNodes); exit.push(exitNodes); } var i = -1, n = this.length, group, node; if (!arguments.length) { value = new Array(n = (group = this[0]).length); while (++i < n) { if (node = group[i]) { value[i] = node.__data__; } } return value; } var enter = d3_selection_enter([]), update = d3_selection([]), exit = d3_selection([]); if (typeof value === "function") { while (++i < n) { bind(group = this[i], value.call(group, group.parentNode.__data__, i)); } } else { while (++i < n) { bind(group = this[i], value); } } update.enter = function() { return enter; }; update.exit = function() { return exit; }; return update; }; d3_selectionPrototype.datum = d3_selectionPrototype.map = function(value) { return arguments.length < 1 ? 
this.property("__data__") : this.property("__data__", value); }; d3_selectionPrototype.filter = function(filter) { var subgroups = [], subgroup, group, node; if (typeof filter !== "function") filter = d3_selection_filter(filter); for (var j = 0, m = this.length; j < m; j++) { subgroups.push(subgroup = []); subgroup.parentNode = (group = this[j]).parentNode; for (var i = 0, n = group.length; i < n; i++) { if ((node = group[i]) && filter.call(node, node.__data__, i)) { subgroup.push(node); } } } return d3_selection(subgroups); }; d3_selectionPrototype.order = function() { for (var j = -1, m = this.length; ++j < m; ) { for (var group = this[j], i = group.length - 1, next = group[i], node; --i >= 0; ) { if (node = group[i]) { if (next && next !== node.nextSibling) next.parentNode.insertBefore(node, next); next = node; } } } return this; }; d3_selectionPrototype.sort = function(comparator) { comparator = d3_selection_sortComparator.apply(this, arguments); for (var j = -1, m = this.length; ++j < m; ) this[j].sort(comparator); return this.order(); }; d3_selectionPrototype.on = function(type, listener, capture) { var n = arguments.length; if (n < 3) { if (typeof type !== "string") { if (n < 2) listener = false; for (capture in type) this.each(d3_selection_on(capture, type[capture], listener)); return this; } if (n < 2) return (n = this.node()["__on" + type]) && n._; capture = false; } return this.each(d3_selection_on(type, listener, capture)); }; d3_selectionPrototype.each = function(callback) { return d3_selection_each(this, function(node, i, j) { callback.call(node, node.__data__, i, j); }); }; d3_selectionPrototype.call = function(callback) { callback.apply(this, (arguments[0] = this, arguments)); return this; }; d3_selectionPrototype.empty = function() { return !this.node(); }; d3_selectionPrototype.node = function(callback) { for (var j = 0, m = this.length; j < m; j++) { for (var group = this[j], i = 0, n = group.length; i < n; i++) { var node = group[i]; if (node) return node; } } return null; }; d3_selectionPrototype.transition = function() { var subgroups = [], subgroup, node; for (var j = -1, m = this.length; ++j < m; ) { subgroups.push(subgroup = []); for (var group = this[j], i = -1, n = group.length; ++i < n; ) { subgroup.push((node = group[i]) ? { node: node, delay: d3_transitionDelay, duration: d3_transitionDuration } : null); } } return d3_transition(subgroups, d3_transitionId || ++d3_transitionNextId, Date.now()); }; var d3_selectionRoot = d3_selection([ [ document ] ]); d3_selectionRoot[0].parentNode = d3_selectRoot; d3.select = function(selector) { return typeof selector === "string" ? d3_selectionRoot.select(selector) : d3_selection([ [ selector ] ]); }; d3.selectAll = function(selector) { return typeof selector === "string" ? 
d3_selectionRoot.selectAll(selector) : d3_selection([ d3_array(selector) ]); }; var d3_selection_enterPrototype = []; d3.selection.enter = d3_selection_enter; d3.selection.enter.prototype = d3_selection_enterPrototype; d3_selection_enterPrototype.append = d3_selectionPrototype.append; d3_selection_enterPrototype.insert = d3_selectionPrototype.insert; d3_selection_enterPrototype.empty = d3_selectionPrototype.empty; d3_selection_enterPrototype.node = d3_selectionPrototype.node; d3_selection_enterPrototype.select = function(selector) { var subgroups = [], subgroup, subnode, upgroup, group, node; for (var j = -1, m = this.length; ++j < m; ) { upgroup = (group = this[j]).update; subgroups.push(subgroup = []); subgroup.parentNode = group.parentNode; for (var i = -1, n = group.length; ++i < n; ) { if (node = group[i]) { subgroup.push(upgroup[i] = subnode = selector.call(group.parentNode, node.__data__, i)); subnode.__data__ = node.__data__; } else { subgroup.push(null); } } } return d3_selection(subgroups); }; var d3_transitionPrototype = [], d3_transitionNextId = 0, d3_transitionId = 0, d3_transitionDefaultDelay = 0, d3_transitionDefaultDuration = 250, d3_transitionDefaultEase = d3.ease("cubic-in-out"), d3_transitionDelay = d3_transitionDefaultDelay, d3_transitionDuration = d3_transitionDefaultDuration, d3_transitionEase = d3_transitionDefaultEase; d3_transitionPrototype.call = d3_selectionPrototype.call; d3.transition = function(selection) { return arguments.length ? d3_transitionId ? selection.transition() : selection : d3_selectionRoot.transition(); }; d3.transition.prototype = d3_transitionPrototype; d3_transitionPrototype.select = function(selector) { var subgroups = [], subgroup, subnode, node; if (typeof selector !== "function") selector = d3_selection_selector(selector); for (var j = -1, m = this.length; ++j < m; ) { subgroups.push(subgroup = []); for (var group = this[j], i = -1, n = group.length; ++i < n; ) { if ((node = group[i]) && (subnode = selector.call(node.node, node.node.__data__, i))) { if ("__data__" in node.node) subnode.__data__ = node.node.__data__; subgroup.push({ node: subnode, delay: node.delay, duration: node.duration }); } else { subgroup.push(null); } } } return d3_transition(subgroups, this.id, this.time).ease(this.ease()); }; d3_transitionPrototype.selectAll = function(selector) { var subgroups = [], subgroup, subnodes, node; if (typeof selector !== "function") selector = d3_selection_selectorAll(selector); for (var j = -1, m = this.length; ++j < m; ) { for (var group = this[j], i = -1, n = group.length; ++i < n; ) { if (node = group[i]) { subnodes = selector.call(node.node, node.node.__data__, i); subgroups.push(subgroup = []); for (var k = -1, o = subnodes.length; ++k < o; ) { subgroup.push({ node: subnodes[k], delay: node.delay, duration: node.duration }); } } } } return d3_transition(subgroups, this.id, this.time).ease(this.ease()); }; d3_transitionPrototype.filter = function(filter) { var subgroups = [], subgroup, group, node; if (typeof filter !== "function") filter = d3_selection_filter(filter); for (var j = 0, m = this.length; j < m; j++) { subgroups.push(subgroup = []); for (var group = this[j], i = 0, n = group.length; i < n; i++) { if ((node = group[i]) && filter.call(node.node, node.node.__data__, i)) { subgroup.push(node); } } } return d3_transition(subgroups, this.id, this.time).ease(this.ease()); }; d3_transitionPrototype.attr = function(name, value) { if (arguments.length < 2) { for (value in name) this.attrTween(value, d3_tweenByName(name[value], 
value)); return this; } return this.attrTween(name, d3_tweenByName(value, name)); }; d3_transitionPrototype.attrTween = function(nameNS, tween) { function attrTween(d, i) { var f = tween.call(this, d, i, this.getAttribute(name)); return f === d3_tweenRemove ? (this.removeAttribute(name), null) : f && function(t) { this.setAttribute(name, f(t)); }; } function attrTweenNS(d, i) { var f = tween.call(this, d, i, this.getAttributeNS(name.space, name.local)); return f === d3_tweenRemove ? (this.removeAttributeNS(name.space, name.local), null) : f && function(t) { this.setAttributeNS(name.space, name.local, f(t)); }; } var name = d3.ns.qualify(nameNS); return this.tween("attr." + nameNS, name.local ? attrTweenNS : attrTween); }; d3_transitionPrototype.style = function(name, value, priority) { var n = arguments.length; if (n < 3) { if (typeof name !== "string") { if (n < 2) value = ""; for (priority in name) this.styleTween(priority, d3_tweenByName(name[priority], priority), value); return this; } priority = ""; } return this.styleTween(name, d3_tweenByName(value, name), priority); }; d3_transitionPrototype.styleTween = function(name, tween, priority) { if (arguments.length < 3) priority = ""; return this.tween("style." + name, function(d, i) { var f = tween.call(this, d, i, window.getComputedStyle(this, null).getPropertyValue(name)); return f === d3_tweenRemove ? (this.style.removeProperty(name), null) : f && function(t) { this.style.setProperty(name, f(t), priority); }; }); }; d3_transitionPrototype.text = function(value) { return this.tween("text", function(d, i) { this.textContent = typeof value === "function" ? value.call(this, d, i) : value; }); }; d3_transitionPrototype.remove = function() { return this.each("end.transition", function() { var p; if (!this.__transition__ && (p = this.parentNode)) p.removeChild(this); }); }; d3_transitionPrototype.delay = function(value) { return d3_selection_each(this, typeof value === "function" ? function(node, i, j) { node.delay = value.call(node = node.node, node.__data__, i, j) | 0; } : (value = value | 0, function(node) { node.delay = value; })); }; d3_transitionPrototype.duration = function(value) { return d3_selection_each(this, typeof value === "function" ? function(node, i, j) { node.duration = Math.max(1, value.call(node = node.node, node.__data__, i, j) | 0); } : (value = Math.max(1, value | 0), function(node) { node.duration = value; })); }; d3_transitionPrototype.transition = function() { return this.select(d3_this); }; d3.tween = function(b, interpolate) { function tweenFunction(d, i, a) { var v = b.call(this, d, i); return v == null ? a != "" && d3_tweenRemove : a != v && interpolate(a, v + ""); } function tweenString(d, i, a) { return a != b && interpolate(a, b); } return typeof b === "function" ? tweenFunction : b == null ? 
d3_tweenNull : (b += "", tweenString); }; var d3_tweenRemove = {}; var d3_timer_id = 0, d3_timer_byId = {}, d3_timer_queue = null, d3_timer_interval, d3_timer_timeout; d3.timer = function(callback, delay, then) { if (arguments.length < 3) { if (arguments.length < 2) delay = 0; else if (!isFinite(delay)) return; then = Date.now(); } var timer = d3_timer_byId[callback.id]; if (timer && timer.callback === callback) { timer.then = then; timer.delay = delay; } else d3_timer_byId[callback.id = ++d3_timer_id] = d3_timer_queue = { callback: callback, then: then, delay: delay, next: d3_timer_queue }; if (!d3_timer_interval) { d3_timer_timeout = clearTimeout(d3_timer_timeout); d3_timer_interval = 1; d3_timer_frame(d3_timer_step); } }; d3.timer.flush = function() { var elapsed, now = Date.now(), t1 = d3_timer_queue; while (t1) { elapsed = now - t1.then; if (!t1.delay) t1.flush = t1.callback(elapsed); t1 = t1.next; } d3_timer_flush(); }; var d3_timer_frame = window.requestAnimationFrame || window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame || window.oRequestAnimationFrame || window.msRequestAnimationFrame || function(callback) { setTimeout(callback, 17); }; d3.mouse = function(container) { return d3_mousePoint(container, d3_eventSource()); }; var d3_mouse_bug44083 = /WebKit/.test(navigator.userAgent) ? -1 : 0; d3.touches = function(container, touches) { if (arguments.length < 2) touches = d3_eventSource().touches; return touches ? d3_array(touches).map(function(touch) { var point = d3_mousePoint(container, touch); point.identifier = touch.identifier; return point; }) : []; }; d3.scale = {}; d3.scale.linear = function() { return d3_scale_linear([ 0, 1 ], [ 0, 1 ], d3.interpolate, false); }; d3.scale.log = function() { return d3_scale_log(d3.scale.linear(), d3_scale_logp); }; var d3_scale_logFormat = d3.format(".0e"); d3_scale_logp.pow = function(x) { return Math.pow(10, x); }; d3_scale_logn.pow = function(x) { return -Math.pow(10, -x); }; d3.scale.pow = function() { return d3_scale_pow(d3.scale.linear(), 1); }; d3.scale.sqrt = function() { return d3.scale.pow().exponent(.5); }; d3.scale.ordinal = function() { return d3_scale_ordinal([], { t: "range", a: [ [] ] }); }; d3.scale.category10 = function() { return d3.scale.ordinal().range(d3_category10); }; d3.scale.category20 = function() { return d3.scale.ordinal().range(d3_category20); }; d3.scale.category20b = function() { return d3.scale.ordinal().range(d3_category20b); }; d3.scale.category20c = function() { return d3.scale.ordinal().range(d3_category20c); }; var d3_category10 = [ "#1f77b4", "#ff7f0e", "#2ca02c", "#d62728", "#9467bd", "#8c564b", "#e377c2", "#7f7f7f", "#bcbd22", "#17becf" ]; var d3_category20 = [ "#1f77b4", "#aec7e8", "#ff7f0e", "#ffbb78", "#2ca02c", "#98df8a", "#d62728", "#ff9896", "#9467bd", "#c5b0d5", "#8c564b", "#c49c94", "#e377c2", "#f7b6d2", "#7f7f7f", "#c7c7c7", "#bcbd22", "#dbdb8d", "#17becf", "#9edae5" ]; var d3_category20b = [ "#393b79", "#5254a3", "#6b6ecf", "#9c9ede", "#637939", "#8ca252", "#b5cf6b", "#cedb9c", "#8c6d31", "#bd9e39", "#e7ba52", "#e7cb94", "#843c39", "#ad494a", "#d6616b", "#e7969c", "#7b4173", "#a55194", "#ce6dbd", "#de9ed6" ]; var d3_category20c = [ "#3182bd", "#6baed6", "#9ecae1", "#c6dbef", "#e6550d", "#fd8d3c", "#fdae6b", "#fdd0a2", "#31a354", "#74c476", "#a1d99b", "#c7e9c0", "#756bb1", "#9e9ac8", "#bcbddc", "#dadaeb", "#636363", "#969696", "#bdbdbd", "#d9d9d9" ]; d3.scale.quantile = function() { return d3_scale_quantile([], []); }; d3.scale.quantize = function() { return 
d3_scale_quantize(0, 1, [ 0, 1 ]); }; d3.scale.threshold = function() { return d3_scale_threshold([ .5 ], [ 0, 1 ]); }; d3.scale.identity = function() { return d3_scale_identity([ 0, 1 ]); }; d3.svg = {}; d3.svg.arc = function() { function arc() { var r0 = innerRadius.apply(this, arguments), r1 = outerRadius.apply(this, arguments), a0 = startAngle.apply(this, arguments) + d3_svg_arcOffset, a1 = endAngle.apply(this, arguments) + d3_svg_arcOffset, da = (a1 < a0 && (da = a0, a0 = a1, a1 = da), a1 - a0), df = da < Math.PI ? "0" : "1", c0 = Math.cos(a0), s0 = Math.sin(a0), c1 = Math.cos(a1), s1 = Math.sin(a1); return da >= d3_svg_arcMax ? r0 ? "M0," + r1 + "A" + r1 + "," + r1 + " 0 1,1 0," + -r1 + "A" + r1 + "," + r1 + " 0 1,1 0," + r1 + "M0," + r0 + "A" + r0 + "," + r0 + " 0 1,0 0," + -r0 + "A" + r0 + "," + r0 + " 0 1,0 0," + r0 + "Z" : "M0," + r1 + "A" + r1 + "," + r1 + " 0 1,1 0," + -r1 + "A" + r1 + "," + r1 + " 0 1,1 0," + r1 + "Z" : r0 ? "M" + r1 * c0 + "," + r1 * s0 + "A" + r1 + "," + r1 + " 0 " + df + ",1 " + r1 * c1 + "," + r1 * s1 + "L" + r0 * c1 + "," + r0 * s1 + "A" + r0 + "," + r0 + " 0 " + df + ",0 " + r0 * c0 + "," + r0 * s0 + "Z" : "M" + r1 * c0 + "," + r1 * s0 + "A" + r1 + "," + r1 + " 0 " + df + ",1 " + r1 * c1 + "," + r1 * s1 + "L0,0" + "Z"; } var innerRadius = d3_svg_arcInnerRadius, outerRadius = d3_svg_arcOuterRadius, startAngle = d3_svg_arcStartAngle, endAngle = d3_svg_arcEndAngle; arc.innerRadius = function(v) { if (!arguments.length) return innerRadius; innerRadius = d3_functor(v); return arc; }; arc.outerRadius = function(v) { if (!arguments.length) return outerRadius; outerRadius = d3_functor(v); return arc; }; arc.startAngle = function(v) { if (!arguments.length) return startAngle; startAngle = d3_functor(v); return arc; }; arc.endAngle = function(v) { if (!arguments.length) return endAngle; endAngle = d3_functor(v); return arc; }; arc.centroid = function() { var r = (innerRadius.apply(this, arguments) + outerRadius.apply(this, arguments)) / 2, a = (startAngle.apply(this, arguments) + endAngle.apply(this, arguments)) / 2 + d3_svg_arcOffset; return [ Math.cos(a) * r, Math.sin(a) * r ]; }; return arc; }; var d3_svg_arcOffset = -Math.PI / 2, d3_svg_arcMax = 2 * Math.PI - 1e-6; d3.svg.line = function() { return d3_svg_line(d3_identity); }; var d3_svg_lineInterpolators = d3.map({ linear: d3_svg_lineLinear, "linear-closed": d3_svg_lineLinearClosed, "step-before": d3_svg_lineStepBefore, "step-after": d3_svg_lineStepAfter, basis: d3_svg_lineBasis, "basis-open": d3_svg_lineBasisOpen, "basis-closed": d3_svg_lineBasisClosed, bundle: d3_svg_lineBundle, cardinal: d3_svg_lineCardinal, "cardinal-open": d3_svg_lineCardinalOpen, "cardinal-closed": d3_svg_lineCardinalClosed, monotone: d3_svg_lineMonotone }); d3_svg_lineInterpolators.forEach(function(key, value) { value.key = key; value.closed = /-closed$/.test(key); }); var d3_svg_lineBasisBezier1 = [ 0, 2 / 3, 1 / 3, 0 ], d3_svg_lineBasisBezier2 = [ 0, 1 / 3, 2 / 3, 0 ], d3_svg_lineBasisBezier3 = [ 0, 1 / 6, 2 / 3, 1 / 6 ]; d3.svg.line.radial = function() { var line = d3_svg_line(d3_svg_lineRadial); line.radius = line.x, delete line.x; line.angle = line.y, delete line.y; return line; }; d3_svg_lineStepBefore.reverse = d3_svg_lineStepAfter; d3_svg_lineStepAfter.reverse = d3_svg_lineStepBefore; d3.svg.area = function() { return d3_svg_area(d3_identity); }; d3.svg.area.radial = function() { var area = d3_svg_area(d3_svg_lineRadial); area.radius = area.x, delete area.x; area.innerRadius = area.x0, delete area.x0; area.outerRadius = 
area.x1, delete area.x1; area.angle = area.y, delete area.y; area.startAngle = area.y0, delete area.y0; area.endAngle = area.y1, delete area.y1; return area; }; d3.svg.chord = function() { function chord(d, i) { var s = subgroup(this, source, d, i), t = subgroup(this, target, d, i); return "M" + s.p0 + arc(s.r, s.p1, s.a1 - s.a0) + (equals(s, t) ? curve(s.r, s.p1, s.r, s.p0) : curve(s.r, s.p1, t.r, t.p0) + arc(t.r, t.p1, t.a1 - t.a0) + curve(t.r, t.p1, s.r, s.p0)) + "Z"; } function subgroup(self, f, d, i) { var subgroup = f.call(self, d, i), r = radius.call(self, subgroup, i), a0 = startAngle.call(self, subgroup, i) + d3_svg_arcOffset, a1 = endAngle.call(self, subgroup, i) + d3_svg_arcOffset; return { r: r, a0: a0, a1: a1, p0: [ r * Math.cos(a0), r * Math.sin(a0) ], p1: [ r * Math.cos(a1), r * Math.sin(a1) ] }; } function equals(a, b) { return a.a0 == b.a0 && a.a1 == b.a1; } function arc(r, p, a) { return "A" + r + "," + r + " 0 " + +(a > Math.PI) + ",1 " + p; } function curve(r0, p0, r1, p1) { return "Q 0,0 " + p1; } var source = d3_svg_chordSource, target = d3_svg_chordTarget, radius = d3_svg_chordRadius, startAngle = d3_svg_arcStartAngle, endAngle = d3_svg_arcEndAngle; chord.radius = function(v) { if (!arguments.length) return radius; radius = d3_functor(v); return chord; }; chord.source = function(v) { if (!arguments.length) return source; source = d3_functor(v); return chord; }; chord.target = function(v) { if (!arguments.length) return target; target = d3_functor(v); return chord; }; chord.startAngle = function(v) { if (!arguments.length) return startAngle; startAngle = d3_functor(v); return chord; }; chord.endAngle = function(v) { if (!arguments.length) return endAngle; endAngle = d3_functor(v); return chord; }; return chord; }; d3.svg.diagonal = function() { function diagonal(d, i) { var p0 = source.call(this, d, i), p3 = target.call(this, d, i), m = (p0.y + p3.y) / 2, p = [ p0, { x: p0.x, y: m }, { x: p3.x, y: m }, p3 ]; p = p.map(projection); return "M" + p[0] + "C" + p[1] + " " + p[2] + " " + p[3]; } var source = d3_svg_chordSource, target = d3_svg_chordTarget, projection = d3_svg_diagonalProjection; diagonal.source = function(x) { if (!arguments.length) return source; source = d3_functor(x); return diagonal; }; diagonal.target = function(x) { if (!arguments.length) return target; target = d3_functor(x); return diagonal; }; diagonal.projection = function(x) { if (!arguments.length) return projection; projection = x; return diagonal; }; return diagonal; }; d3.svg.diagonal.radial = function() { var diagonal = d3.svg.diagonal(), projection = d3_svg_diagonalProjection, projection_ = diagonal.projection; diagonal.projection = function(x) { return arguments.length ? 
projection_(d3_svg_diagonalRadialProjection(projection = x)) : projection; }; return diagonal; }; d3.svg.mouse = d3.mouse; d3.svg.touches = d3.touches; d3.svg.symbol = function() { function symbol(d, i) { return (d3_svg_symbols.get(type.call(this, d, i)) || d3_svg_symbolCircle)(size.call(this, d, i)); } var type = d3_svg_symbolType, size = d3_svg_symbolSize; symbol.type = function(x) { if (!arguments.length) return type; type = d3_functor(x); return symbol; }; symbol.size = function(x) { if (!arguments.length) return size; size = d3_functor(x); return symbol; }; return symbol; }; var d3_svg_symbols = d3.map({ circle: d3_svg_symbolCircle, cross: function(size) { var r = Math.sqrt(size / 5) / 2; return "M" + -3 * r + "," + -r + "H" + -r + "V" + -3 * r + "H" + r + "V" + -r + "H" + 3 * r + "V" + r + "H" + r + "V" + 3 * r + "H" + -r + "V" + r + "H" + -3 * r + "Z"; }, diamond: function(size) { var ry = Math.sqrt(size / (2 * d3_svg_symbolTan30)), rx = ry * d3_svg_symbolTan30; return "M0," + -ry + "L" + rx + ",0" + " 0," + ry + " " + -rx + ",0" + "Z"; }, square: function(size) { var r = Math.sqrt(size) / 2; return "M" + -r + "," + -r + "L" + r + "," + -r + " " + r + "," + r + " " + -r + "," + r + "Z"; }, "triangle-down": function(size) { var rx = Math.sqrt(size / d3_svg_symbolSqrt3), ry = rx * d3_svg_symbolSqrt3 / 2; return "M0," + ry + "L" + rx + "," + -ry + " " + -rx + "," + -ry + "Z"; }, "triangle-up": function(size) { var rx = Math.sqrt(size / d3_svg_symbolSqrt3), ry = rx * d3_svg_symbolSqrt3 / 2; return "M0," + -ry + "L" + rx + "," + ry + " " + -rx + "," + ry + "Z"; } }); d3.svg.symbolTypes = d3_svg_symbols.keys(); var d3_svg_symbolSqrt3 = Math.sqrt(3), d3_svg_symbolTan30 = Math.tan(30 * Math.PI / 180); d3.svg.axis = function() { function axis(g) { g.each(function() { var g = d3.select(this); var ticks = tickValues == null ? scale.ticks ? scale.ticks.apply(scale, tickArguments_) : scale.domain() : tickValues, tickFormat = tickFormat_ == null ? scale.tickFormat ? 
scale.tickFormat.apply(scale, tickArguments_) : String : tickFormat_; var subticks = d3_svg_axisSubdivide(scale, ticks, tickSubdivide), subtick = g.selectAll(".minor").data(subticks, String), subtickEnter = subtick.enter().insert("line", "g").attr("class", "tick minor").style("opacity", 1e-6), subtickExit = d3.transition(subtick.exit()).style("opacity", 1e-6).remove(), subtickUpdate = d3.transition(subtick).style("opacity", 1); var tick = g.selectAll("g").data(ticks, String), tickEnter = tick.enter().insert("g", "path").style("opacity", 1e-6), tickExit = d3.transition(tick.exit()).style("opacity", 1e-6).remove(), tickUpdate = d3.transition(tick).style("opacity", 1), tickTransform; var range = d3_scaleRange(scale), path = g.selectAll(".domain").data([ 0 ]), pathEnter = path.enter().append("path").attr("class", "domain"), pathUpdate = d3.transition(path); var scale1 = scale.copy(), scale0 = this.__chart__ || scale1; this.__chart__ = scale1; tickEnter.append("line").attr("class", "tick"); tickEnter.append("text"); var lineEnter = tickEnter.select("line"), lineUpdate = tickUpdate.select("line"), text = tick.select("text").text(tickFormat), textEnter = tickEnter.select("text"), textUpdate = tickUpdate.select("text"); switch (orient) { case "bottom": { tickTransform = d3_svg_axisX; subtickEnter.attr("y2", tickMinorSize); subtickUpdate.attr("x2", 0).attr("y2", tickMinorSize); lineEnter.attr("y2", tickMajorSize); textEnter.attr("y", Math.max(tickMajorSize, 0) + tickPadding); lineUpdate.attr("x2", 0).attr("y2", tickMajorSize); textUpdate.attr("x", 0).attr("y", Math.max(tickMajorSize, 0) + tickPadding); text.attr("dy", ".71em").attr("text-anchor", "middle"); pathUpdate.attr("d", "M" + range[0] + "," + tickEndSize + "V0H" + range[1] + "V" + tickEndSize); break; } case "top": { tickTransform = d3_svg_axisX; subtickEnter.attr("y2", -tickMinorSize); subtickUpdate.attr("x2", 0).attr("y2", -tickMinorSize); lineEnter.attr("y2", -tickMajorSize); textEnter.attr("y", -(Math.max(tickMajorSize, 0) + tickPadding)); lineUpdate.attr("x2", 0).attr("y2", -tickMajorSize); textUpdate.attr("x", 0).attr("y", -(Math.max(tickMajorSize, 0) + tickPadding)); text.attr("dy", "0em").attr("text-anchor", "middle"); pathUpdate.attr("d", "M" + range[0] + "," + -tickEndSize + "V0H" + range[1] + "V" + -tickEndSize); break; } case "left": { tickTransform = d3_svg_axisY; subtickEnter.attr("x2", -tickMinorSize); subtickUpdate.attr("x2", -tickMinorSize).attr("y2", 0); lineEnter.attr("x2", -tickMajorSize); textEnter.attr("x", -(Math.max(tickMajorSize, 0) + tickPadding)); lineUpdate.attr("x2", -tickMajorSize).attr("y2", 0); textUpdate.attr("x", -(Math.max(tickMajorSize, 0) + tickPadding)).attr("y", 0); text.attr("dy", ".32em").attr("text-anchor", "end"); pathUpdate.attr("d", "M" + -tickEndSize + "," + range[0] + "H0V" + range[1] + "H" + -tickEndSize); break; } case "right": { tickTransform = d3_svg_axisY; subtickEnter.attr("x2", tickMinorSize); subtickUpdate.attr("x2", tickMinorSize).attr("y2", 0); lineEnter.attr("x2", tickMajorSize); textEnter.attr("x", Math.max(tickMajorSize, 0) + tickPadding); lineUpdate.attr("x2", tickMajorSize).attr("y2", 0); textUpdate.attr("x", Math.max(tickMajorSize, 0) + tickPadding).attr("y", 0); text.attr("dy", ".32em").attr("text-anchor", "start"); pathUpdate.attr("d", "M" + tickEndSize + "," + range[0] + "H0V" + range[1] + "H" + tickEndSize); break; } } if (scale.ticks) { tickEnter.call(tickTransform, scale0); tickUpdate.call(tickTransform, scale1); tickExit.call(tickTransform, scale1); 
subtickEnter.call(tickTransform, scale0); subtickUpdate.call(tickTransform, scale1); subtickExit.call(tickTransform, scale1); } else { var dx = scale1.rangeBand() / 2, x = function(d) { return scale1(d) + dx; }; tickEnter.call(tickTransform, x); tickUpdate.call(tickTransform, x); } }); } var scale = d3.scale.linear(), orient = "bottom", tickMajorSize = 6, tickMinorSize = 6, tickEndSize = 6, tickPadding = 3, tickArguments_ = [ 10 ], tickValues = null, tickFormat_, tickSubdivide = 0; axis.scale = function(x) { if (!arguments.length) return scale; scale = x; return axis; }; axis.orient = function(x) { if (!arguments.length) return orient; orient = x; return axis; }; axis.ticks = function() { if (!arguments.length) return tickArguments_; tickArguments_ = arguments; return axis; }; axis.tickValues = function(x) { if (!arguments.length) return tickValues; tickValues = x; return axis; }; axis.tickFormat = function(x) { if (!arguments.length) return tickFormat_; tickFormat_ = x; return axis; }; axis.tickSize = function(x, y, z) { if (!arguments.length) return tickMajorSize; var n = arguments.length - 1; tickMajorSize = +x; tickMinorSize = n > 1 ? +y : tickMajorSize; tickEndSize = n > 0 ? +arguments[n] : tickMajorSize; return axis; }; axis.tickPadding = function(x) { if (!arguments.length) return tickPadding; tickPadding = +x; return axis; }; axis.tickSubdivide = function(x) { if (!arguments.length) return tickSubdivide; tickSubdivide = +x; return axis; }; return axis; }; d3.svg.brush = function() { function brush(g) { g.each(function() { var g = d3.select(this), bg = g.selectAll(".background").data([ 0 ]), fg = g.selectAll(".extent").data([ 0 ]), tz = g.selectAll(".resize").data(resizes, String), e; g.style("pointer-events", "all").on("mousedown.brush", brushstart).on("touchstart.brush", brushstart); bg.enter().append("rect").attr("class", "background").style("visibility", "hidden").style("cursor", "crosshair"); fg.enter().append("rect").attr("class", "extent").style("cursor", "move"); tz.enter().append("g").attr("class", function(d) { return "resize " + d; }).style("cursor", function(d) { return d3_svg_brushCursor[d]; }).append("rect").attr("x", function(d) { return /[ew]$/.test(d) ? -3 : null; }).attr("y", function(d) { return /^[ns]/.test(d) ? -3 : null; }).attr("width", 6).attr("height", 6).style("visibility", "hidden"); tz.style("display", brush.empty() ? "none" : null); tz.exit().remove(); if (x) { e = d3_scaleRange(x); bg.attr("x", e[0]).attr("width", e[1] - e[0]); redrawX(g); } if (y) { e = d3_scaleRange(y); bg.attr("y", e[0]).attr("height", e[1] - e[0]); redrawY(g); } redraw(g); }); } function redraw(g) { g.selectAll(".resize").attr("transform", function(d) { return "translate(" + extent[+/e$/.test(d)][0] + "," + extent[+/^s/.test(d)][1] + ")"; }); } function redrawX(g) { g.select(".extent").attr("x", extent[0][0]); g.selectAll(".extent,.n>rect,.s>rect").attr("width", extent[1][0] - extent[0][0]); } function redrawY(g) { g.select(".extent").attr("y", extent[0][1]); g.selectAll(".extent,.e>rect,.w>rect").attr("height", extent[1][1] - extent[0][1]); } function brushstart() { function mouse() { var touches = d3.event.changedTouches; return touches ? 
d3.touches(target, touches)[0] : d3.mouse(target); } function keydown() { if (d3.event.keyCode == 32) { if (!dragging) { center = null; origin[0] -= extent[1][0]; origin[1] -= extent[1][1]; dragging = 2; } d3_eventCancel(); } } function keyup() { if (d3.event.keyCode == 32 && dragging == 2) { origin[0] += extent[1][0]; origin[1] += extent[1][1]; dragging = 0; d3_eventCancel(); } } function brushmove() { var point = mouse(), moved = false; if (offset) { point[0] += offset[0]; point[1] += offset[1]; } if (!dragging) { if (d3.event.altKey) { if (!center) center = [ (extent[0][0] + extent[1][0]) / 2, (extent[0][1] + extent[1][1]) / 2 ]; origin[0] = extent[+(point[0] < center[0])][0]; origin[1] = extent[+(point[1] < center[1])][1]; } else center = null; } if (resizingX && move1(point, x, 0)) { redrawX(g); moved = true; } if (resizingY && move1(point, y, 1)) { redrawY(g); moved = true; } if (moved) { redraw(g); event_({ type: "brush", mode: dragging ? "move" : "resize" }); } } function move1(point, scale, i) { var range = d3_scaleRange(scale), r0 = range[0], r1 = range[1], position = origin[i], size = extent[1][i] - extent[0][i], min, max; if (dragging) { r0 -= position; r1 -= size + position; } min = Math.max(r0, Math.min(r1, point[i])); if (dragging) { max = (min += position) + size; } else { if (center) position = Math.max(r0, Math.min(r1, 2 * center[i] - min)); if (position < min) { max = min; min = position; } else { max = position; } } if (extent[0][i] !== min || extent[1][i] !== max) { extentDomain = null; extent[0][i] = min; extent[1][i] = max; return true; } } function brushend() { brushmove(); g.style("pointer-events", "all").selectAll(".resize").style("display", brush.empty() ? "none" : null); d3.select("body").style("cursor", null); w.on("mousemove.brush", null).on("mouseup.brush", null).on("touchmove.brush", null).on("touchend.brush", null).on("keydown.brush", null).on("keyup.brush", null); event_({ type: "brushend" }); d3_eventCancel(); } var target = this, eventTarget = d3.select(d3.event.target), event_ = event.of(target, arguments), g = d3.select(target), resizing = eventTarget.datum(), resizingX = !/^(n|s)$/.test(resizing) && x, resizingY = !/^(e|w)$/.test(resizing) && y, dragging = eventTarget.classed("extent"), center, origin = mouse(), offset; var w = d3.select(window).on("mousemove.brush", brushmove).on("mouseup.brush", brushend).on("touchmove.brush", brushmove).on("touchend.brush", brushend).on("keydown.brush", keydown).on("keyup.brush", keyup); if (dragging) { origin[0] = extent[0][0] - origin[0]; origin[1] = extent[0][1] - origin[1]; } else if (resizing) { var ex = +/w$/.test(resizing), ey = +/^n/.test(resizing); offset = [ extent[1 - ex][0] - origin[0], extent[1 - ey][1] - origin[1] ]; origin[0] = extent[ex][0]; origin[1] = extent[ey][1]; } else if (d3.event.altKey) center = origin.slice(); g.style("pointer-events", "none").selectAll(".resize").style("display", null); d3.select("body").style("cursor", eventTarget.style("cursor")); event_({ type: "brushstart" }); brushmove(); d3_eventCancel(); } var event = d3_eventDispatch(brush, "brushstart", "brush", "brushend"), x = null, y = null, resizes = d3_svg_brushResizes[0], extent = [ [ 0, 0 ], [ 0, 0 ] ], extentDomain; brush.x = function(z) { if (!arguments.length) return x; x = z; resizes = d3_svg_brushResizes[!x << 1 | !y]; return brush; }; brush.y = function(z) { if (!arguments.length) return y; y = z; resizes = d3_svg_brushResizes[!x << 1 | !y]; return brush; }; brush.extent = function(z) { var x0, x1, y0, y1, t; if 
(!arguments.length) { z = extentDomain || extent; if (x) { x0 = z[0][0], x1 = z[1][0]; if (!extentDomain) { x0 = extent[0][0], x1 = extent[1][0]; if (x.invert) x0 = x.invert(x0), x1 = x.invert(x1); if (x1 < x0) t = x0, x0 = x1, x1 = t; } } if (y) { y0 = z[0][1], y1 = z[1][1]; if (!extentDomain) { y0 = extent[0][1], y1 = extent[1][1]; if (y.invert) y0 = y.invert(y0), y1 = y.invert(y1); if (y1 < y0) t = y0, y0 = y1, y1 = t; } } return x && y ? [ [ x0, y0 ], [ x1, y1 ] ] : x ? [ x0, x1 ] : y && [ y0, y1 ]; } extentDomain = [ [ 0, 0 ], [ 0, 0 ] ]; if (x) { x0 = z[0], x1 = z[1]; if (y) x0 = x0[0], x1 = x1[0]; extentDomain[0][0] = x0, extentDomain[1][0] = x1; if (x.invert) x0 = x(x0), x1 = x(x1); if (x1 < x0) t = x0, x0 = x1, x1 = t; extent[0][0] = x0 | 0, extent[1][0] = x1 | 0; } if (y) { y0 = z[0], y1 = z[1]; if (x) y0 = y0[1], y1 = y1[1]; extentDomain[0][1] = y0, extentDomain[1][1] = y1; if (y.invert) y0 = y(y0), y1 = y(y1); if (y1 < y0) t = y0, y0 = y1, y1 = t; extent[0][1] = y0 | 0, extent[1][1] = y1 | 0; } return brush; }; brush.clear = function() { extentDomain = null; extent[0][0] = extent[0][1] = extent[1][0] = extent[1][1] = 0; return brush; }; brush.empty = function() { return x && extent[0][0] === extent[1][0] || y && extent[0][1] === extent[1][1]; }; return d3.rebind(brush, event, "on"); }; var d3_svg_brushCursor = { n: "ns-resize", e: "ew-resize", s: "ns-resize", w: "ew-resize", nw: "nwse-resize", ne: "nesw-resize", se: "nwse-resize", sw: "nesw-resize" }; var d3_svg_brushResizes = [ [ "n", "e", "s", "w", "nw", "ne", "se", "sw" ], [ "e", "w" ], [ "n", "s" ], [] ]; d3.behavior = {}; d3.behavior.drag = function() { function drag() { this.on("mousedown.drag", mousedown).on("touchstart.drag", mousedown); } function mousedown() { function point() { var p = target.parentNode; return touchId ? d3.touches(p).filter(function(p) { return p.identifier === touchId; })[0] : d3.mouse(p); } function dragmove() { if (!target.parentNode) return dragend(); var p = point(), dx = p[0] - origin_[0], dy = p[1] - origin_[1]; moved |= dx | dy; origin_ = p; d3_eventCancel(); event_({ type: "drag", x: p[0] + offset[0], y: p[1] + offset[1], dx: dx, dy: dy }); } function dragend() { event_({ type: "dragend" }); if (moved) { d3_eventCancel(); if (d3.event.target === eventTarget) w.on("click.drag", click, true); } w.on(touchId ? "touchmove.drag-" + touchId : "mousemove.drag", null).on(touchId ? "touchend.drag-" + touchId : "mouseup.drag", null); } function click() { d3_eventCancel(); w.on("click.drag", null); } var target = this, event_ = event.of(target, arguments), eventTarget = d3.event.target, touchId = d3.event.touches && d3.event.changedTouches[0].identifier, offset, origin_ = point(), moved = 0; var w = d3.select(window).on(touchId ? "touchmove.drag-" + touchId : "mousemove.drag", dragmove).on(touchId ? 
"touchend.drag-" + touchId : "mouseup.drag", dragend, true); if (origin) { offset = origin.apply(target, arguments); offset = [ offset.x - origin_[0], offset.y - origin_[1] ]; } else { offset = [ 0, 0 ]; } if (!touchId) d3_eventCancel(); event_({ type: "dragstart" }); } var event = d3_eventDispatch(drag, "drag", "dragstart", "dragend"), origin = null; drag.origin = function(x) { if (!arguments.length) return origin; origin = x; return drag; }; return d3.rebind(drag, event, "on"); }; d3.behavior.zoom = function() { function zoom() { this.on("mousedown.zoom", mousedown).on("mousewheel.zoom", mousewheel).on("mousemove.zoom", mousemove).on("DOMMouseScroll.zoom", mousewheel).on("dblclick.zoom", dblclick).on("touchstart.zoom", touchstart).on("touchmove.zoom", touchmove).on("touchend.zoom", touchstart); } function location(p) { return [ (p[0] - translate[0]) / scale, (p[1] - translate[1]) / scale ]; } function point(l) { return [ l[0] * scale + translate[0], l[1] * scale + translate[1] ]; } function scaleTo(s) { scale = Math.max(scaleExtent[0], Math.min(scaleExtent[1], s)); } function translateTo(p, l) { l = point(l); translate[0] += p[0] - l[0]; translate[1] += p[1] - l[1]; } function dispatch(event) { if (x1) x1.domain(x0.range().map(function(x) { return (x - translate[0]) / scale; }).map(x0.invert)); if (y1) y1.domain(y0.range().map(function(y) { return (y - translate[1]) / scale; }).map(y0.invert)); d3.event.preventDefault(); event({ type: "zoom", scale: scale, translate: translate }); } function mousedown() { function mousemove() { moved = 1; translateTo(d3.mouse(target), l); dispatch(event_); } function mouseup() { if (moved) d3_eventCancel(); w.on("mousemove.zoom", null).on("mouseup.zoom", null); if (moved && d3.event.target === eventTarget) w.on("click.zoom", click, true); } function click() { d3_eventCancel(); w.on("click.zoom", null); } var target = this, event_ = event.of(target, arguments), eventTarget = d3.event.target, moved = 0, w = d3.select(window).on("mousemove.zoom", mousemove).on("mouseup.zoom", mouseup), l = location(d3.mouse(target)); window.focus(); d3_eventCancel(); } function mousewheel() { if (!translate0) translate0 = location(d3.mouse(this)); scaleTo(Math.pow(2, d3_behavior_zoomDelta() * .002) * scale); translateTo(d3.mouse(this), translate0); dispatch(event.of(this, arguments)); } function mousemove() { translate0 = null; } function dblclick() { var p = d3.mouse(this), l = location(p); scaleTo(d3.event.shiftKey ? 
scale / 2 : scale * 2); translateTo(p, l); dispatch(event.of(this, arguments)); } function touchstart() { var touches = d3.touches(this), now = Date.now(); scale0 = scale; translate0 = {}; touches.forEach(function(t) { translate0[t.identifier] = location(t); }); d3_eventCancel(); if (touches.length === 1) { if (now - touchtime < 500) { var p = touches[0], l = location(touches[0]); scaleTo(scale * 2); translateTo(p, l); dispatch(event.of(this, arguments)); } touchtime = now; } } function touchmove() { var touches = d3.touches(this), p0 = touches[0], l0 = translate0[p0.identifier]; if (p1 = touches[1]) { var p1, l1 = translate0[p1.identifier]; p0 = [ (p0[0] + p1[0]) / 2, (p0[1] + p1[1]) / 2 ]; l0 = [ (l0[0] + l1[0]) / 2, (l0[1] + l1[1]) / 2 ]; scaleTo(d3.event.scale * scale0); } translateTo(p0, l0); touchtime = null; dispatch(event.of(this, arguments)); } var translate = [ 0, 0 ], translate0, scale = 1, scale0, scaleExtent = d3_behavior_zoomInfinity, event = d3_eventDispatch(zoom, "zoom"), x0, x1, y0, y1, touchtime; zoom.translate = function(x) { if (!arguments.length) return translate; translate = x.map(Number); return zoom; }; zoom.scale = function(x) { if (!arguments.length) return scale; scale = +x; return zoom; }; zoom.scaleExtent = function(x) { if (!arguments.length) return scaleExtent; scaleExtent = x == null ? d3_behavior_zoomInfinity : x.map(Number); return zoom; }; zoom.x = function(z) { if (!arguments.length) return x1; x1 = z; x0 = z.copy(); return zoom; }; zoom.y = function(z) { if (!arguments.length) return y1; y1 = z; y0 = z.copy(); return zoom; }; return d3.rebind(zoom, event, "on"); }; var d3_behavior_zoomDiv, d3_behavior_zoomInfinity = [ 0, Infinity ]; d3.layout = {}; d3.layout.bundle = function() { return function(links) { var paths = [], i = -1, n = links.length; while (++i < n) paths.push(d3_layout_bundlePath(links[i])); return paths; }; }; d3.layout.chord = function() { function relayout() { var subgroups = {}, groupSums = [], groupIndex = d3.range(n), subgroupIndex = [], k, x, x0, i, j; chords = []; groups = []; k = 0, i = -1; while (++i < n) { x = 0, j = -1; while (++j < n) { x += matrix[i][j]; } groupSums.push(x); subgroupIndex.push(d3.range(n)); k += x; } if (sortGroups) { groupIndex.sort(function(a, b) { return sortGroups(groupSums[a], groupSums[b]); }); } if (sortSubgroups) { subgroupIndex.forEach(function(d, i) { d.sort(function(a, b) { return sortSubgroups(matrix[i][a], matrix[i][b]); }); }); } k = (2 * Math.PI - padding * n) / k; x = 0, i = -1; while (++i < n) { x0 = x, j = -1; while (++j < n) { var di = groupIndex[i], dj = subgroupIndex[di][j], v = matrix[di][dj], a0 = x, a1 = x += v * k; subgroups[di + "-" + dj] = { index: di, subindex: dj, startAngle: a0, endAngle: a1, value: v }; } groups[di] = { index: di, startAngle: x0, endAngle: x, value: (x - x0) / k }; x += padding; } i = -1; while (++i < n) { j = i - 1; while (++j < n) { var source = subgroups[i + "-" + j], target = subgroups[j + "-" + i]; if (source.value || target.value) { chords.push(source.value < target.value ? 
{ source: target, target: source } : { source: source, target: target }); } } } if (sortChords) resort(); } function resort() { chords.sort(function(a, b) { return sortChords((a.source.value + a.target.value) / 2, (b.source.value + b.target.value) / 2); }); } var chord = {}, chords, groups, matrix, n, padding = 0, sortGroups, sortSubgroups, sortChords; chord.matrix = function(x) { if (!arguments.length) return matrix; n = (matrix = x) && matrix.length; chords = groups = null; return chord; }; chord.padding = function(x) { if (!arguments.length) return padding; padding = x; chords = groups = null; return chord; }; chord.sortGroups = function(x) { if (!arguments.length) return sortGroups; sortGroups = x; chords = groups = null; return chord; }; chord.sortSubgroups = function(x) { if (!arguments.length) return sortSubgroups; sortSubgroups = x; chords = null; return chord; }; chord.sortChords = function(x) { if (!arguments.length) return sortChords; sortChords = x; if (chords) resort(); return chord; }; chord.chords = function() { if (!chords) relayout(); return chords; }; chord.groups = function() { if (!groups) relayout(); return groups; }; return chord; }; d3.layout.force = function() { function repulse(node) { return function(quad, x1, y1, x2, y2) { if (quad.point !== node) { var dx = quad.cx - node.x, dy = quad.cy - node.y, dn = 1 / Math.sqrt(dx * dx + dy * dy); if ((x2 - x1) * dn < theta) { var k = quad.charge * dn * dn; node.px -= dx * k; node.py -= dy * k; return true; } if (quad.point && isFinite(dn)) { var k = quad.pointCharge * dn * dn; node.px -= dx * k; node.py -= dy * k; } } return !quad.charge; }; } function dragmove(d) { d.px = d3.event.x; d.py = d3.event.y; force.resume(); } var force = {}, event = d3.dispatch("start", "tick", "end"), size = [ 1, 1 ], drag, alpha, friction = .9, linkDistance = d3_layout_forceLinkDistance, linkStrength = d3_layout_forceLinkStrength, charge = -30, gravity = .1, theta = .8, interval, nodes = [], links = [], distances, strengths, charges; force.tick = function() { if ((alpha *= .99) < .005) { event.end({ type: "end", alpha: alpha = 0 }); return true; } var n = nodes.length, m = links.length, q, i, o, s, t, l, k, x, y; for (i = 0; i < m; ++i) { o = links[i]; s = o.source; t = o.target; x = t.x - s.x; y = t.y - s.y; if (l = x * x + y * y) { l = alpha * strengths[i] * ((l = Math.sqrt(l)) - distances[i]) / l; x *= l; y *= l; t.x -= x * (k = s.weight / (t.weight + s.weight)); t.y -= y * k; s.x += x * (k = 1 - k); s.y += y * k; } } if (k = alpha * gravity) { x = size[0] / 2; y = size[1] / 2; i = -1; if (k) while (++i < n) { o = nodes[i]; o.x += (x - o.x) * k; o.y += (y - o.y) * k; } } if (charge) { d3_layout_forceAccumulate(q = d3.geom.quadtree(nodes), alpha, charges); i = -1; while (++i < n) { if (!(o = nodes[i]).fixed) { q.visit(repulse(o)); } } } i = -1; while (++i < n) { o = nodes[i]; if (o.fixed) { o.x = o.px; o.y = o.py; } else { o.x -= (o.px - (o.px = o.x)) * friction; o.y -= (o.py - (o.py = o.y)) * friction; } } event.tick({ type: "tick", alpha: alpha }); }; force.nodes = function(x) { if (!arguments.length) return nodes; nodes = x; return force; }; force.links = function(x) { if (!arguments.length) return links; links = x; return force; }; force.size = function(x) { if (!arguments.length) return size; size = x; return force; }; force.linkDistance = function(x) { if (!arguments.length) return linkDistance; linkDistance = d3_functor(x); return force; }; force.distance = force.linkDistance; force.linkStrength = function(x) { if (!arguments.length) 
return linkStrength; linkStrength = d3_functor(x); return force; }; force.friction = function(x) { if (!arguments.length) return friction; friction = x; return force; }; force.charge = function(x) { if (!arguments.length) return charge; charge = typeof x === "function" ? x : +x; return force; }; force.gravity = function(x) { if (!arguments.length) return gravity; gravity = x; return force; }; force.theta = function(x) { if (!arguments.length) return theta; theta = x; return force; }; force.alpha = function(x) { if (!arguments.length) return alpha; if (alpha) { if (x > 0) alpha = x; else alpha = 0; } else if (x > 0) { event.start({ type: "start", alpha: alpha = x }); d3.timer(force.tick); } return force; }; force.start = function() { function position(dimension, size) { var neighbors = neighbor(i), j = -1, m = neighbors.length, x; while (++j < m) if (!isNaN(x = neighbors[j][dimension])) return x; return Math.random() * size; } function neighbor() { if (!neighbors) { neighbors = []; for (j = 0; j < n; ++j) { neighbors[j] = []; } for (j = 0; j < m; ++j) { var o = links[j]; neighbors[o.source.index].push(o.target); neighbors[o.target.index].push(o.source); } } return neighbors[i]; } var i, j, n = nodes.length, m = links.length, w = size[0], h = size[1], neighbors, o; for (i = 0; i < n; ++i) { (o = nodes[i]).index = i; o.weight = 0; } distances = []; strengths = []; for (i = 0; i < m; ++i) { o = links[i]; if (typeof o.source == "number") o.source = nodes[o.source]; if (typeof o.target == "number") o.target = nodes[o.target]; distances[i] = linkDistance.call(this, o, i); strengths[i] = linkStrength.call(this, o, i); ++o.source.weight; ++o.target.weight; } for (i = 0; i < n; ++i) { o = nodes[i]; if (isNaN(o.x)) o.x = position("x", w); if (isNaN(o.y)) o.y = position("y", h); if (isNaN(o.px)) o.px = o.x; if (isNaN(o.py)) o.py = o.y; } charges = []; if (typeof charge === "function") { for (i = 0; i < n; ++i) { charges[i] = +charge.call(this, nodes[i], i); } } else { for (i = 0; i < n; ++i) { charges[i] = charge; } } return force.resume(); }; force.resume = function() { return force.alpha(.1); }; force.stop = function() { return force.alpha(0); }; force.drag = function() { if (!drag) drag = d3.behavior.drag().origin(d3_identity).on("dragstart", d3_layout_forceDragstart).on("drag", dragmove).on("dragend", d3_layout_forceDragend); this.on("mouseover.force", d3_layout_forceMouseover).on("mouseout.force", d3_layout_forceMouseout).call(drag); }; return d3.rebind(force, event, "on"); }; d3.layout.partition = function() { function position(node, x, dx, dy) { var children = node.children; node.x = x; node.y = node.depth * dy; node.dx = dx; node.dy = dy; if (children && (n = children.length)) { var i = -1, n, c, d; dx = node.value ? 
dx / node.value : 0; while (++i < n) { position(c = children[i], x, d = c.value * dx, dy); x += d; } } } function depth(node) { var children = node.children, d = 0; if (children && (n = children.length)) { var i = -1, n; while (++i < n) d = Math.max(d, depth(children[i])); } return 1 + d; } function partition(d, i) { var nodes = hierarchy.call(this, d, i); position(nodes[0], 0, size[0], size[1] / depth(nodes[0])); return nodes; } var hierarchy = d3.layout.hierarchy(), size = [ 1, 1 ]; partition.size = function(x) { if (!arguments.length) return size; size = x; return partition; }; return d3_layout_hierarchyRebind(partition, hierarchy); }; d3.layout.pie = function() { function pie(data, i) { var values = data.map(function(d, i) { return +value.call(pie, d, i); }); var a = +(typeof startAngle === "function" ? startAngle.apply(this, arguments) : startAngle); var k = ((typeof endAngle === "function" ? endAngle.apply(this, arguments) : endAngle) - startAngle) / d3.sum(values); var index = d3.range(data.length); if (sort != null) index.sort(sort === d3_layout_pieSortByValue ? function(i, j) { return values[j] - values[i]; } : function(i, j) { return sort(data[i], data[j]); }); var arcs = []; index.forEach(function(i) { var d; arcs[i] = { data: data[i], value: d = values[i], startAngle: a, endAngle: a += d * k }; }); return arcs; } var value = Number, sort = d3_layout_pieSortByValue, startAngle = 0, endAngle = 2 * Math.PI; pie.value = function(x) { if (!arguments.length) return value; value = x; return pie; }; pie.sort = function(x) { if (!arguments.length) return sort; sort = x; return pie; }; pie.startAngle = function(x) { if (!arguments.length) return startAngle; startAngle = x; return pie; }; pie.endAngle = function(x) { if (!arguments.length) return endAngle; endAngle = x; return pie; }; return pie; }; var d3_layout_pieSortByValue = {}; d3.layout.stack = function() { function stack(data, index) { var series = data.map(function(d, i) { return values.call(stack, d, i); }); var points = series.map(function(d, i) { return d.map(function(v, i) { return [ x.call(stack, v, i), y.call(stack, v, i) ]; }); }); var orders = order.call(stack, points, index); series = d3.permute(series, orders); points = d3.permute(points, orders); var offsets = offset.call(stack, points, index); var n = series.length, m = series[0].length, i, j, o; for (j = 0; j < m; ++j) { out.call(stack, series[0][j], o = offsets[j], points[0][j][1]); for (i = 1; i < n; ++i) { out.call(stack, series[i][j], o += points[i - 1][j][1], points[i][j][1]); } } return data; } var values = d3_identity, order = d3_layout_stackOrderDefault, offset = d3_layout_stackOffsetZero, out = d3_layout_stackOut, x = d3_layout_stackX, y = d3_layout_stackY; stack.values = function(x) { if (!arguments.length) return values; values = x; return stack; }; stack.order = function(x) { if (!arguments.length) return order; order = typeof x === "function" ? x : d3_layout_stackOrders.get(x) || d3_layout_stackOrderDefault; return stack; }; stack.offset = function(x) { if (!arguments.length) return offset; offset = typeof x === "function" ? 
x : d3_layout_stackOffsets.get(x) || d3_layout_stackOffsetZero; return stack; }; stack.x = function(z) { if (!arguments.length) return x; x = z; return stack; }; stack.y = function(z) { if (!arguments.length) return y; y = z; return stack; }; stack.out = function(z) { if (!arguments.length) return out; out = z; return stack; }; return stack; }; var d3_layout_stackOrders = d3.map({ "inside-out": function(data) { var n = data.length, i, j, max = data.map(d3_layout_stackMaxIndex), sums = data.map(d3_layout_stackReduceSum), index = d3.range(n).sort(function(a, b) { return max[a] - max[b]; }), top = 0, bottom = 0, tops = [], bottoms = []; for (i = 0; i < n; ++i) { j = index[i]; if (top < bottom) { top += sums[j]; tops.push(j); } else { bottom += sums[j]; bottoms.push(j); } } return bottoms.reverse().concat(tops); }, reverse: function(data) { return d3.range(data.length).reverse(); }, "default": d3_layout_stackOrderDefault }); var d3_layout_stackOffsets = d3.map({ silhouette: function(data) { var n = data.length, m = data[0].length, sums = [], max = 0, i, j, o, y0 = []; for (j = 0; j < m; ++j) { for (i = 0, o = 0; i < n; i++) o += data[i][j][1]; if (o > max) max = o; sums.push(o); } for (j = 0; j < m; ++j) { y0[j] = (max - sums[j]) / 2; } return y0; }, wiggle: function(data) { var n = data.length, x = data[0], m = x.length, max = 0, i, j, k, s1, s2, s3, dx, o, o0, y0 = []; y0[0] = o = o0 = 0; for (j = 1; j < m; ++j) { for (i = 0, s1 = 0; i < n; ++i) s1 += data[i][j][1]; for (i = 0, s2 = 0, dx = x[j][0] - x[j - 1][0]; i < n; ++i) { for (k = 0, s3 = (data[i][j][1] - data[i][j - 1][1]) / (2 * dx); k < i; ++k) { s3 += (data[k][j][1] - data[k][j - 1][1]) / dx; } s2 += s3 * data[i][j][1]; } y0[j] = o -= s1 ? s2 / s1 * dx : 0; if (o < o0) o0 = o; } for (j = 0; j < m; ++j) y0[j] -= o0; return y0; }, expand: function(data) { var n = data.length, m = data[0].length, k = 1 / n, i, j, o, y0 = []; for (j = 0; j < m; ++j) { for (i = 0, o = 0; i < n; i++) o += data[i][j][1]; if (o) for (i = 0; i < n; i++) data[i][j][1] /= o; else for (i = 0; i < n; i++) data[i][j][1] = k; } for (j = 0; j < m; ++j) y0[j] = 0; return y0; }, zero: d3_layout_stackOffsetZero }); d3.layout.histogram = function() { function histogram(data, i) { var bins = [], values = data.map(valuer, this), range = ranger.call(this, values, i), thresholds = binner.call(this, range, values, i), bin, i = -1, n = values.length, m = thresholds.length - 1, k = frequency ? 1 : 1 / n, x; while (++i < m) { bin = bins[i] = []; bin.dx = thresholds[i + 1] - (bin.x = thresholds[i]); bin.y = 0; } if (m > 0) { i = -1; while (++i < n) { x = values[i]; if (x >= range[0] && x <= range[1]) { bin = bins[d3.bisect(thresholds, x, 1, m) - 1]; bin.y += k; bin.push(data[i]); } } } return bins; } var frequency = true, valuer = Number, ranger = d3_layout_histogramRange, binner = d3_layout_histogramBinSturges; histogram.value = function(x) { if (!arguments.length) return valuer; valuer = x; return histogram; }; histogram.range = function(x) { if (!arguments.length) return ranger; ranger = d3_functor(x); return histogram; }; histogram.bins = function(x) { if (!arguments.length) return binner; binner = typeof x === "number" ? 
function(range) { return d3_layout_histogramBinFixed(range, x); } : d3_functor(x); return histogram; }; histogram.frequency = function(x) { if (!arguments.length) return frequency; frequency = !!x; return histogram; }; return histogram; }; d3.layout.hierarchy = function() { function recurse(data, depth, nodes) { var childs = children.call(hierarchy, data, depth), node = d3_layout_hierarchyInline ? data : { data: data }; node.depth = depth; nodes.push(node); if (childs && (n = childs.length)) { var i = -1, n, c = node.children = [], v = 0, j = depth + 1, d; while (++i < n) { d = recurse(childs[i], j, nodes); d.parent = node; c.push(d); v += d.value; } if (sort) c.sort(sort); if (value) node.value = v; } else if (value) { node.value = +value.call(hierarchy, data, depth) || 0; } return node; } function revalue(node, depth) { var children = node.children, v = 0; if (children && (n = children.length)) { var i = -1, n, j = depth + 1; while (++i < n) v += revalue(children[i], j); } else if (value) { v = +value.call(hierarchy, d3_layout_hierarchyInline ? node : node.data, depth) || 0; } if (value) node.value = v; return v; } function hierarchy(d) { var nodes = []; recurse(d, 0, nodes); return nodes; } var sort = d3_layout_hierarchySort, children = d3_layout_hierarchyChildren, value = d3_layout_hierarchyValue; hierarchy.sort = function(x) { if (!arguments.length) return sort; sort = x; return hierarchy; }; hierarchy.children = function(x) { if (!arguments.length) return children; children = x; return hierarchy; }; hierarchy.value = function(x) { if (!arguments.length) return value; value = x; return hierarchy; }; hierarchy.revalue = function(root) { revalue(root, 0); return root; }; return hierarchy; }; var d3_layout_hierarchyInline = false; d3.layout.pack = function() { function pack(d, i) { var nodes = hierarchy.call(this, d, i), root = nodes[0]; root.x = 0; root.y = 0; d3_layout_treeVisitAfter(root, function(d) { d.r = Math.sqrt(d.value); }); d3_layout_treeVisitAfter(root, d3_layout_packSiblings); var w = size[0], h = size[1], k = Math.max(2 * root.r / w, 2 * root.r / h); if (padding > 0) { var dr = padding * k / 2; d3_layout_treeVisitAfter(root, function(d) { d.r += dr; }); d3_layout_treeVisitAfter(root, d3_layout_packSiblings); d3_layout_treeVisitAfter(root, function(d) { d.r -= dr; }); k = Math.max(2 * root.r / w, 2 * root.r / h); } d3_layout_packTransform(root, w / 2, h / 2, 1 / k); return nodes; } var hierarchy = d3.layout.hierarchy().sort(d3_layout_packSort), padding = 0, size = [ 1, 1 ]; pack.size = function(x) { if (!arguments.length) return size; size = x; return pack; }; pack.padding = function(_) { if (!arguments.length) return padding; padding = +_; return pack; }; return d3_layout_hierarchyRebind(pack, hierarchy); }; d3.layout.cluster = function() { function cluster(d, i) { var nodes = hierarchy.call(this, d, i), root = nodes[0], previousNode, x = 0, kx, ky; d3_layout_treeVisitAfter(root, function(node) { var children = node.children; if (children && children.length) { node.x = d3_layout_clusterX(children); node.y = d3_layout_clusterY(children); } else { node.x = previousNode ? x += separation(node, previousNode) : 0; node.y = 0; previousNode = node; } }); var left = d3_layout_clusterLeft(root), right = d3_layout_clusterRight(root), x0 = left.x - separation(left, right) / 2, x1 = right.x + separation(right, left) / 2; d3_layout_treeVisitAfter(root, function(node) { node.x = (node.x - x0) / (x1 - x0) * size[0]; node.y = (1 - (root.y ? 
node.y / root.y : 1)) * size[1]; }); return nodes; } var hierarchy = d3.layout.hierarchy().sort(null).value(null), separation = d3_layout_treeSeparation, size = [ 1, 1 ]; cluster.separation = function(x) { if (!arguments.length) return separation; separation = x; return cluster; }; cluster.size = function(x) { if (!arguments.length) return size; size = x; return cluster; }; return d3_layout_hierarchyRebind(cluster, hierarchy); }; d3.layout.tree = function() { function tree(d, i) { function firstWalk(node, previousSibling) { var children = node.children, layout = node._tree; if (children && (n = children.length)) { var n, firstChild = children[0], previousChild, ancestor = firstChild, child, i = -1; while (++i < n) { child = children[i]; firstWalk(child, previousChild); ancestor = apportion(child, previousChild, ancestor); previousChild = child; } d3_layout_treeShift(node); var midpoint = .5 * (firstChild._tree.prelim + child._tree.prelim); if (previousSibling) { layout.prelim = previousSibling._tree.prelim + separation(node, previousSibling); layout.mod = layout.prelim - midpoint; } else { layout.prelim = midpoint; } } else { if (previousSibling) { layout.prelim = previousSibling._tree.prelim + separation(node, previousSibling); } } } function secondWalk(node, x) { node.x = node._tree.prelim + x; var children = node.children; if (children && (n = children.length)) { var i = -1, n; x += node._tree.mod; while (++i < n) { secondWalk(children[i], x); } } } function apportion(node, previousSibling, ancestor) { if (previousSibling) { var vip = node, vop = node, vim = previousSibling, vom = node.parent.children[0], sip = vip._tree.mod, sop = vop._tree.mod, sim = vim._tree.mod, som = vom._tree.mod, shift; while (vim = d3_layout_treeRight(vim), vip = d3_layout_treeLeft(vip), vim && vip) { vom = d3_layout_treeLeft(vom); vop = d3_layout_treeRight(vop); vop._tree.ancestor = node; shift = vim._tree.prelim + sim - vip._tree.prelim - sip + separation(vim, vip); if (shift > 0) { d3_layout_treeMove(d3_layout_treeAncestor(vim, node, ancestor), node, shift); sip += shift; sop += shift; } sim += vim._tree.mod; sip += vip._tree.mod; som += vom._tree.mod; sop += vop._tree.mod; } if (vim && !d3_layout_treeRight(vop)) { vop._tree.thread = vim; vop._tree.mod += sim - sop; } if (vip && !d3_layout_treeLeft(vom)) { vom._tree.thread = vip; vom._tree.mod += sip - som; ancestor = node; } } return ancestor; } var nodes = hierarchy.call(this, d, i), root = nodes[0]; d3_layout_treeVisitAfter(root, function(node, previousSibling) { node._tree = { ancestor: node, prelim: 0, mod: 0, change: 0, shift: 0, number: previousSibling ? 
previousSibling._tree.number + 1 : 0 }; }); firstWalk(root); secondWalk(root, -root._tree.prelim); var left = d3_layout_treeSearch(root, d3_layout_treeLeftmost), right = d3_layout_treeSearch(root, d3_layout_treeRightmost), deep = d3_layout_treeSearch(root, d3_layout_treeDeepest), x0 = left.x - separation(left, right) / 2, x1 = right.x + separation(right, left) / 2, y1 = deep.depth || 1; d3_layout_treeVisitAfter(root, function(node) { node.x = (node.x - x0) / (x1 - x0) * size[0]; node.y = node.depth / y1 * size[1]; delete node._tree; }); return nodes; } var hierarchy = d3.layout.hierarchy().sort(null).value(null), separation = d3_layout_treeSeparation, size = [ 1, 1 ]; tree.separation = function(x) { if (!arguments.length) return separation; separation = x; return tree; }; tree.size = function(x) { if (!arguments.length) return size; size = x; return tree; }; return d3_layout_hierarchyRebind(tree, hierarchy); }; d3.layout.treemap = function() { function scale(children, k) { var i = -1, n = children.length, child, area; while (++i < n) { area = (child = children[i]).value * (k < 0 ? 0 : k); child.area = isNaN(area) || area <= 0 ? 0 : area; } } function squarify(node) { var children = node.children; if (children && children.length) { var rect = pad(node), row = [], remaining = children.slice(), child, best = Infinity, score, u = Math.min(rect.dx, rect.dy), n; scale(remaining, rect.dx * rect.dy / node.value); row.area = 0; while ((n = remaining.length) > 0) { row.push(child = remaining[n - 1]); row.area += child.area; if ((score = worst(row, u)) <= best) { remaining.pop(); best = score; } else { row.area -= row.pop().area; position(row, u, rect, false); u = Math.min(rect.dx, rect.dy); row.length = row.area = 0; best = Infinity; } } if (row.length) { position(row, u, rect, true); row.length = row.area = 0; } children.forEach(squarify); } } function stickify(node) { var children = node.children; if (children && children.length) { var rect = pad(node), remaining = children.slice(), child, row = []; scale(remaining, rect.dx * rect.dy / node.value); row.area = 0; while (child = remaining.pop()) { row.push(child); row.area += child.area; if (child.z != null) { position(row, child.z ? rect.dx : rect.dy, rect, !remaining.length); row.length = row.area = 0; } } children.forEach(stickify); } } function worst(row, u) { var s = row.area, r, rmax = 0, rmin = Infinity, i = -1, n = row.length; while (++i < n) { if (!(r = row[i].area)) continue; if (r < rmin) rmin = r; if (r > rmax) rmax = r; } s *= s; u *= u; return s ? Math.max(u * rmax * ratio / s, s / (u * rmin * ratio)) : Infinity; } function position(row, u, rect, flush) { var i = -1, n = row.length, x = rect.x, y = rect.y, v = u ? round(row.area / u) : 0, o; if (u == rect.dx) { if (flush || v > rect.dy) v = rect.dy; while (++i < n) { o = row[i]; o.x = x; o.y = y; o.dy = v; x += o.dx = Math.min(rect.x + rect.dx - x, v ? round(o.area / v) : 0); } o.z = true; o.dx += rect.x + rect.dx - x; rect.y += v; rect.dy -= v; } else { if (flush || v > rect.dx) v = rect.dx; while (++i < n) { o = row[i]; o.x = x; o.y = y; o.dx = v; y += o.dy = Math.min(rect.y + rect.dy - y, v ? round(o.area / v) : 0); } o.z = false; o.dy += rect.y + rect.dy - y; rect.x += v; rect.dx -= v; } } function treemap(d) { var nodes = stickies || hierarchy(d), root = nodes[0]; root.x = 0; root.y = 0; root.dx = size[0]; root.dy = size[1]; if (stickies) hierarchy.revalue(root); scale([ root ], root.dx * root.dy / root.value); (stickies ? 
stickify : squarify)(root); if (sticky) stickies = nodes; return nodes; } var hierarchy = d3.layout.hierarchy(), round = Math.round, size = [ 1, 1 ], padding = null, pad = d3_layout_treemapPadNull, sticky = false, stickies, ratio = .5 * (1 + Math.sqrt(5)); treemap.size = function(x) { if (!arguments.length) return size; size = x; return treemap; }; treemap.padding = function(x) { function padFunction(node) { var p = x.call(treemap, node, node.depth); return p == null ? d3_layout_treemapPadNull(node) : d3_layout_treemapPad(node, typeof p === "number" ? [ p, p, p, p ] : p); } function padConstant(node) { return d3_layout_treemapPad(node, x); } if (!arguments.length) return padding; var type; pad = (padding = x) == null ? d3_layout_treemapPadNull : (type = typeof x) === "function" ? padFunction : type === "number" ? (x = [ x, x, x, x ], padConstant) : padConstant; return treemap; }; treemap.round = function(x) { if (!arguments.length) return round != Number; round = x ? Math.round : Number; return treemap; }; treemap.sticky = function(x) { if (!arguments.length) return sticky; sticky = x; stickies = null; return treemap; }; treemap.ratio = function(x) { if (!arguments.length) return ratio; ratio = x; return treemap; }; return d3_layout_hierarchyRebind(treemap, hierarchy); }; d3.csv = d3_dsv(",", "text/csv"); d3.tsv = d3_dsv(" ", "text/tab-separated-values"); d3.geo = {}; var d3_geo_radians = Math.PI / 180; d3.geo.azimuthal = function() { function azimuthal(coordinates) { var x1 = coordinates[0] * d3_geo_radians - x0, y1 = coordinates[1] * d3_geo_radians, cx1 = Math.cos(x1), sx1 = Math.sin(x1), cy1 = Math.cos(y1), sy1 = Math.sin(y1), cc = mode !== "orthographic" ? sy0 * sy1 + cy0 * cy1 * cx1 : null, c, k = mode === "stereographic" ? 1 / (1 + cc) : mode === "gnomonic" ? 1 / cc : mode === "equidistant" ? (c = Math.acos(cc), c ? c / Math.sin(c) : 0) : mode === "equalarea" ? Math.sqrt(2 / (1 + cc)) : 1, x = k * cy1 * sx1, y = k * (sy0 * cy1 * cx1 - cy0 * sy1); return [ scale * x + translate[0], scale * y + translate[1] ]; } var mode = "orthographic", origin, scale = 200, translate = [ 480, 250 ], x0, y0, cy0, sy0; azimuthal.invert = function(coordinates) { var x = (coordinates[0] - translate[0]) / scale, y = (coordinates[1] - translate[1]) / scale, p = Math.sqrt(x * x + y * y), c = mode === "stereographic" ? 2 * Math.atan(p) : mode === "gnomonic" ? Math.atan(p) : mode === "equidistant" ? p : mode === "equalarea" ? 2 * Math.asin(.5 * p) : Math.asin(p), sc = Math.sin(c), cc = Math.cos(c); return [ (x0 + Math.atan2(x * sc, p * cy0 * cc + y * sy0 * sc)) / d3_geo_radians, Math.asin(cc * sy0 - (p ? 
y * sc * cy0 / p : 0)) / d3_geo_radians ]; }; azimuthal.mode = function(x) { if (!arguments.length) return mode; mode = x + ""; return azimuthal; }; azimuthal.origin = function(x) { if (!arguments.length) return origin; origin = x; x0 = origin[0] * d3_geo_radians; y0 = origin[1] * d3_geo_radians; cy0 = Math.cos(y0); sy0 = Math.sin(y0); return azimuthal; }; azimuthal.scale = function(x) { if (!arguments.length) return scale; scale = +x; return azimuthal; }; azimuthal.translate = function(x) { if (!arguments.length) return translate; translate = [ +x[0], +x[1] ]; return azimuthal; }; return azimuthal.origin([ 0, 0 ]); }; d3.geo.albers = function() { function albers(coordinates) { var t = n * (d3_geo_radians * coordinates[0] - lng0), p = Math.sqrt(C - 2 * n * Math.sin(d3_geo_radians * coordinates[1])) / n; return [ scale * p * Math.sin(t) + translate[0], scale * (p * Math.cos(t) - p0) + translate[1] ]; } function reload() { var phi1 = d3_geo_radians * parallels[0], phi2 = d3_geo_radians * parallels[1], lat0 = d3_geo_radians * origin[1], s = Math.sin(phi1), c = Math.cos(phi1); lng0 = d3_geo_radians * origin[0]; n = .5 * (s + Math.sin(phi2)); C = c * c + 2 * n * s; p0 = Math.sqrt(C - 2 * n * Math.sin(lat0)) / n; return albers; } var origin = [ -98, 38 ], parallels = [ 29.5, 45.5 ], scale = 1e3, translate = [ 480, 250 ], lng0, n, C, p0; albers.invert = function(coordinates) { var x = (coordinates[0] - translate[0]) / scale, y = (coordinates[1] - translate[1]) / scale, p0y = p0 + y, t = Math.atan2(x, p0y), p = Math.sqrt(x * x + p0y * p0y); return [ (lng0 + t / n) / d3_geo_radians, Math.asin((C - p * p * n * n) / (2 * n)) / d3_geo_radians ]; }; albers.origin = function(x) { if (!arguments.length) return origin; origin = [ +x[0], +x[1] ]; return reload(); }; albers.parallels = function(x) { if (!arguments.length) return parallels; parallels = [ +x[0], +x[1] ]; return reload(); }; albers.scale = function(x) { if (!arguments.length) return scale; scale = +x; return albers; }; albers.translate = function(x) { if (!arguments.length) return translate; translate = [ +x[0], +x[1] ]; return albers; }; return reload(); }; d3.geo.albersUsa = function() { function albersUsa(coordinates) { var lon = coordinates[0], lat = coordinates[1]; return (lat > 50 ? alaska : lon < -140 ? hawaii : lat < 21 ? 
puertoRico : lower48)(coordinates); } var lower48 = d3.geo.albers(); var alaska = d3.geo.albers().origin([ -160, 60 ]).parallels([ 55, 65 ]); var hawaii = d3.geo.albers().origin([ -160, 20 ]).parallels([ 8, 18 ]); var puertoRico = d3.geo.albers().origin([ -60, 10 ]).parallels([ 8, 18 ]); albersUsa.scale = function(x) { if (!arguments.length) return lower48.scale(); lower48.scale(x); alaska.scale(x * .6); hawaii.scale(x); puertoRico.scale(x * 1.5); return albersUsa.translate(lower48.translate()); }; albersUsa.translate = function(x) { if (!arguments.length) return lower48.translate(); var dz = lower48.scale() / 1e3, dx = x[0], dy = x[1]; lower48.translate(x); alaska.translate([ dx - 400 * dz, dy + 170 * dz ]); hawaii.translate([ dx - 190 * dz, dy + 200 * dz ]); puertoRico.translate([ dx + 580 * dz, dy + 430 * dz ]); return albersUsa; }; return albersUsa.scale(lower48.scale()); }; d3.geo.bonne = function() { function bonne(coordinates) { var x = coordinates[0] * d3_geo_radians - x0, y = coordinates[1] * d3_geo_radians - y0; if (y1) { var p = c1 + y1 - y, E = x * Math.cos(y) / p; x = p * Math.sin(E); y = p * Math.cos(E) - c1; } else { x *= Math.cos(y); y *= -1; } return [ scale * x + translate[0], scale * y + translate[1] ]; } var scale = 200, translate = [ 480, 250 ], x0, y0, y1, c1; bonne.invert = function(coordinates) { var x = (coordinates[0] - translate[0]) / scale, y = (coordinates[1] - translate[1]) / scale; if (y1) { var c = c1 + y, p = Math.sqrt(x * x + c * c); y = c1 + y1 - p; x = x0 + p * Math.atan2(x, c) / Math.cos(y); } else { y *= -1; x /= Math.cos(y); } return [ x / d3_geo_radians, y / d3_geo_radians ]; }; bonne.parallel = function(x) { if (!arguments.length) return y1 / d3_geo_radians; c1 = 1 / Math.tan(y1 = x * d3_geo_radians); return bonne; }; bonne.origin = function(x) { if (!arguments.length) return [ x0 / d3_geo_radians, y0 / d3_geo_radians ]; x0 = x[0] * d3_geo_radians; y0 = x[1] * d3_geo_radians; return bonne; }; bonne.scale = function(x) { if (!arguments.length) return scale; scale = +x; return bonne; }; bonne.translate = function(x) { if (!arguments.length) return translate; translate = [ +x[0], +x[1] ]; return bonne; }; return bonne.origin([ 0, 0 ]).parallel(45); }; d3.geo.equirectangular = function() { function equirectangular(coordinates) { var x = coordinates[0] / 360, y = -coordinates[1] / 360; return [ scale * x + translate[0], scale * y + translate[1] ]; } var scale = 500, translate = [ 480, 250 ]; equirectangular.invert = function(coordinates) { var x = (coordinates[0] - translate[0]) / scale, y = (coordinates[1] - translate[1]) / scale; return [ 360 * x, -360 * y ]; }; equirectangular.scale = function(x) { if (!arguments.length) return scale; scale = +x; return equirectangular; }; equirectangular.translate = function(x) { if (!arguments.length) return translate; translate = [ +x[0], +x[1] ]; return equirectangular; }; return equirectangular; }; d3.geo.mercator = function() { function mercator(coordinates) { var x = coordinates[0] / 360, y = -(Math.log(Math.tan(Math.PI / 4 + coordinates[1] * d3_geo_radians / 2)) / d3_geo_radians) / 360; return [ scale * x + translate[0], scale * Math.max(-.5, Math.min(.5, y)) + translate[1] ]; } var scale = 500, translate = [ 480, 250 ]; mercator.invert = function(coordinates) { var x = (coordinates[0] - translate[0]) / scale, y = (coordinates[1] - translate[1]) / scale; return [ 360 * x, 2 * Math.atan(Math.exp(-360 * y * d3_geo_radians)) / d3_geo_radians - 90 ]; }; mercator.scale = function(x) { if (!arguments.length) return 
scale; scale = +x; return mercator; }; mercator.translate = function(x) { if (!arguments.length) return translate; translate = [ +x[0], +x[1] ]; return mercator; }; return mercator; }; d3.geo.path = function() { function path(d, i) { if (typeof pointRadius === "function") pointCircle = d3_path_circle(pointRadius.apply(this, arguments)); pathType(d); var result = buffer.length ? buffer.join("") : null; buffer = []; return result; } function project(coordinates) { return projection(coordinates).join(","); } function polygonArea(coordinates) { var sum = area(coordinates[0]), i = 0, n = coordinates.length; while (++i < n) sum -= area(coordinates[i]); return sum; } function polygonCentroid(coordinates) { var polygon = d3.geom.polygon(coordinates[0].map(projection)), area = polygon.area(), centroid = polygon.centroid(area < 0 ? (area *= -1, 1) : -1), x = centroid[0], y = centroid[1], z = area, i = 0, n = coordinates.length; while (++i < n) { polygon = d3.geom.polygon(coordinates[i].map(projection)); area = polygon.area(); centroid = polygon.centroid(area < 0 ? (area *= -1, 1) : -1); x -= centroid[0]; y -= centroid[1]; z -= area; } return [ x, y, 6 * z ]; } function area(coordinates) { return Math.abs(d3.geom.polygon(coordinates.map(projection)).area()); } var pointRadius = 4.5, pointCircle = d3_path_circle(pointRadius), projection = d3.geo.albersUsa(), buffer = []; var pathType = d3_geo_type({ FeatureCollection: function(o) { var features = o.features, i = -1, n = features.length; while (++i < n) buffer.push(pathType(features[i].geometry)); }, Feature: function(o) { pathType(o.geometry); }, Point: function(o) { buffer.push("M", project(o.coordinates), pointCircle); }, MultiPoint: function(o) { var coordinates = o.coordinates, i = -1, n = coordinates.length; while (++i < n) buffer.push("M", project(coordinates[i]), pointCircle); }, LineString: function(o) { var coordinates = o.coordinates, i = -1, n = coordinates.length; buffer.push("M"); while (++i < n) buffer.push(project(coordinates[i]), "L"); buffer.pop(); }, MultiLineString: function(o) { var coordinates = o.coordinates, i = -1, n = coordinates.length, subcoordinates, j, m; while (++i < n) { subcoordinates = coordinates[i]; j = -1; m = subcoordinates.length; buffer.push("M"); while (++j < m) buffer.push(project(subcoordinates[j]), "L"); buffer.pop(); } }, Polygon: function(o) { var coordinates = o.coordinates, i = -1, n = coordinates.length, subcoordinates, j, m; while (++i < n) { subcoordinates = coordinates[i]; j = -1; if ((m = subcoordinates.length - 1) > 0) { buffer.push("M"); while (++j < m) buffer.push(project(subcoordinates[j]), "L"); buffer[buffer.length - 1] = "Z"; } } }, MultiPolygon: function(o) { var coordinates = o.coordinates, i = -1, n = coordinates.length, subcoordinates, j, m, subsubcoordinates, k, p; while (++i < n) { subcoordinates = coordinates[i]; j = -1; m = subcoordinates.length; while (++j < m) { subsubcoordinates = subcoordinates[j]; k = -1; if ((p = subsubcoordinates.length - 1) > 0) { buffer.push("M"); while (++k < p) buffer.push(project(subsubcoordinates[k]), "L"); buffer[buffer.length - 1] = "Z"; } } } }, GeometryCollection: function(o) { var geometries = o.geometries, i = -1, n = geometries.length; while (++i < n) buffer.push(pathType(geometries[i])); } }); var areaType = path.area = d3_geo_type({ FeatureCollection: function(o) { var area = 0, features = o.features, i = -1, n = features.length; while (++i < n) area += areaType(features[i]); return area; }, Feature: function(o) { return areaType(o.geometry); }, 
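// Descriptive comment (based on polygonArea/area defined above): the area accessors below compute planar areas in projected (pixel) coordinates via d3.geom.polygon, with interior rings (holes) subtracted from the outer ring; they are not spherical areas.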
Polygon: function(o) { return polygonArea(o.coordinates); }, MultiPolygon: function(o) { var sum = 0, coordinates = o.coordinates, i = -1, n = coordinates.length; while (++i < n) sum += polygonArea(coordinates[i]); return sum; }, GeometryCollection: function(o) { var sum = 0, geometries = o.geometries, i = -1, n = geometries.length; while (++i < n) sum += areaType(geometries[i]); return sum; } }, 0); var centroidType = path.centroid = d3_geo_type({ Feature: function(o) { return centroidType(o.geometry); }, Polygon: function(o) { var centroid = polygonCentroid(o.coordinates); return [ centroid[0] / centroid[2], centroid[1] / centroid[2] ]; }, MultiPolygon: function(o) { var area = 0, coordinates = o.coordinates, centroid, x = 0, y = 0, z = 0, i = -1, n = coordinates.length; while (++i < n) { centroid = polygonCentroid(coordinates[i]); x += centroid[0]; y += centroid[1]; z += centroid[2]; } return [ x / z, y / z ]; } }); path.projection = function(x) { projection = x; return path; }; path.pointRadius = function(x) { if (typeof x === "function") pointRadius = x; else { pointRadius = +x; pointCircle = d3_path_circle(pointRadius); } return path; }; return path; }; d3.geo.bounds = function(feature) { var left = Infinity, bottom = Infinity, right = -Infinity, top = -Infinity; d3_geo_bounds(feature, function(x, y) { if (x < left) left = x; if (x > right) right = x; if (y < bottom) bottom = y; if (y > top) top = y; }); return [ [ left, bottom ], [ right, top ] ]; }; var d3_geo_boundsTypes = { Feature: d3_geo_boundsFeature, FeatureCollection: d3_geo_boundsFeatureCollection, GeometryCollection: d3_geo_boundsGeometryCollection, LineString: d3_geo_boundsLineString, MultiLineString: d3_geo_boundsMultiLineString, MultiPoint: d3_geo_boundsLineString, MultiPolygon: d3_geo_boundsMultiPolygon, Point: d3_geo_boundsPoint, Polygon: d3_geo_boundsPolygon }; d3.geo.circle = function() { function circle() {} function visible(point) { return arc.distance(point) < radians; } function clip(coordinates) { var i = -1, n = coordinates.length, clipped = [], p0, p1, p2, d0, d1; while (++i < n) { d1 = arc.distance(p2 = coordinates[i]); if (d1 < radians) { if (p1) clipped.push(d3_geo_greatArcInterpolate(p1, p2)((d0 - radians) / (d0 - d1))); clipped.push(p2); p0 = p1 = null; } else { p1 = p2; if (!p0 && clipped.length) { clipped.push(d3_geo_greatArcInterpolate(clipped[clipped.length - 1], p1)((radians - d0) / (d1 - d0))); p0 = p1; } } d0 = d1; } p0 = coordinates[0]; p1 = clipped[0]; if (p1 && p2[0] === p0[0] && p2[1] === p0[1] && !(p2[0] === p1[0] && p2[1] === p1[1])) { clipped.push(p1); } return resample(clipped); } function resample(coordinates) { var i = 0, n = coordinates.length, j, m, resampled = n ? 
[ coordinates[0] ] : coordinates, resamples, origin = arc.source(); while (++i < n) { resamples = arc.source(coordinates[i - 1])(coordinates[i]).coordinates; for (j = 0, m = resamples.length; ++j < m; ) resampled.push(resamples[j]); } arc.source(origin); return resampled; } var origin = [ 0, 0 ], degrees = 90 - .01, radians = degrees * d3_geo_radians, arc = d3.geo.greatArc().source(origin).target(d3_identity); circle.clip = function(d) { if (typeof origin === "function") arc.source(origin.apply(this, arguments)); return clipType(d) || null; }; var clipType = d3_geo_type({ FeatureCollection: function(o) { var features = o.features.map(clipType).filter(d3_identity); return features && (o = Object.create(o), o.features = features, o); }, Feature: function(o) { var geometry = clipType(o.geometry); return geometry && (o = Object.create(o), o.geometry = geometry, o); }, Point: function(o) { return visible(o.coordinates) && o; }, MultiPoint: function(o) { var coordinates = o.coordinates.filter(visible); return coordinates.length && { type: o.type, coordinates: coordinates }; }, LineString: function(o) { var coordinates = clip(o.coordinates); return coordinates.length && (o = Object.create(o), o.coordinates = coordinates, o); }, MultiLineString: function(o) { var coordinates = o.coordinates.map(clip).filter(function(d) { return d.length; }); return coordinates.length && (o = Object.create(o), o.coordinates = coordinates, o); }, Polygon: function(o) { var coordinates = o.coordinates.map(clip); return coordinates[0].length && (o = Object.create(o), o.coordinates = coordinates, o); }, MultiPolygon: function(o) { var coordinates = o.coordinates.map(function(d) { return d.map(clip); }).filter(function(d) { return d[0].length; }); return coordinates.length && (o = Object.create(o), o.coordinates = coordinates, o); }, GeometryCollection: function(o) { var geometries = o.geometries.map(clipType).filter(d3_identity); return geometries.length && (o = Object.create(o), o.geometries = geometries, o); } }); circle.origin = function(x) { if (!arguments.length) return origin; origin = x; if (typeof origin !== "function") arc.source(origin); return circle; }; circle.angle = function(x) { if (!arguments.length) return degrees; radians = (degrees = +x) * d3_geo_radians; return circle; }; return d3.rebind(circle, arc, "precision"); }; d3.geo.greatArc = function() { function greatArc() { var d = greatArc.distance.apply(this, arguments), t = 0, dt = precision / d, coordinates = [ p0 ]; while ((t += dt) < 1) coordinates.push(interpolate(t)); coordinates.push(p1); return { type: "LineString", coordinates: coordinates }; } var source = d3_geo_greatArcSource, p0, target = d3_geo_greatArcTarget, p1, precision = 6 * d3_geo_radians, interpolate = d3_geo_greatArcInterpolator(); greatArc.distance = function() { if (typeof source === "function") interpolate.source(p0 = source.apply(this, arguments)); if (typeof target === "function") interpolate.target(p1 = target.apply(this, arguments)); return interpolate.distance(); }; greatArc.source = function(_) { if (!arguments.length) return source; source = _; if (typeof source !== "function") interpolate.source(p0 = source); return greatArc; }; greatArc.target = function(_) { if (!arguments.length) return target; target = _; if (typeof target !== "function") interpolate.target(p1 = target); return greatArc; }; greatArc.precision = function(_) { if (!arguments.length) return precision / d3_geo_radians; precision = _ * d3_geo_radians; return greatArc; }; return greatArc; }; 
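// Usage sketch (illustrative only, commented out; it uses nothing beyond the chainable accessors defined above: greatArc.source/.target/.precision and circle.origin/.angle/.clip):
//   var arc = d3.geo.greatArc().source([0, 0]).target([60, 30]);  // endpoints in [lon, lat] degrees
//   var line = arc();                                             // GeoJSON LineString sampled roughly every `precision` degrees (6 by default)
//   var ring = d3.geo.circle().origin([0, 0]).angle(90);          // clipping circle of 90 degrees around the origin
//   var clipped = ring.clip(line);                                 // coordinates farther than 90 degrees from the origin are dropped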
d3.geo.greatCircle = d3.geo.circle; d3.geom = {}; d3.geom.contour = function(grid, start) { var s = start || d3_geom_contourStart(grid), c = [], x = s[0], y = s[1], dx = 0, dy = 0, pdx = NaN, pdy = NaN, i = 0; do { i = 0; if (grid(x - 1, y - 1)) i += 1; if (grid(x, y - 1)) i += 2; if (grid(x - 1, y)) i += 4; if (grid(x, y)) i += 8; if (i === 6) { dx = pdy === -1 ? -1 : 1; dy = 0; } else if (i === 9) { dx = 0; dy = pdx === 1 ? -1 : 1; } else { dx = d3_geom_contourDx[i]; dy = d3_geom_contourDy[i]; } if (dx != pdx && dy != pdy) { c.push([ x, y ]); pdx = dx; pdy = dy; } x += dx; y += dy; } while (s[0] != x || s[1] != y); return c; }; var d3_geom_contourDx = [ 1, 0, 1, 1, -1, 0, -1, 1, 0, 0, 0, 0, -1, 0, -1, NaN ], d3_geom_contourDy = [ 0, -1, 0, 0, 0, -1, 0, 0, 1, -1, 1, 1, 0, -1, 0, NaN ]; d3.geom.hull = function(vertices) { if (vertices.length < 3) return []; var len = vertices.length, plen = len - 1, points = [], stack = [], i, j, h = 0, x1, y1, x2, y2, u, v, a, sp; for (i = 1; i < len; ++i) { if (vertices[i][1] < vertices[h][1]) { h = i; } else if (vertices[i][1] == vertices[h][1]) { h = vertices[i][0] < vertices[h][0] ? i : h; } } for (i = 0; i < len; ++i) { if (i === h) continue; y1 = vertices[i][1] - vertices[h][1]; x1 = vertices[i][0] - vertices[h][0]; points.push({ angle: Math.atan2(y1, x1), index: i }); } points.sort(function(a, b) { return a.angle - b.angle; }); a = points[0].angle; v = points[0].index; u = 0; for (i = 1; i < plen; ++i) { j = points[i].index; if (a == points[i].angle) { x1 = vertices[v][0] - vertices[h][0]; y1 = vertices[v][1] - vertices[h][1]; x2 = vertices[j][0] - vertices[h][0]; y2 = vertices[j][1] - vertices[h][1]; if (x1 * x1 + y1 * y1 >= x2 * x2 + y2 * y2) { points[i].index = -1; } else { points[u].index = -1; a = points[i].angle; u = i; v = j; } } else { a = points[i].angle; u = i; v = j; } } stack.push(h); for (i = 0, j = 0; i < 2; ++j) { if (points[j].index !== -1) { stack.push(points[j].index); i++; } } sp = stack.length; for (; j < plen; ++j) { if (points[j].index === -1) continue; while (!d3_geom_hullCCW(stack[sp - 2], stack[sp - 1], points[j].index, vertices)) { --sp; } stack[sp++] = points[j].index; } var poly = []; for (i = 0; i < sp; ++i) { poly.push(vertices[stack[i]]); } return poly; }; d3.geom.polygon = function(coordinates) { coordinates.area = function() { var i = 0, n = coordinates.length, a = coordinates[n - 1][0] * coordinates[0][1], b = coordinates[n - 1][1] * coordinates[0][0]; while (++i < n) { a += coordinates[i - 1][0] * coordinates[i][1]; b += coordinates[i - 1][1] * coordinates[i][0]; } return (b - a) * .5; }; coordinates.centroid = function(k) { var i = -1, n = coordinates.length, x = 0, y = 0, a, b = coordinates[n - 1], c; if (!arguments.length) k = -1 / (6 * coordinates.area()); while (++i < n) { a = b; b = coordinates[i]; c = a[0] * b[1] - b[0] * a[1]; x += (a[0] + b[0]) * c; y += (a[1] + b[1]) * c; } return [ x * k, y * k ]; }; coordinates.clip = function(subject) { var input, i = -1, n = coordinates.length, j, m, a = coordinates[n - 1], b, c, d; while (++i < n) { input = subject.slice(); subject.length = 0; b = coordinates[i]; c = input[(m = input.length) - 1]; j = -1; while (++j < m) { d = input[j]; if (d3_geom_polygonInside(d, a, b)) { if (!d3_geom_polygonInside(c, a, b)) { subject.push(d3_geom_polygonIntersect(c, d, a, b)); } subject.push(d); } else if (d3_geom_polygonInside(c, a, b)) { subject.push(d3_geom_polygonIntersect(c, d, a, b)); } c = d; } a = b; } return subject; }; return coordinates; }; d3.geom.voronoi = 
function(vertices) { var polygons = vertices.map(function() { return []; }); d3_voronoi_tessellate(vertices, function(e) { var s1, s2, x1, x2, y1, y2; if (e.a === 1 && e.b >= 0) { s1 = e.ep.r; s2 = e.ep.l; } else { s1 = e.ep.l; s2 = e.ep.r; } if (e.a === 1) { y1 = s1 ? s1.y : -1e6; x1 = e.c - e.b * y1; y2 = s2 ? s2.y : 1e6; x2 = e.c - e.b * y2; } else { x1 = s1 ? s1.x : -1e6; y1 = e.c - e.a * x1; x2 = s2 ? s2.x : 1e6; y2 = e.c - e.a * x2; } var v1 = [ x1, y1 ], v2 = [ x2, y2 ]; polygons[e.region.l.index].push(v1, v2); polygons[e.region.r.index].push(v1, v2); }); return polygons.map(function(polygon, i) { var cx = vertices[i][0], cy = vertices[i][1]; polygon.forEach(function(v) { v.angle = Math.atan2(v[0] - cx, v[1] - cy); }); return polygon.sort(function(a, b) { return a.angle - b.angle; }).filter(function(d, i) { return !i || d.angle - polygon[i - 1].angle > 1e-10; }); }); }; var d3_voronoi_opposite = { l: "r", r: "l" }; d3.geom.delaunay = function(vertices) { var edges = vertices.map(function() { return []; }), triangles = []; d3_voronoi_tessellate(vertices, function(e) { edges[e.region.l.index].push(vertices[e.region.r.index]); }); edges.forEach(function(edge, i) { var v = vertices[i], cx = v[0], cy = v[1]; edge.forEach(function(v) { v.angle = Math.atan2(v[0] - cx, v[1] - cy); }); edge.sort(function(a, b) { return a.angle - b.angle; }); for (var j = 0, m = edge.length - 1; j < m; j++) { triangles.push([ v, edge[j], edge[j + 1] ]); } }); return triangles; }; d3.geom.quadtree = function(points, x1, y1, x2, y2) { function insert(n, p, x1, y1, x2, y2) { if (isNaN(p.x) || isNaN(p.y)) return; if (n.leaf) { var v = n.point; if (v) { if (Math.abs(v.x - p.x) + Math.abs(v.y - p.y) < .01) { insertChild(n, p, x1, y1, x2, y2); } else { n.point = null; insertChild(n, v, x1, y1, x2, y2); insertChild(n, p, x1, y1, x2, y2); } } else { n.point = p; } } else { insertChild(n, p, x1, y1, x2, y2); } } function insertChild(n, p, x1, y1, x2, y2) { var sx = (x1 + x2) * .5, sy = (y1 + y2) * .5, right = p.x >= sx, bottom = p.y >= sy, i = (bottom << 1) + right; n.leaf = false; n = n.nodes[i] || (n.nodes[i] = d3_geom_quadtreeNode()); if (right) x1 = sx; else x2 = sx; if (bottom) y1 = sy; else y2 = sy; insert(n, p, x1, y1, x2, y2); } var p, i = -1, n = points.length; if (n && isNaN(points[0].x)) points = points.map(d3_geom_quadtreePoint); if (arguments.length < 5) { if (arguments.length === 3) { y2 = x2 = y1; y1 = x1; } else { x1 = y1 = Infinity; x2 = y2 = -Infinity; while (++i < n) { p = points[i]; if (p.x < x1) x1 = p.x; if (p.y < y1) y1 = p.y; if (p.x > x2) x2 = p.x; if (p.y > y2) y2 = p.y; } var dx = x2 - x1, dy = y2 - y1; if (dx > dy) y2 = y1 + dx; else x2 = x1 + dy; } } var root = d3_geom_quadtreeNode(); root.add = function(p) { insert(root, p, x1, y1, x2, y2); }; root.visit = function(f) { d3_geom_quadtreeVisit(f, root, x1, y1, x2, y2); }; points.forEach(root.add); return root; }; d3.time = {}; var d3_time = Date, d3_time_daySymbols = [ "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" ]; d3_time_utc.prototype = { getDate: function() { return this._.getUTCDate(); }, getDay: function() { return this._.getUTCDay(); }, getFullYear: function() { return this._.getUTCFullYear(); }, getHours: function() { return this._.getUTCHours(); }, getMilliseconds: function() { return this._.getUTCMilliseconds(); }, getMinutes: function() { return this._.getUTCMinutes(); }, getMonth: function() { return this._.getUTCMonth(); }, getSeconds: function() { return this._.getUTCSeconds(); }, getTime: 
function() { return this._.getTime(); }, getTimezoneOffset: function() { return 0; }, valueOf: function() { return this._.valueOf(); }, setDate: function() { d3_time_prototype.setUTCDate.apply(this._, arguments); }, setDay: function() { d3_time_prototype.setUTCDay.apply(this._, arguments); }, setFullYear: function() { d3_time_prototype.setUTCFullYear.apply(this._, arguments); }, setHours: function() { d3_time_prototype.setUTCHours.apply(this._, arguments); }, setMilliseconds: function() { d3_time_prototype.setUTCMilliseconds.apply(this._, arguments); }, setMinutes: function() { d3_time_prototype.setUTCMinutes.apply(this._, arguments); }, setMonth: function() { d3_time_prototype.setUTCMonth.apply(this._, arguments); }, setSeconds: function() { d3_time_prototype.setUTCSeconds.apply(this._, arguments); }, setTime: function() { d3_time_prototype.setTime.apply(this._, arguments); } }; var d3_time_prototype = Date.prototype; var d3_time_formatDateTime = "%a %b %e %H:%M:%S %Y", d3_time_formatDate = "%m/%d/%y", d3_time_formatTime = "%H:%M:%S"; var d3_time_days = d3_time_daySymbols, d3_time_dayAbbreviations = d3_time_days.map(d3_time_formatAbbreviate), d3_time_months = [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" ], d3_time_monthAbbreviations = d3_time_months.map(d3_time_formatAbbreviate); d3.time.format = function(template) { function format(date) { var string = [], i = -1, j = 0, c, f; while (++i < n) { if (template.charCodeAt(i) == 37) { string.push(template.substring(j, i), (f = d3_time_formats[c = template.charAt(++i)]) ? f(date) : c); j = i + 1; } } string.push(template.substring(j, i)); return string.join(""); } var n = template.length; format.parse = function(string) { var d = { y: 1900, m: 0, d: 1, H: 0, M: 0, S: 0, L: 0 }, i = d3_time_parse(d, template, string, 0); if (i != string.length) return null; if ("p" in d) d.H = d.H % 12 + d.p * 12; var date = new d3_time; date.setFullYear(d.y, d.m, d.d); date.setHours(d.H, d.M, d.S, d.L); return date; }; format.toString = function() { return template; }; return format; }; var d3_time_zfill2 = d3.format("02d"), d3_time_zfill3 = d3.format("03d"), d3_time_zfill4 = d3.format("04d"), d3_time_sfill2 = d3.format("2d"); var d3_time_dayRe = d3_time_formatRe(d3_time_days), d3_time_dayAbbrevRe = d3_time_formatRe(d3_time_dayAbbreviations), d3_time_monthRe = d3_time_formatRe(d3_time_months), d3_time_monthLookup = d3_time_formatLookup(d3_time_months), d3_time_monthAbbrevRe = d3_time_formatRe(d3_time_monthAbbreviations), d3_time_monthAbbrevLookup = d3_time_formatLookup(d3_time_monthAbbreviations); var d3_time_formats = { a: function(d) { return d3_time_dayAbbreviations[d.getDay()]; }, A: function(d) { return d3_time_days[d.getDay()]; }, b: function(d) { return d3_time_monthAbbreviations[d.getMonth()]; }, B: function(d) { return d3_time_months[d.getMonth()]; }, c: d3.time.format(d3_time_formatDateTime), d: function(d) { return d3_time_zfill2(d.getDate()); }, e: function(d) { return d3_time_sfill2(d.getDate()); }, H: function(d) { return d3_time_zfill2(d.getHours()); }, I: function(d) { return d3_time_zfill2(d.getHours() % 12 || 12); }, j: function(d) { return d3_time_zfill3(1 + d3.time.dayOfYear(d)); }, L: function(d) { return d3_time_zfill3(d.getMilliseconds()); }, m: function(d) { return d3_time_zfill2(d.getMonth() + 1); }, M: function(d) { return d3_time_zfill2(d.getMinutes()); }, p: function(d) { return d.getHours() >= 12 ? 
"PM" : "AM"; }, S: function(d) { return d3_time_zfill2(d.getSeconds()); }, U: function(d) { return d3_time_zfill2(d3.time.sundayOfYear(d)); }, w: function(d) { return d.getDay(); }, W: function(d) { return d3_time_zfill2(d3.time.mondayOfYear(d)); }, x: d3.time.format(d3_time_formatDate), X: d3.time.format(d3_time_formatTime), y: function(d) { return d3_time_zfill2(d.getFullYear() % 100); }, Y: function(d) { return d3_time_zfill4(d.getFullYear() % 1e4); }, Z: d3_time_zone, "%": function(d) { return "%"; } }; var d3_time_parsers = { a: d3_time_parseWeekdayAbbrev, A: d3_time_parseWeekday, b: d3_time_parseMonthAbbrev, B: d3_time_parseMonth, c: d3_time_parseLocaleFull, d: d3_time_parseDay, e: d3_time_parseDay, H: d3_time_parseHour24, I: d3_time_parseHour24, L: d3_time_parseMilliseconds, m: d3_time_parseMonthNumber, M: d3_time_parseMinutes, p: d3_time_parseAmPm, S: d3_time_parseSeconds, x: d3_time_parseLocaleDate, X: d3_time_parseLocaleTime, y: d3_time_parseYear, Y: d3_time_parseFullYear }; var d3_time_numberRe = /^\s*\d+/; var d3_time_amPmLookup = d3.map({ am: 0, pm: 1 }); d3.time.format.utc = function(template) { function format(date) { try { d3_time = d3_time_utc; var utc = new d3_time; utc._ = date; return local(utc); } finally { d3_time = Date; } } var local = d3.time.format(template); format.parse = function(string) { try { d3_time = d3_time_utc; var date = local.parse(string); return date && date._; } finally { d3_time = Date; } }; format.toString = local.toString; return format; }; var d3_time_formatIso = d3.time.format.utc("%Y-%m-%dT%H:%M:%S.%LZ"); d3.time.format.iso = Date.prototype.toISOString ? d3_time_formatIsoNative : d3_time_formatIso; d3_time_formatIsoNative.parse = function(string) { var date = new Date(string); return isNaN(date) ? null : date; }; d3_time_formatIsoNative.toString = d3_time_formatIso.toString; d3.time.second = d3_time_interval(function(date) { return new d3_time(Math.floor(date / 1e3) * 1e3); }, function(date, offset) { date.setTime(date.getTime() + Math.floor(offset) * 1e3); }, function(date) { return date.getSeconds(); }); d3.time.seconds = d3.time.second.range; d3.time.seconds.utc = d3.time.second.utc.range; d3.time.minute = d3_time_interval(function(date) { return new d3_time(Math.floor(date / 6e4) * 6e4); }, function(date, offset) { date.setTime(date.getTime() + Math.floor(offset) * 6e4); }, function(date) { return date.getMinutes(); }); d3.time.minutes = d3.time.minute.range; d3.time.minutes.utc = d3.time.minute.utc.range; d3.time.hour = d3_time_interval(function(date) { var timezone = date.getTimezoneOffset() / 60; return new d3_time((Math.floor(date / 36e5 - timezone) + timezone) * 36e5); }, function(date, offset) { date.setTime(date.getTime() + Math.floor(offset) * 36e5); }, function(date) { return date.getHours(); }); d3.time.hours = d3.time.hour.range; d3.time.hours.utc = d3.time.hour.utc.range; d3.time.day = d3_time_interval(function(date) { var day = new d3_time(1970, 0); day.setFullYear(date.getFullYear(), date.getMonth(), date.getDate()); return day; }, function(date, offset) { date.setDate(date.getDate() + offset); }, function(date) { return date.getDate() - 1; }); d3.time.days = d3.time.day.range; d3.time.days.utc = d3.time.day.utc.range; d3.time.dayOfYear = function(date) { var year = d3.time.year(date); return Math.floor((date - year - (date.getTimezoneOffset() - year.getTimezoneOffset()) * 6e4) / 864e5); }; d3_time_daySymbols.forEach(function(day, i) { day = day.toLowerCase(); i = 7 - i; var interval = d3.time[day] = 
d3_time_interval(function(date) { (date = d3.time.day(date)).setDate(date.getDate() - (date.getDay() + i) % 7); return date; }, function(date, offset) { date.setDate(date.getDate() + Math.floor(offset) * 7); }, function(date) { var day = d3.time.year(date).getDay(); return Math.floor((d3.time.dayOfYear(date) + (day + i) % 7) / 7) - (day !== i); }); d3.time[day + "s"] = interval.range; d3.time[day + "s"].utc = interval.utc.range; d3.time[day + "OfYear"] = function(date) { var day = d3.time.year(date).getDay(); return Math.floor((d3.time.dayOfYear(date) + (day + i) % 7) / 7); }; }); d3.time.week = d3.time.sunday; d3.time.weeks = d3.time.sunday.range; d3.time.weeks.utc = d3.time.sunday.utc.range; d3.time.weekOfYear = d3.time.sundayOfYear; d3.time.month = d3_time_interval(function(date) { date = d3.time.day(date); date.setDate(1); return date; }, function(date, offset) { date.setMonth(date.getMonth() + offset); }, function(date) { return date.getMonth(); }); d3.time.months = d3.time.month.range; d3.time.months.utc = d3.time.month.utc.range; d3.time.year = d3_time_interval(function(date) { date = d3.time.day(date); date.setMonth(0, 1); return date; }, function(date, offset) { date.setFullYear(date.getFullYear() + offset); }, function(date) { return date.getFullYear(); }); d3.time.years = d3.time.year.range; d3.time.years.utc = d3.time.year.utc.range; var d3_time_scaleSteps = [ 1e3, 5e3, 15e3, 3e4, 6e4, 3e5, 9e5, 18e5, 36e5, 108e5, 216e5, 432e5, 864e5, 1728e5, 6048e5, 2592e6, 7776e6, 31536e6 ]; var d3_time_scaleLocalMethods = [ [ d3.time.second, 1 ], [ d3.time.second, 5 ], [ d3.time.second, 15 ], [ d3.time.second, 30 ], [ d3.time.minute, 1 ], [ d3.time.minute, 5 ], [ d3.time.minute, 15 ], [ d3.time.minute, 30 ], [ d3.time.hour, 1 ], [ d3.time.hour, 3 ], [ d3.time.hour, 6 ], [ d3.time.hour, 12 ], [ d3.time.day, 1 ], [ d3.time.day, 2 ], [ d3.time.week, 1 ], [ d3.time.month, 1 ], [ d3.time.month, 3 ], [ d3.time.year, 1 ] ]; var d3_time_scaleLocalFormats = [ [ d3.time.format("%Y"), function(d) { return true; } ], [ d3.time.format("%B"), function(d) { return d.getMonth(); } ], [ d3.time.format("%b %d"), function(d) { return d.getDate() != 1; } ], [ d3.time.format("%a %d"), function(d) { return d.getDay() && d.getDate() != 1; } ], [ d3.time.format("%I %p"), function(d) { return d.getHours(); } ], [ d3.time.format("%I:%M"), function(d) { return d.getMinutes(); } ], [ d3.time.format(":%S"), function(d) { return d.getSeconds(); } ], [ d3.time.format(".%L"), function(d) { return d.getMilliseconds(); } ] ]; var d3_time_scaleLinear = d3.scale.linear(), d3_time_scaleLocalFormat = d3_time_scaleFormat(d3_time_scaleLocalFormats); d3_time_scaleLocalMethods.year = function(extent, m) { return d3_time_scaleLinear.domain(extent.map(d3_time_scaleGetYear)).ticks(m).map(d3_time_scaleSetYear); }; d3.time.scale = function() { return d3_time_scale(d3.scale.linear(), d3_time_scaleLocalMethods, d3_time_scaleLocalFormat); }; var d3_time_scaleUTCMethods = d3_time_scaleLocalMethods.map(function(m) { return [ m[0].utc, m[1] ]; }); var d3_time_scaleUTCFormats = [ [ d3.time.format.utc("%Y"), function(d) { return true; } ], [ d3.time.format.utc("%B"), function(d) { return d.getUTCMonth(); } ], [ d3.time.format.utc("%b %d"), function(d) { return d.getUTCDate() != 1; } ], [ d3.time.format.utc("%a %d"), function(d) { return d.getUTCDay() && d.getUTCDate() != 1; } ], [ d3.time.format.utc("%I %p"), function(d) { return d.getUTCHours(); } ], [ d3.time.format.utc("%I:%M"), function(d) { return d.getUTCMinutes(); } ], [ 
d3.time.format.utc(":%S"), function(d) { return d.getUTCSeconds(); } ], [ d3.time.format.utc(".%L"), function(d) { return d.getUTCMilliseconds(); } ] ]; var d3_time_scaleUTCFormat = d3_time_scaleFormat(d3_time_scaleUTCFormats); d3_time_scaleUTCMethods.year = function(extent, m) { return d3_time_scaleLinear.domain(extent.map(d3_time_scaleUTCGetYear)).ticks(m).map(d3_time_scaleUTCSetYear); }; d3.time.scale.utc = function() { return d3_time_scale(d3.scale.linear(), d3_time_scaleUTCMethods, d3_time_scaleUTCFormat); }; })();ceilometer-2014.1.5/ceilometer/api/v1/static/jquery-1.8.3.js0000664000567000056700000101273312540642615024310 0ustar jenkinsjenkins00000000000000/*! * jQuery JavaScript Library v1.8.3 * http://jquery.com/ * * Includes Sizzle.js * http://sizzlejs.com/ * * Copyright 2012 jQuery Foundation and other contributors * Released under the MIT license * http://jquery.org/license * * Date: Tue Nov 13 2012 08:20:33 GMT-0500 (Eastern Standard Time) */ (function( window, undefined ) { var // A central reference to the root jQuery(document) rootjQuery, // The deferred used on DOM ready readyList, // Use the correct document accordingly with window argument (sandbox) document = window.document, location = window.location, navigator = window.navigator, // Map over jQuery in case of overwrite _jQuery = window.jQuery, // Map over the $ in case of overwrite _$ = window.$, // Save a reference to some core methods core_push = Array.prototype.push, core_slice = Array.prototype.slice, core_indexOf = Array.prototype.indexOf, core_toString = Object.prototype.toString, core_hasOwn = Object.prototype.hasOwnProperty, core_trim = String.prototype.trim, // Define a local copy of jQuery jQuery = function( selector, context ) { // The jQuery object is actually just the init constructor 'enhanced' return new jQuery.fn.init( selector, context, rootjQuery ); }, // Used for matching numbers core_pnum = /[\-+]?(?:\d*\.|)\d+(?:[eE][\-+]?\d+|)/.source, // Used for detecting and trimming whitespace core_rnotwhite = /\S/, core_rspace = /\s+/, // Make sure we trim BOM and NBSP (here's looking at you, Safari 5.0 and IE) rtrim = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g, // A simple way to check for HTML strings // Prioritize #id over to avoid XSS via location.hash (#9521) rquickExpr = /^(?:[^#<]*(<[\w\W]+>)[^>]*$|#([\w\-]*)$)/, // Match a standalone tag rsingleTag = /^<(\w+)\s*\/?>(?:<\/\1>|)$/, // JSON RegExp rvalidchars = /^[\],:{}\s]*$/, rvalidbraces = /(?:^|:|,)(?:\s*\[)+/g, rvalidescape = /\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g, rvalidtokens = /"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g, // Matches dashed string for camelizing rmsPrefix = /^-ms-/, rdashAlpha = /-([\da-z])/gi, // Used by jQuery.camelCase as callback to replace() fcamelCase = function( all, letter ) { return ( letter + "" ).toUpperCase(); }, // The ready event handler and self cleanup method DOMContentLoaded = function() { if ( document.addEventListener ) { document.removeEventListener( "DOMContentLoaded", DOMContentLoaded, false ); jQuery.ready(); } else if ( document.readyState === "complete" ) { // we're here because readyState === "complete" in oldIE // which is good enough for us to call the dom ready! 
document.detachEvent( "onreadystatechange", DOMContentLoaded ); jQuery.ready(); } }, // [[Class]] -> type pairs class2type = {}; jQuery.fn = jQuery.prototype = { constructor: jQuery, init: function( selector, context, rootjQuery ) { var match, elem, ret, doc; // Handle $(""), $(null), $(undefined), $(false) if ( !selector ) { return this; } // Handle $(DOMElement) if ( selector.nodeType ) { this.context = this[0] = selector; this.length = 1; return this; } // Handle HTML strings if ( typeof selector === "string" ) { if ( selector.charAt(0) === "<" && selector.charAt( selector.length - 1 ) === ">" && selector.length >= 3 ) { // Assume that strings that start and end with <> are HTML and skip the regex check match = [ null, selector, null ]; } else { match = rquickExpr.exec( selector ); } // Match html or make sure no context is specified for #id if ( match && (match[1] || !context) ) { // HANDLE: $(html) -> $(array) if ( match[1] ) { context = context instanceof jQuery ? context[0] : context; doc = ( context && context.nodeType ? context.ownerDocument || context : document ); // scripts is true for back-compat selector = jQuery.parseHTML( match[1], doc, true ); if ( rsingleTag.test( match[1] ) && jQuery.isPlainObject( context ) ) { this.attr.call( selector, context, true ); } return jQuery.merge( this, selector ); // HANDLE: $(#id) } else { elem = document.getElementById( match[2] ); // Check parentNode to catch when Blackberry 4.6 returns // nodes that are no longer in the document #6963 if ( elem && elem.parentNode ) { // Handle the case where IE and Opera return items // by name instead of ID if ( elem.id !== match[2] ) { return rootjQuery.find( selector ); } // Otherwise, we inject the element directly into the jQuery object this.length = 1; this[0] = elem; } this.context = document; this.selector = selector; return this; } // HANDLE: $(expr, $(...)) } else if ( !context || context.jquery ) { return ( context || rootjQuery ).find( selector ); // HANDLE: $(expr, context) // (which is just equivalent to: $(context).find(expr) } else { return this.constructor( context ).find( selector ); } // HANDLE: $(function) // Shortcut for document ready } else if ( jQuery.isFunction( selector ) ) { return rootjQuery.ready( selector ); } if ( selector.selector !== undefined ) { this.selector = selector.selector; this.context = selector.context; } return jQuery.makeArray( selector, this ); }, // Start with an empty selector selector: "", // The current version of jQuery being used jquery: "1.8.3", // The default length of a jQuery object is 0 length: 0, // The number of elements contained in the matched element set size: function() { return this.length; }, toArray: function() { return core_slice.call( this ); }, // Get the Nth element in the matched element set OR // Get the whole matched element set as a clean array get: function( num ) { return num == null ? // Return a 'clean' array this.toArray() : // Return just the object ( num < 0 ? this[ this.length + num ] : this[ num ] ); }, // Take an array of elements and push it onto the stack // (returning the new matched element set) pushStack: function( elems, name, selector ) { // Build a new jQuery matched element set var ret = jQuery.merge( this.constructor(), elems ); // Add the old object onto the stack (as a reference) ret.prevObject = this; ret.context = this.context; if ( name === "find" ) { ret.selector = this.selector + ( this.selector ? " " : "" ) + selector; } else if ( name ) { ret.selector = this.selector + "." 
+ name + "(" + selector + ")"; } // Return the newly-formed element set return ret; }, // Execute a callback for every element in the matched set. // (You can seed the arguments with an array of args, but this is // only used internally.) each: function( callback, args ) { return jQuery.each( this, callback, args ); }, ready: function( fn ) { // Add the callback jQuery.ready.promise().done( fn ); return this; }, eq: function( i ) { i = +i; return i === -1 ? this.slice( i ) : this.slice( i, i + 1 ); }, first: function() { return this.eq( 0 ); }, last: function() { return this.eq( -1 ); }, slice: function() { return this.pushStack( core_slice.apply( this, arguments ), "slice", core_slice.call(arguments).join(",") ); }, map: function( callback ) { return this.pushStack( jQuery.map(this, function( elem, i ) { return callback.call( elem, i, elem ); })); }, end: function() { return this.prevObject || this.constructor(null); }, // For internal use only. // Behaves like an Array's method, not like a jQuery method. push: core_push, sort: [].sort, splice: [].splice }; // Give the init function the jQuery prototype for later instantiation jQuery.fn.init.prototype = jQuery.fn; jQuery.extend = jQuery.fn.extend = function() { var options, name, src, copy, copyIsArray, clone, target = arguments[0] || {}, i = 1, length = arguments.length, deep = false; // Handle a deep copy situation if ( typeof target === "boolean" ) { deep = target; target = arguments[1] || {}; // skip the boolean and the target i = 2; } // Handle case when target is a string or something (possible in deep copy) if ( typeof target !== "object" && !jQuery.isFunction(target) ) { target = {}; } // extend jQuery itself if only one argument is passed if ( length === i ) { target = this; --i; } for ( ; i < length; i++ ) { // Only deal with non-null/undefined values if ( (options = arguments[ i ]) != null ) { // Extend the base object for ( name in options ) { src = target[ name ]; copy = options[ name ]; // Prevent never-ending loop if ( target === copy ) { continue; } // Recurse if we're merging plain objects or arrays if ( deep && copy && ( jQuery.isPlainObject(copy) || (copyIsArray = jQuery.isArray(copy)) ) ) { if ( copyIsArray ) { copyIsArray = false; clone = src && jQuery.isArray(src) ? src : []; } else { clone = src && jQuery.isPlainObject(src) ? src : {}; } // Never move original objects, clone them target[ name ] = jQuery.extend( deep, clone, copy ); // Don't bring in undefined values } else if ( copy !== undefined ) { target[ name ] = copy; } } } } // Return the modified object return target; }; jQuery.extend({ noConflict: function( deep ) { if ( window.$ === jQuery ) { window.$ = _$; } if ( deep && window.jQuery === jQuery ) { window.jQuery = _jQuery; } return jQuery; }, // Is the DOM ready to be used? Set to true once it occurs. isReady: false, // A counter to track how many items to wait for before // the ready event fires. See #6781 readyWait: 1, // Hold (or release) the ready event holdReady: function( hold ) { if ( hold ) { jQuery.readyWait++; } else { jQuery.ready( true ); } }, // Handle when the DOM is ready ready: function( wait ) { // Abort if there are pending holds or we're already ready if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) { return; } // Make sure body exists, at least, in case IE gets a little overzealous (ticket #5443). 
if ( !document.body ) { return setTimeout( jQuery.ready, 1 ); } // Remember that the DOM is ready jQuery.isReady = true; // If a normal DOM Ready event fired, decrement, and wait if need be if ( wait !== true && --jQuery.readyWait > 0 ) { return; } // If there are functions bound, to execute readyList.resolveWith( document, [ jQuery ] ); // Trigger any bound ready events if ( jQuery.fn.trigger ) { jQuery( document ).trigger("ready").off("ready"); } }, // See test/unit/core.js for details concerning isFunction. // Since version 1.3, DOM methods and functions like alert // aren't supported. They return false on IE (#2968). isFunction: function( obj ) { return jQuery.type(obj) === "function"; }, isArray: Array.isArray || function( obj ) { return jQuery.type(obj) === "array"; }, isWindow: function( obj ) { return obj != null && obj == obj.window; }, isNumeric: function( obj ) { return !isNaN( parseFloat(obj) ) && isFinite( obj ); }, type: function( obj ) { return obj == null ? String( obj ) : class2type[ core_toString.call(obj) ] || "object"; }, isPlainObject: function( obj ) { // Must be an Object. // Because of IE, we also have to check the presence of the constructor property. // Make sure that DOM nodes and window objects don't pass through, as well if ( !obj || jQuery.type(obj) !== "object" || obj.nodeType || jQuery.isWindow( obj ) ) { return false; } try { // Not own constructor property must be Object if ( obj.constructor && !core_hasOwn.call(obj, "constructor") && !core_hasOwn.call(obj.constructor.prototype, "isPrototypeOf") ) { return false; } } catch ( e ) { // IE8,9 Will throw exceptions on certain host objects #9897 return false; } // Own properties are enumerated firstly, so to speed up, // if last one is own, then all properties are own. var key; for ( key in obj ) {} return key === undefined || core_hasOwn.call( obj, key ); }, isEmptyObject: function( obj ) { var name; for ( name in obj ) { return false; } return true; }, error: function( msg ) { throw new Error( msg ); }, // data: string of html // context (optional): If specified, the fragment will be created in this context, defaults to document // scripts (optional): If true, will include scripts passed in the html string parseHTML: function( data, context, scripts ) { var parsed; if ( !data || typeof data !== "string" ) { return null; } if ( typeof context === "boolean" ) { scripts = context; context = 0; } context = context || document; // Single tag if ( (parsed = rsingleTag.exec( data )) ) { return [ context.createElement( parsed[1] ) ]; } parsed = jQuery.buildFragment( [ data ], context, scripts ? null : [] ); return jQuery.merge( [], (parsed.cacheable ? 
jQuery.clone( parsed.fragment ) : parsed.fragment).childNodes ); }, parseJSON: function( data ) { if ( !data || typeof data !== "string") { return null; } // Make sure leading/trailing whitespace is removed (IE can't handle it) data = jQuery.trim( data ); // Attempt to parse using the native JSON parser first if ( window.JSON && window.JSON.parse ) { return window.JSON.parse( data ); } // Make sure the incoming data is actual JSON // Logic borrowed from http://json.org/json2.js if ( rvalidchars.test( data.replace( rvalidescape, "@" ) .replace( rvalidtokens, "]" ) .replace( rvalidbraces, "")) ) { return ( new Function( "return " + data ) )(); } jQuery.error( "Invalid JSON: " + data ); }, // Cross-browser xml parsing parseXML: function( data ) { var xml, tmp; if ( !data || typeof data !== "string" ) { return null; } try { if ( window.DOMParser ) { // Standard tmp = new DOMParser(); xml = tmp.parseFromString( data , "text/xml" ); } else { // IE xml = new ActiveXObject( "Microsoft.XMLDOM" ); xml.async = "false"; xml.loadXML( data ); } } catch( e ) { xml = undefined; } if ( !xml || !xml.documentElement || xml.getElementsByTagName( "parsererror" ).length ) { jQuery.error( "Invalid XML: " + data ); } return xml; }, noop: function() {}, // Evaluates a script in a global context // Workarounds based on findings by Jim Driscoll // http://weblogs.java.net/blog/driscoll/archive/2009/09/08/eval-javascript-global-context globalEval: function( data ) { if ( data && core_rnotwhite.test( data ) ) { // We use execScript on Internet Explorer // We use an anonymous function so that context is window // rather than jQuery in Firefox ( window.execScript || function( data ) { window[ "eval" ].call( window, data ); } )( data ); } }, // Convert dashed to camelCase; used by the css and data modules // Microsoft forgot to hump their vendor prefix (#9572) camelCase: function( string ) { return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase ); }, nodeName: function( elem, name ) { return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase(); }, // args is for internal usage only each: function( obj, callback, args ) { var name, i = 0, length = obj.length, isObj = length === undefined || jQuery.isFunction( obj ); if ( args ) { if ( isObj ) { for ( name in obj ) { if ( callback.apply( obj[ name ], args ) === false ) { break; } } } else { for ( ; i < length; ) { if ( callback.apply( obj[ i++ ], args ) === false ) { break; } } } // A special, fast, case for the most common use of each } else { if ( isObj ) { for ( name in obj ) { if ( callback.call( obj[ name ], name, obj[ name ] ) === false ) { break; } } } else { for ( ; i < length; ) { if ( callback.call( obj[ i ], i, obj[ i++ ] ) === false ) { break; } } } } return obj; }, // Use native String.trim function wherever possible trim: core_trim && !core_trim.call("\uFEFF\xA0") ? function( text ) { return text == null ? "" : core_trim.call( text ); } : // Otherwise use our own trimming functionality function( text ) { return text == null ? 
"" : ( text + "" ).replace( rtrim, "" ); }, // results is for internal usage only makeArray: function( arr, results ) { var type, ret = results || []; if ( arr != null ) { // The window, strings (and functions) also have 'length' // Tweaked logic slightly to handle Blackberry 4.7 RegExp issues #6930 type = jQuery.type( arr ); if ( arr.length == null || type === "string" || type === "function" || type === "regexp" || jQuery.isWindow( arr ) ) { core_push.call( ret, arr ); } else { jQuery.merge( ret, arr ); } } return ret; }, inArray: function( elem, arr, i ) { var len; if ( arr ) { if ( core_indexOf ) { return core_indexOf.call( arr, elem, i ); } len = arr.length; i = i ? i < 0 ? Math.max( 0, len + i ) : i : 0; for ( ; i < len; i++ ) { // Skip accessing in sparse arrays if ( i in arr && arr[ i ] === elem ) { return i; } } } return -1; }, merge: function( first, second ) { var l = second.length, i = first.length, j = 0; if ( typeof l === "number" ) { for ( ; j < l; j++ ) { first[ i++ ] = second[ j ]; } } else { while ( second[j] !== undefined ) { first[ i++ ] = second[ j++ ]; } } first.length = i; return first; }, grep: function( elems, callback, inv ) { var retVal, ret = [], i = 0, length = elems.length; inv = !!inv; // Go through the array, only saving the items // that pass the validator function for ( ; i < length; i++ ) { retVal = !!callback( elems[ i ], i ); if ( inv !== retVal ) { ret.push( elems[ i ] ); } } return ret; }, // arg is for internal usage only map: function( elems, callback, arg ) { var value, key, ret = [], i = 0, length = elems.length, // jquery objects are treated as arrays isArray = elems instanceof jQuery || length !== undefined && typeof length === "number" && ( ( length > 0 && elems[ 0 ] && elems[ length -1 ] ) || length === 0 || jQuery.isArray( elems ) ) ; // Go through the array, translating each of the items to their if ( isArray ) { for ( ; i < length; i++ ) { value = callback( elems[ i ], i, arg ); if ( value != null ) { ret[ ret.length ] = value; } } // Go through every key on the object, } else { for ( key in elems ) { value = callback( elems[ key ], key, arg ); if ( value != null ) { ret[ ret.length ] = value; } } } // Flatten any nested arrays return ret.concat.apply( [], ret ); }, // A global GUID counter for objects guid: 1, // Bind a function to a context, optionally partially applying any // arguments. proxy: function( fn, context ) { var tmp, args, proxy; if ( typeof context === "string" ) { tmp = fn[ context ]; context = fn; fn = tmp; } // Quick check to determine if target is callable, in the spec // this throws a TypeError, but we will just return undefined. 
if ( !jQuery.isFunction( fn ) ) { return undefined; } // Simulated bind args = core_slice.call( arguments, 2 ); proxy = function() { return fn.apply( context, args.concat( core_slice.call( arguments ) ) ); }; // Set the guid of unique handler to the same of original handler, so it can be removed proxy.guid = fn.guid = fn.guid || jQuery.guid++; return proxy; }, // Multifunctional method to get and set values of a collection // The value/s can optionally be executed if it's a function access: function( elems, fn, key, value, chainable, emptyGet, pass ) { var exec, bulk = key == null, i = 0, length = elems.length; // Sets many values if ( key && typeof key === "object" ) { for ( i in key ) { jQuery.access( elems, fn, i, key[i], 1, emptyGet, value ); } chainable = 1; // Sets one value } else if ( value !== undefined ) { // Optionally, function values get executed if exec is true exec = pass === undefined && jQuery.isFunction( value ); if ( bulk ) { // Bulk operations only iterate when executing function values if ( exec ) { exec = fn; fn = function( elem, key, value ) { return exec.call( jQuery( elem ), value ); }; // Otherwise they run against the entire set } else { fn.call( elems, value ); fn = null; } } if ( fn ) { for (; i < length; i++ ) { fn( elems[i], key, exec ? value.call( elems[i], i, fn( elems[i], key ) ) : value, pass ); } } chainable = 1; } return chainable ? elems : // Gets bulk ? fn.call( elems ) : length ? fn( elems[0], key ) : emptyGet; }, now: function() { return ( new Date() ).getTime(); } }); jQuery.ready.promise = function( obj ) { if ( !readyList ) { readyList = jQuery.Deferred(); // Catch cases where $(document).ready() is called after the browser event has already occurred. // we once tried to use readyState "interactive" here, but it caused issues like the one // discovered by ChrisS here: http://bugs.jquery.com/ticket/12282#comment:15 if ( document.readyState === "complete" ) { // Handle it asynchronously to allow scripts the opportunity to delay ready setTimeout( jQuery.ready, 1 ); // Standards-based browsers support DOMContentLoaded } else if ( document.addEventListener ) { // Use the handy event callback document.addEventListener( "DOMContentLoaded", DOMContentLoaded, false ); // A fallback to window.onload, that will always work window.addEventListener( "load", jQuery.ready, false ); // If IE event model is used } else { // Ensure firing before onload, maybe late but safe also for iframes document.attachEvent( "onreadystatechange", DOMContentLoaded ); // A fallback to window.onload, that will always work window.attachEvent( "onload", jQuery.ready ); // If IE and not a frame // continually check to see if the document is ready var top = false; try { top = window.frameElement == null && document.documentElement; } catch(e) {} if ( top && top.doScroll ) { (function doScrollCheck() { if ( !jQuery.isReady ) { try { // Use the trick by Diego Perini // http://javascript.nwbox.com/IEContentLoaded/ top.doScroll("left"); } catch(e) { return setTimeout( doScrollCheck, 50 ); } // and execute any waiting functions jQuery.ready(); } })(); } } } return readyList.promise( obj ); }; // Populate the class2type map jQuery.each("Boolean Number String Function Array Date RegExp Object".split(" "), function(i, name) { class2type[ "[object " + name + "]" ] = name.toLowerCase(); }); // All jQuery objects should point back to these rootjQuery = jQuery(document); // String to Object options format cache var optionsCache = {}; // Convert String-formatted options into Object-formatted ones 
and store in cache function createOptions( options ) { var object = optionsCache[ options ] = {}; jQuery.each( options.split( core_rspace ), function( _, flag ) { object[ flag ] = true; }); return object; } /* * Create a callback list using the following parameters: * * options: an optional list of space-separated options that will change how * the callback list behaves or a more traditional option object * * By default a callback list will act like an event callback list and can be * "fired" multiple times. * * Possible options: * * once: will ensure the callback list can only be fired once (like a Deferred) * * memory: will keep track of previous values and will call any callback added * after the list has been fired right away with the latest "memorized" * values (like a Deferred) * * unique: will ensure a callback can only be added once (no duplicate in the list) * * stopOnFalse: interrupt callings when a callback returns false * */ jQuery.Callbacks = function( options ) { // Convert options from String-formatted to Object-formatted if needed // (we check in cache first) options = typeof options === "string" ? ( optionsCache[ options ] || createOptions( options ) ) : jQuery.extend( {}, options ); var // Last fire value (for non-forgettable lists) memory, // Flag to know if list was already fired fired, // Flag to know if list is currently firing firing, // First callback to fire (used internally by add and fireWith) firingStart, // End of the loop when firing firingLength, // Index of currently firing callback (modified by remove if needed) firingIndex, // Actual callback list list = [], // Stack of fire calls for repeatable lists stack = !options.once && [], // Fire callbacks fire = function( data ) { memory = options.memory && data; fired = true; firingIndex = firingStart || 0; firingStart = 0; firingLength = list.length; firing = true; for ( ; list && firingIndex < firingLength; firingIndex++ ) { if ( list[ firingIndex ].apply( data[ 0 ], data[ 1 ] ) === false && options.stopOnFalse ) { memory = false; // To prevent further calls using add break; } } firing = false; if ( list ) { if ( stack ) { if ( stack.length ) { fire( stack.shift() ); } } else if ( memory ) { list = []; } else { self.disable(); } } }, // Actual Callbacks object self = { // Add a callback or a collection of callbacks to the list add: function() { if ( list ) { // First, we save the current length var start = list.length; (function add( args ) { jQuery.each( args, function( _, arg ) { var type = jQuery.type( arg ); if ( type === "function" ) { if ( !options.unique || !self.has( arg ) ) { list.push( arg ); } } else if ( arg && arg.length && type !== "string" ) { // Inspect recursively add( arg ); } }); })( arguments ); // Do we need to add the callbacks to the // current firing batch? 
if ( firing ) { firingLength = list.length; // With memory, if we're not firing then // we should call right away } else if ( memory ) { firingStart = start; fire( memory ); } } return this; }, // Remove a callback from the list remove: function() { if ( list ) { jQuery.each( arguments, function( _, arg ) { var index; while( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) { list.splice( index, 1 ); // Handle firing indexes if ( firing ) { if ( index <= firingLength ) { firingLength--; } if ( index <= firingIndex ) { firingIndex--; } } } }); } return this; }, // Control if a given callback is in the list has: function( fn ) { return jQuery.inArray( fn, list ) > -1; }, // Remove all callbacks from the list empty: function() { list = []; return this; }, // Have the list do nothing anymore disable: function() { list = stack = memory = undefined; return this; }, // Is it disabled? disabled: function() { return !list; }, // Lock the list in its current state lock: function() { stack = undefined; if ( !memory ) { self.disable(); } return this; }, // Is it locked? locked: function() { return !stack; }, // Call all callbacks with the given context and arguments fireWith: function( context, args ) { args = args || []; args = [ context, args.slice ? args.slice() : args ]; if ( list && ( !fired || stack ) ) { if ( firing ) { stack.push( args ); } else { fire( args ); } } return this; }, // Call all the callbacks with the given arguments fire: function() { self.fireWith( this, arguments ); return this; }, // To know if the callbacks have already been called at least once fired: function() { return !!fired; } }; return self; }; jQuery.extend({ Deferred: function( func ) { var tuples = [ // action, add listener, listener list, final state [ "resolve", "done", jQuery.Callbacks("once memory"), "resolved" ], [ "reject", "fail", jQuery.Callbacks("once memory"), "rejected" ], [ "notify", "progress", jQuery.Callbacks("memory") ] ], state = "pending", promise = { state: function() { return state; }, always: function() { deferred.done( arguments ).fail( arguments ); return this; }, then: function( /* fnDone, fnFail, fnProgress */ ) { var fns = arguments; return jQuery.Deferred(function( newDefer ) { jQuery.each( tuples, function( i, tuple ) { var action = tuple[ 0 ], fn = fns[ i ]; // deferred[ done | fail | progress ] for forwarding actions to newDefer deferred[ tuple[1] ]( jQuery.isFunction( fn ) ? function() { var returned = fn.apply( this, arguments ); if ( returned && jQuery.isFunction( returned.promise ) ) { returned.promise() .done( newDefer.resolve ) .fail( newDefer.reject ) .progress( newDefer.notify ); } else { newDefer[ action + "With" ]( this === deferred ? newDefer : this, [ returned ] ); } } : newDefer[ action ] ); }); fns = null; }).promise(); }, // Get a promise for this deferred // If obj is provided, the promise aspect is added to the object promise: function( obj ) { return obj != null ? 
jQuery.extend( obj, promise ) : promise; } }, deferred = {}; // Keep pipe for back-compat promise.pipe = promise.then; // Add list-specific methods jQuery.each( tuples, function( i, tuple ) { var list = tuple[ 2 ], stateString = tuple[ 3 ]; // promise[ done | fail | progress ] = list.add promise[ tuple[1] ] = list.add; // Handle state if ( stateString ) { list.add(function() { // state = [ resolved | rejected ] state = stateString; // [ reject_list | resolve_list ].disable; progress_list.lock }, tuples[ i ^ 1 ][ 2 ].disable, tuples[ 2 ][ 2 ].lock ); } // deferred[ resolve | reject | notify ] = list.fire deferred[ tuple[0] ] = list.fire; deferred[ tuple[0] + "With" ] = list.fireWith; }); // Make the deferred a promise promise.promise( deferred ); // Call given func if any if ( func ) { func.call( deferred, deferred ); } // All done! return deferred; }, // Deferred helper when: function( subordinate /* , ..., subordinateN */ ) { var i = 0, resolveValues = core_slice.call( arguments ), length = resolveValues.length, // the count of uncompleted subordinates remaining = length !== 1 || ( subordinate && jQuery.isFunction( subordinate.promise ) ) ? length : 0, // the master Deferred. If resolveValues consist of only a single Deferred, just use that. deferred = remaining === 1 ? subordinate : jQuery.Deferred(), // Update function for both resolve and progress values updateFunc = function( i, contexts, values ) { return function( value ) { contexts[ i ] = this; values[ i ] = arguments.length > 1 ? core_slice.call( arguments ) : value; if( values === progressValues ) { deferred.notifyWith( contexts, values ); } else if ( !( --remaining ) ) { deferred.resolveWith( contexts, values ); } }; }, progressValues, progressContexts, resolveContexts; // add listeners to Deferred subordinates; treat others as resolved if ( length > 1 ) { progressValues = new Array( length ); progressContexts = new Array( length ); resolveContexts = new Array( length ); for ( ; i < length; i++ ) { if ( resolveValues[ i ] && jQuery.isFunction( resolveValues[ i ].promise ) ) { resolveValues[ i ].promise() .done( updateFunc( i, resolveContexts, resolveValues ) ) .fail( deferred.reject ) .progress( updateFunc( i, progressContexts, progressValues ) ); } else { --remaining; } } } // if we're not waiting on anything, resolve the master if ( !remaining ) { deferred.resolveWith( resolveContexts, resolveValues ); } return deferred.promise(); } }); jQuery.support = (function() { var support, all, a, select, opt, input, fragment, eventName, i, isSupported, clickFn, div = document.createElement("div"); // Setup div.setAttribute( "className", "t" ); div.innerHTML = "
a"; // Support tests won't run in some limited or non-browser environments all = div.getElementsByTagName("*"); a = div.getElementsByTagName("a")[ 0 ]; if ( !all || !a || !all.length ) { return {}; } // First batch of tests select = document.createElement("select"); opt = select.appendChild( document.createElement("option") ); input = div.getElementsByTagName("input")[ 0 ]; a.style.cssText = "top:1px;float:left;opacity:.5"; support = { // IE strips leading whitespace when .innerHTML is used leadingWhitespace: ( div.firstChild.nodeType === 3 ), // Make sure that tbody elements aren't automatically inserted // IE will insert them into empty tables tbody: !div.getElementsByTagName("tbody").length, // Make sure that link elements get serialized correctly by innerHTML // This requires a wrapper element in IE htmlSerialize: !!div.getElementsByTagName("link").length, // Get the style information from getAttribute // (IE uses .cssText instead) style: /top/.test( a.getAttribute("style") ), // Make sure that URLs aren't manipulated // (IE normalizes it by default) hrefNormalized: ( a.getAttribute("href") === "/a" ), // Make sure that element opacity exists // (IE uses filter instead) // Use a regex to work around a WebKit issue. See #5145 opacity: /^0.5/.test( a.style.opacity ), // Verify style float existence // (IE uses styleFloat instead of cssFloat) cssFloat: !!a.style.cssFloat, // Make sure that if no value is specified for a checkbox // that it defaults to "on". // (WebKit defaults to "" instead) checkOn: ( input.value === "on" ), // Make sure that a selected-by-default option has a working selected property. // (WebKit defaults to false instead of true, IE too, if it's in an optgroup) optSelected: opt.selected, // Test setAttribute on camelCase class. 
If it works, we need attrFixes when doing get/setAttribute (ie6/7) getSetAttribute: div.className !== "t", // Tests for enctype support on a form (#6743) enctype: !!document.createElement("form").enctype, // Makes sure cloning an html5 element does not cause problems // Where outerHTML is undefined, this still works html5Clone: document.createElement("nav").cloneNode( true ).outerHTML !== "<:nav>", // jQuery.support.boxModel DEPRECATED in 1.8 since we don't support Quirks Mode boxModel: ( document.compatMode === "CSS1Compat" ), // Will be defined later submitBubbles: true, changeBubbles: true, focusinBubbles: false, deleteExpando: true, noCloneEvent: true, inlineBlockNeedsLayout: false, shrinkWrapBlocks: false, reliableMarginRight: true, boxSizingReliable: true, pixelPosition: false }; // Make sure checked status is properly cloned input.checked = true; support.noCloneChecked = input.cloneNode( true ).checked; // Make sure that the options inside disabled selects aren't marked as disabled // (WebKit marks them as disabled) select.disabled = true; support.optDisabled = !opt.disabled; // Test to see if it's possible to delete an expando from an element // Fails in Internet Explorer try { delete div.test; } catch( e ) { support.deleteExpando = false; } if ( !div.addEventListener && div.attachEvent && div.fireEvent ) { div.attachEvent( "onclick", clickFn = function() { // Cloning a node shouldn't copy over any // bound event handlers (IE does this) support.noCloneEvent = false; }); div.cloneNode( true ).fireEvent("onclick"); div.detachEvent( "onclick", clickFn ); } // Check if a radio maintains its value // after being appended to the DOM input = document.createElement("input"); input.value = "t"; input.setAttribute( "type", "radio" ); support.radioValue = input.value === "t"; input.setAttribute( "checked", "checked" ); // #11217 - WebKit loses check when the name is after the checked attribute input.setAttribute( "name", "t" ); div.appendChild( input ); fragment = document.createDocumentFragment(); fragment.appendChild( div.lastChild ); // WebKit doesn't clone checked state correctly in fragments support.checkClone = fragment.cloneNode( true ).cloneNode( true ).lastChild.checked; // Check if a disconnected checkbox will retain its checked // value of true after appended to the DOM (IE6/7) support.appendChecked = input.checked; fragment.removeChild( input ); fragment.appendChild( div ); // Technique from Juriy Zaytsev // http://perfectionkills.com/detecting-event-support-without-browser-sniffing/ // We only care about the case where non-standard event systems // are used, namely in IE. Short-circuiting here helps us to // avoid an eval call (in setAttribute) which can cause CSP // to go haywire. 
See: https://developer.mozilla.org/en/Security/CSP if ( div.attachEvent ) { for ( i in { submit: true, change: true, focusin: true }) { eventName = "on" + i; isSupported = ( eventName in div ); if ( !isSupported ) { div.setAttribute( eventName, "return;" ); isSupported = ( typeof div[ eventName ] === "function" ); } support[ i + "Bubbles" ] = isSupported; } } // Run tests that need a body at doc ready jQuery(function() { var container, div, tds, marginDiv, divReset = "padding:0;margin:0;border:0;display:block;overflow:hidden;", body = document.getElementsByTagName("body")[0]; if ( !body ) { // Return for frameset docs that don't have a body return; } container = document.createElement("div"); container.style.cssText = "visibility:hidden;border:0;width:0;height:0;position:static;top:0;margin-top:1px"; body.insertBefore( container, body.firstChild ); // Construct the test element div = document.createElement("div"); container.appendChild( div ); // Check if table cells still have offsetWidth/Height when they are set // to display:none and there are still other visible table cells in a // table row; if so, offsetWidth/Height are not reliable for use when // determining if an element has been hidden directly using // display:none (it is still safe to use offsets if a parent element is // hidden; don safety goggles and see bug #4512 for more information). // (only IE 8 fails this test) div.innerHTML = "
<table><tr><td></td><td>t</td></tr></table>
"; tds = div.getElementsByTagName("td"); tds[ 0 ].style.cssText = "padding:0;margin:0;border:0;display:none"; isSupported = ( tds[ 0 ].offsetHeight === 0 ); tds[ 0 ].style.display = ""; tds[ 1 ].style.display = "none"; // Check if empty table cells still have offsetWidth/Height // (IE <= 8 fail this test) support.reliableHiddenOffsets = isSupported && ( tds[ 0 ].offsetHeight === 0 ); // Check box-sizing and margin behavior div.innerHTML = ""; div.style.cssText = "box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;"; support.boxSizing = ( div.offsetWidth === 4 ); support.doesNotIncludeMarginInBodyOffset = ( body.offsetTop !== 1 ); // NOTE: To any future maintainer, we've window.getComputedStyle // because jsdom on node.js will break without it. if ( window.getComputedStyle ) { support.pixelPosition = ( window.getComputedStyle( div, null ) || {} ).top !== "1%"; support.boxSizingReliable = ( window.getComputedStyle( div, null ) || { width: "4px" } ).width === "4px"; // Check if div with explicit width and no margin-right incorrectly // gets computed margin-right based on width of container. For more // info see bug #3333 // Fails in WebKit before Feb 2011 nightlies // WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right marginDiv = document.createElement("div"); marginDiv.style.cssText = div.style.cssText = divReset; marginDiv.style.marginRight = marginDiv.style.width = "0"; div.style.width = "1px"; div.appendChild( marginDiv ); support.reliableMarginRight = !parseFloat( ( window.getComputedStyle( marginDiv, null ) || {} ).marginRight ); } if ( typeof div.style.zoom !== "undefined" ) { // Check if natively block-level elements act like inline-block // elements when setting their display to 'inline' and giving // them layout // (IE < 8 does this) div.innerHTML = ""; div.style.cssText = divReset + "width:1px;padding:1px;display:inline;zoom:1"; support.inlineBlockNeedsLayout = ( div.offsetWidth === 3 ); // Check if elements with layout shrink-wrap their children // (IE 6 does this) div.style.display = "block"; div.style.overflow = "visible"; div.innerHTML = "
"; div.firstChild.style.width = "5px"; support.shrinkWrapBlocks = ( div.offsetWidth !== 3 ); container.style.zoom = 1; } // Null elements to avoid leaks in IE body.removeChild( container ); container = div = tds = marginDiv = null; }); // Null elements to avoid leaks in IE fragment.removeChild( div ); all = a = select = opt = input = fragment = div = null; return support; })(); var rbrace = /(?:\{[\s\S]*\}|\[[\s\S]*\])$/, rmultiDash = /([A-Z])/g; jQuery.extend({ cache: {}, deletedIds: [], // Remove at next major release (1.9/2.0) uuid: 0, // Unique for each copy of jQuery on the page // Non-digits removed to match rinlinejQuery expando: "jQuery" + ( jQuery.fn.jquery + Math.random() ).replace( /\D/g, "" ), // The following elements throw uncatchable exceptions if you // attempt to add expando properties to them. noData: { "embed": true, // Ban all objects except for Flash (which handle expandos) "object": "clsid:D27CDB6E-AE6D-11cf-96B8-444553540000", "applet": true }, hasData: function( elem ) { elem = elem.nodeType ? jQuery.cache[ elem[jQuery.expando] ] : elem[ jQuery.expando ]; return !!elem && !isEmptyDataObject( elem ); }, data: function( elem, name, data, pvt /* Internal Use Only */ ) { if ( !jQuery.acceptData( elem ) ) { return; } var thisCache, ret, internalKey = jQuery.expando, getByName = typeof name === "string", // We have to handle DOM nodes and JS objects differently because IE6-7 // can't GC object references properly across the DOM-JS boundary isNode = elem.nodeType, // Only DOM nodes need the global jQuery cache; JS object data is // attached directly to the object so GC can occur automatically cache = isNode ? jQuery.cache : elem, // Only defining an ID for JS objects if its cache already exists allows // the code to shortcut on the same path as a DOM node with no cache id = isNode ? elem[ internalKey ] : elem[ internalKey ] && internalKey; // Avoid doing any more work than we need to when trying to get data on an // object that has no data at all if ( (!id || !cache[id] || (!pvt && !cache[id].data)) && getByName && data === undefined ) { return; } if ( !id ) { // Only DOM nodes need a new unique ID for each element since their data // ends up in the global cache if ( isNode ) { elem[ internalKey ] = id = jQuery.deletedIds.pop() || jQuery.guid++; } else { id = internalKey; } } if ( !cache[ id ] ) { cache[ id ] = {}; // Avoids exposing jQuery metadata on plain JS objects when the object // is serialized using JSON.stringify if ( !isNode ) { cache[ id ].toJSON = jQuery.noop; } } // An object can be passed to jQuery.data instead of a key/value pair; this gets // shallow copied over onto the existing cache if ( typeof name === "object" || typeof name === "function" ) { if ( pvt ) { cache[ id ] = jQuery.extend( cache[ id ], name ); } else { cache[ id ].data = jQuery.extend( cache[ id ].data, name ); } } thisCache = cache[ id ]; // jQuery data() is stored in a separate object inside the object's internal data // cache in order to avoid key collisions between internal data and user-defined // data. 
if ( !pvt ) { if ( !thisCache.data ) { thisCache.data = {}; } thisCache = thisCache.data; } if ( data !== undefined ) { thisCache[ jQuery.camelCase( name ) ] = data; } // Check for both converted-to-camel and non-converted data property names // If a data property was specified if ( getByName ) { // First Try to find as-is property data ret = thisCache[ name ]; // Test for null|undefined property data if ( ret == null ) { // Try to find the camelCased property ret = thisCache[ jQuery.camelCase( name ) ]; } } else { ret = thisCache; } return ret; }, removeData: function( elem, name, pvt /* Internal Use Only */ ) { if ( !jQuery.acceptData( elem ) ) { return; } var thisCache, i, l, isNode = elem.nodeType, // See jQuery.data for more information cache = isNode ? jQuery.cache : elem, id = isNode ? elem[ jQuery.expando ] : jQuery.expando; // If there is already no cache entry for this object, there is no // purpose in continuing if ( !cache[ id ] ) { return; } if ( name ) { thisCache = pvt ? cache[ id ] : cache[ id ].data; if ( thisCache ) { // Support array or space separated string names for data keys if ( !jQuery.isArray( name ) ) { // try the string as a key before any manipulation if ( name in thisCache ) { name = [ name ]; } else { // split the camel cased version by spaces unless a key with the spaces exists name = jQuery.camelCase( name ); if ( name in thisCache ) { name = [ name ]; } else { name = name.split(" "); } } } for ( i = 0, l = name.length; i < l; i++ ) { delete thisCache[ name[i] ]; } // If there is no data left in the cache, we want to continue // and let the cache object itself get destroyed if ( !( pvt ? isEmptyDataObject : jQuery.isEmptyObject )( thisCache ) ) { return; } } } // See jQuery.data for more information if ( !pvt ) { delete cache[ id ].data; // Don't destroy the parent cache unless the internal data object // had been the only thing left in it if ( !isEmptyDataObject( cache[ id ] ) ) { return; } } // Destroy the cache if ( isNode ) { jQuery.cleanData( [ elem ], true ); // Use delete when supported for expandos or `cache` is not a window per isWindow (#10080) } else if ( jQuery.support.deleteExpando || cache != cache.window ) { delete cache[ id ]; // When all else fails, null } else { cache[ id ] = null; } }, // For internal use only. _data: function( elem, name, data ) { return jQuery.data( elem, name, data, true ); }, // A method for determining if a DOM node can handle the data expando acceptData: function( elem ) { var noData = elem.nodeName && jQuery.noData[ elem.nodeName.toLowerCase() ]; // nodes accept data unless otherwise specified; rejection can be conditional return !noData || noData !== true && elem.getAttribute("classid") === noData; } }); jQuery.fn.extend({ data: function( key, value ) { var parts, part, attr, name, l, elem = this[0], i = 0, data = null; // Gets all values if ( key === undefined ) { if ( this.length ) { data = jQuery.data( elem ); if ( elem.nodeType === 1 && !jQuery._data( elem, "parsedAttrs" ) ) { attr = elem.attributes; for ( l = attr.length; i < l; i++ ) { name = attr[i].name; if ( !name.indexOf( "data-" ) ) { name = jQuery.camelCase( name.substring(5) ); dataAttr( elem, name, data[ name ] ); } } jQuery._data( elem, "parsedAttrs", true ); } } return data; } // Sets multiple values if ( typeof key === "object" ) { return this.each(function() { jQuery.data( this, key ); }); } parts = key.split( ".", 2 ); parts[1] = parts[1] ? "." 
+ parts[1] : ""; part = parts[1] + "!"; return jQuery.access( this, function( value ) { if ( value === undefined ) { data = this.triggerHandler( "getData" + part, [ parts[0] ] ); // Try to fetch any internally stored data first if ( data === undefined && elem ) { data = jQuery.data( elem, key ); data = dataAttr( elem, key, data ); } return data === undefined && parts[1] ? this.data( parts[0] ) : data; } parts[1] = value; this.each(function() { var self = jQuery( this ); self.triggerHandler( "setData" + part, parts ); jQuery.data( this, key, value ); self.triggerHandler( "changeData" + part, parts ); }); }, null, value, arguments.length > 1, null, false ); }, removeData: function( key ) { return this.each(function() { jQuery.removeData( this, key ); }); } }); function dataAttr( elem, key, data ) { // If nothing was found internally, try to fetch any // data from the HTML5 data-* attribute if ( data === undefined && elem.nodeType === 1 ) { var name = "data-" + key.replace( rmultiDash, "-$1" ).toLowerCase(); data = elem.getAttribute( name ); if ( typeof data === "string" ) { try { data = data === "true" ? true : data === "false" ? false : data === "null" ? null : // Only convert to a number if it doesn't change the string +data + "" === data ? +data : rbrace.test( data ) ? jQuery.parseJSON( data ) : data; } catch( e ) {} // Make sure we set the data so it isn't changed later jQuery.data( elem, key, data ); } else { data = undefined; } } return data; } // checks a cache object for emptiness function isEmptyDataObject( obj ) { var name; for ( name in obj ) { // if the public data object is empty, the private is still empty if ( name === "data" && jQuery.isEmptyObject( obj[name] ) ) { continue; } if ( name !== "toJSON" ) { return false; } } return true; } jQuery.extend({ queue: function( elem, type, data ) { var queue; if ( elem ) { type = ( type || "fx" ) + "queue"; queue = jQuery._data( elem, type ); // Speed up dequeue by getting out quickly if this is just a lookup if ( data ) { if ( !queue || jQuery.isArray(data) ) { queue = jQuery._data( elem, type, jQuery.makeArray(data) ); } else { queue.push( data ); } } return queue || []; } }, dequeue: function( elem, type ) { type = type || "fx"; var queue = jQuery.queue( elem, type ), startLength = queue.length, fn = queue.shift(), hooks = jQuery._queueHooks( elem, type ), next = function() { jQuery.dequeue( elem, type ); }; // If the fx queue is dequeued, always remove the progress sentinel if ( fn === "inprogress" ) { fn = queue.shift(); startLength--; } if ( fn ) { // Add a progress sentinel to prevent the fx queue from being // automatically dequeued if ( type === "fx" ) { queue.unshift( "inprogress" ); } // clear up the last queue stop function delete hooks.stop; fn.call( elem, next, hooks ); } if ( !startLength && hooks ) { hooks.empty.fire(); } }, // not intended for public consumption - generates a queueHooks object, or returns the current one _queueHooks: function( elem, type ) { var key = type + "queueHooks"; return jQuery._data( elem, key ) || jQuery._data( elem, key, { empty: jQuery.Callbacks("once memory").add(function() { jQuery.removeData( elem, type + "queue", true ); jQuery.removeData( elem, key, true ); }) }); } }); jQuery.fn.extend({ queue: function( type, data ) { var setter = 2; if ( typeof type !== "string" ) { data = type; type = "fx"; setter--; } if ( arguments.length < setter ) { return jQuery.queue( this[0], type ); } return data === undefined ? 
this : this.each(function() { var queue = jQuery.queue( this, type, data ); // ensure a hooks for this queue jQuery._queueHooks( this, type ); if ( type === "fx" && queue[0] !== "inprogress" ) { jQuery.dequeue( this, type ); } }); }, dequeue: function( type ) { return this.each(function() { jQuery.dequeue( this, type ); }); }, // Based off of the plugin by Clint Helfers, with permission. // http://blindsignals.com/index.php/2009/07/jquery-delay/ delay: function( time, type ) { time = jQuery.fx ? jQuery.fx.speeds[ time ] || time : time; type = type || "fx"; return this.queue( type, function( next, hooks ) { var timeout = setTimeout( next, time ); hooks.stop = function() { clearTimeout( timeout ); }; }); }, clearQueue: function( type ) { return this.queue( type || "fx", [] ); }, // Get a promise resolved when queues of a certain type // are emptied (fx is the type by default) promise: function( type, obj ) { var tmp, count = 1, defer = jQuery.Deferred(), elements = this, i = this.length, resolve = function() { if ( !( --count ) ) { defer.resolveWith( elements, [ elements ] ); } }; if ( typeof type !== "string" ) { obj = type; type = undefined; } type = type || "fx"; while( i-- ) { tmp = jQuery._data( elements[ i ], type + "queueHooks" ); if ( tmp && tmp.empty ) { count++; tmp.empty.add( resolve ); } } resolve(); return defer.promise( obj ); } }); var nodeHook, boolHook, fixSpecified, rclass = /[\t\r\n]/g, rreturn = /\r/g, rtype = /^(?:button|input)$/i, rfocusable = /^(?:button|input|object|select|textarea)$/i, rclickable = /^a(?:rea|)$/i, rboolean = /^(?:autofocus|autoplay|async|checked|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped|selected)$/i, getSetAttribute = jQuery.support.getSetAttribute; jQuery.fn.extend({ attr: function( name, value ) { return jQuery.access( this, jQuery.attr, name, value, arguments.length > 1 ); }, removeAttr: function( name ) { return this.each(function() { jQuery.removeAttr( this, name ); }); }, prop: function( name, value ) { return jQuery.access( this, jQuery.prop, name, value, arguments.length > 1 ); }, removeProp: function( name ) { name = jQuery.propFix[ name ] || name; return this.each(function() { // try/catch handles cases where IE balks (such as removing a property on window) try { this[ name ] = undefined; delete this[ name ]; } catch( e ) {} }); }, addClass: function( value ) { var classNames, i, l, elem, setClass, c, cl; if ( jQuery.isFunction( value ) ) { return this.each(function( j ) { jQuery( this ).addClass( value.call(this, j, this.className) ); }); } if ( value && typeof value === "string" ) { classNames = value.split( core_rspace ); for ( i = 0, l = this.length; i < l; i++ ) { elem = this[ i ]; if ( elem.nodeType === 1 ) { if ( !elem.className && classNames.length === 1 ) { elem.className = value; } else { setClass = " " + elem.className + " "; for ( c = 0, cl = classNames.length; c < cl; c++ ) { if ( setClass.indexOf( " " + classNames[ c ] + " " ) < 0 ) { setClass += classNames[ c ] + " "; } } elem.className = jQuery.trim( setClass ); } } } } return this; }, removeClass: function( value ) { var removes, className, elem, c, cl, i, l; if ( jQuery.isFunction( value ) ) { return this.each(function( j ) { jQuery( this ).removeClass( value.call(this, j, this.className) ); }); } if ( (value && typeof value === "string") || value === undefined ) { removes = ( value || "" ).split( core_rspace ); for ( i = 0, l = this.length; i < l; i++ ) { elem = this[ i ]; if ( elem.nodeType === 1 && elem.className ) { className = (" " + 
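/*
 * Queue helpers sketch for the fn.queue/.delay/.promise methods above (the "fx"
 * queue is the default; assumes an element #box and that the fx/animation
 * module of this build is present):
 *
 *     jQuery("#box")
 *         .delay(200)                      // queues a setTimeout-backed step
 *         .queue(function( next, hooks ) {
 *             // custom step; call next() to keep the queue moving
 *             next();
 *         });
 *
 *     jQuery("#box").promise().done(function() {
 *         // fires once the "fx" queue and its queueHooks are empty
 *     });
 */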
elem.className + " ").replace( rclass, " " ); // loop over each item in the removal list for ( c = 0, cl = removes.length; c < cl; c++ ) { // Remove until there is nothing to remove, while ( className.indexOf(" " + removes[ c ] + " ") >= 0 ) { className = className.replace( " " + removes[ c ] + " " , " " ); } } elem.className = value ? jQuery.trim( className ) : ""; } } } return this; }, toggleClass: function( value, stateVal ) { var type = typeof value, isBool = typeof stateVal === "boolean"; if ( jQuery.isFunction( value ) ) { return this.each(function( i ) { jQuery( this ).toggleClass( value.call(this, i, this.className, stateVal), stateVal ); }); } return this.each(function() { if ( type === "string" ) { // toggle individual class names var className, i = 0, self = jQuery( this ), state = stateVal, classNames = value.split( core_rspace ); while ( (className = classNames[ i++ ]) ) { // check each className given, space separated list state = isBool ? state : !self.hasClass( className ); self[ state ? "addClass" : "removeClass" ]( className ); } } else if ( type === "undefined" || type === "boolean" ) { if ( this.className ) { // store className if set jQuery._data( this, "__className__", this.className ); } // toggle whole className this.className = this.className || value === false ? "" : jQuery._data( this, "__className__" ) || ""; } }); }, hasClass: function( selector ) { var className = " " + selector + " ", i = 0, l = this.length; for ( ; i < l; i++ ) { if ( this[i].nodeType === 1 && (" " + this[i].className + " ").replace(rclass, " ").indexOf( className ) >= 0 ) { return true; } } return false; }, val: function( value ) { var hooks, ret, isFunction, elem = this[0]; if ( !arguments.length ) { if ( elem ) { hooks = jQuery.valHooks[ elem.type ] || jQuery.valHooks[ elem.nodeName.toLowerCase() ]; if ( hooks && "get" in hooks && (ret = hooks.get( elem, "value" )) !== undefined ) { return ret; } ret = elem.value; return typeof ret === "string" ? // handle most common string cases ret.replace(rreturn, "") : // handle cases where value is null/undef or number ret == null ? "" : ret; } return; } isFunction = jQuery.isFunction( value ); return this.each(function( i ) { var val, self = jQuery(this); if ( this.nodeType !== 1 ) { return; } if ( isFunction ) { val = value.call( this, i, self.val() ); } else { val = value; } // Treat null/undefined as ""; convert numbers to string if ( val == null ) { val = ""; } else if ( typeof val === "number" ) { val += ""; } else if ( jQuery.isArray( val ) ) { val = jQuery.map(val, function ( value ) { return value == null ? "" : value + ""; }); } hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ]; // If set returns undefined, fall back to normal setting if ( !hooks || !("set" in hooks) || hooks.set( this, val, "value" ) === undefined ) { this.value = val; } }); } }); jQuery.extend({ valHooks: { option: { get: function( elem ) { // attributes.value is undefined in Blackberry 4.7 but // uses .value. See #6932 var val = elem.attributes.value; return !val || val.specified ? elem.value : elem.text; } }, select: { get: function( elem ) { var value, option, options = elem.options, index = elem.selectedIndex, one = elem.type === "select-one" || index < 0, values = one ? null : [], max = one ? index + 1 : options.length, i = index < 0 ? max : one ? 
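/*
 * Class and value helpers sketch (assumes illustrative elements: a #box and a
 * <select id="pick"> on the page):
 *
 *     jQuery("#box").addClass("on active");   // space-separated list, existing names skipped
 *     jQuery("#box").toggleClass("on");       // flips each listed class
 *     jQuery("#box").hasClass("active");      // -> true / false
 *
 *     jQuery("#pick").val();                  // valHooks.select: string for select-one, array for multiple
 *     jQuery("#pick").val("b");               // selects the <option> whose value is "b"
 */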
index : 0; // Loop through all the selected options for ( ; i < max; i++ ) { option = options[ i ]; // oldIE doesn't update selected after form reset (#2551) if ( ( option.selected || i === index ) && // Don't return options that are disabled or in a disabled optgroup ( jQuery.support.optDisabled ? !option.disabled : option.getAttribute("disabled") === null ) && ( !option.parentNode.disabled || !jQuery.nodeName( option.parentNode, "optgroup" ) ) ) { // Get the specific value for the option value = jQuery( option ).val(); // We don't need an array for one selects if ( one ) { return value; } // Multi-Selects return an array values.push( value ); } } return values; }, set: function( elem, value ) { var values = jQuery.makeArray( value ); jQuery(elem).find("option").each(function() { this.selected = jQuery.inArray( jQuery(this).val(), values ) >= 0; }); if ( !values.length ) { elem.selectedIndex = -1; } return values; } } }, // Unused in 1.8, left in so attrFn-stabbers won't die; remove in 1.9 attrFn: {}, attr: function( elem, name, value, pass ) { var ret, hooks, notxml, nType = elem.nodeType; // don't get/set attributes on text, comment and attribute nodes if ( !elem || nType === 3 || nType === 8 || nType === 2 ) { return; } if ( pass && jQuery.isFunction( jQuery.fn[ name ] ) ) { return jQuery( elem )[ name ]( value ); } // Fallback to prop when attributes are not supported if ( typeof elem.getAttribute === "undefined" ) { return jQuery.prop( elem, name, value ); } notxml = nType !== 1 || !jQuery.isXMLDoc( elem ); // All attributes are lowercase // Grab necessary hook if one is defined if ( notxml ) { name = name.toLowerCase(); hooks = jQuery.attrHooks[ name ] || ( rboolean.test( name ) ? boolHook : nodeHook ); } if ( value !== undefined ) { if ( value === null ) { jQuery.removeAttr( elem, name ); return; } else if ( hooks && "set" in hooks && notxml && (ret = hooks.set( elem, value, name )) !== undefined ) { return ret; } else { elem.setAttribute( name, value + "" ); return value; } } else if ( hooks && "get" in hooks && notxml && (ret = hooks.get( elem, name )) !== null ) { return ret; } else { ret = elem.getAttribute( name ); // Non-existent attributes return null, we normalize to undefined return ret === null ? undefined : ret; } }, removeAttr: function( elem, value ) { var propName, attrNames, name, isBool, i = 0; if ( value && elem.nodeType === 1 ) { attrNames = value.split( core_rspace ); for ( ; i < attrNames.length; i++ ) { name = attrNames[ i ]; if ( name ) { propName = jQuery.propFix[ name ] || name; isBool = rboolean.test( name ); // See #9699 for explanation of this approach (setting first, then removal) // Do not do this for boolean attributes (see #10870) if ( !isBool ) { jQuery.attr( elem, name, "" ); } elem.removeAttribute( getSetAttribute ? 
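/*
 * jQuery.attr()/removeAttr() above route through attrHooks, boolHook and (on
 * IE6/7) nodeHook before falling back to get/setAttribute; sketch (assumes an
 * illustrative <input id="cb" type="checkbox">):
 *
 *     jQuery("#cb").attr("checked", "checked");  // boolean attribute, handled by boolHook
 *     jQuery("#cb").attr("checked");             // -> "checked" while set, undefined otherwise
 *     jQuery("#cb").attr("title", null);         // a null value removes the attribute
 *     jQuery("#cb").removeAttr("checked");       // also forces the matching DOM property to false
 */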
name : propName ); // Set corresponding property to false for boolean attributes if ( isBool && propName in elem ) { elem[ propName ] = false; } } } } }, attrHooks: { type: { set: function( elem, value ) { // We can't allow the type property to be changed (since it causes problems in IE) if ( rtype.test( elem.nodeName ) && elem.parentNode ) { jQuery.error( "type property can't be changed" ); } else if ( !jQuery.support.radioValue && value === "radio" && jQuery.nodeName(elem, "input") ) { // Setting the type on a radio button after the value resets the value in IE6-9 // Reset value to it's default in case type is set after value // This is for element creation var val = elem.value; elem.setAttribute( "type", value ); if ( val ) { elem.value = val; } return value; } } }, // Use the value property for back compat // Use the nodeHook for button elements in IE6/7 (#1954) value: { get: function( elem, name ) { if ( nodeHook && jQuery.nodeName( elem, "button" ) ) { return nodeHook.get( elem, name ); } return name in elem ? elem.value : null; }, set: function( elem, value, name ) { if ( nodeHook && jQuery.nodeName( elem, "button" ) ) { return nodeHook.set( elem, value, name ); } // Does not return so that setAttribute is also used elem.value = value; } } }, propFix: { tabindex: "tabIndex", readonly: "readOnly", "for": "htmlFor", "class": "className", maxlength: "maxLength", cellspacing: "cellSpacing", cellpadding: "cellPadding", rowspan: "rowSpan", colspan: "colSpan", usemap: "useMap", frameborder: "frameBorder", contenteditable: "contentEditable" }, prop: function( elem, name, value ) { var ret, hooks, notxml, nType = elem.nodeType; // don't get/set properties on text, comment and attribute nodes if ( !elem || nType === 3 || nType === 8 || nType === 2 ) { return; } notxml = nType !== 1 || !jQuery.isXMLDoc( elem ); if ( notxml ) { // Fix name and attach hooks name = jQuery.propFix[ name ] || name; hooks = jQuery.propHooks[ name ]; } if ( value !== undefined ) { if ( hooks && "set" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ) { return ret; } else { return ( elem[ name ] = value ); } } else { if ( hooks && "get" in hooks && (ret = hooks.get( elem, name )) !== null ) { return ret; } else { return elem[ name ]; } } }, propHooks: { tabIndex: { get: function( elem ) { // elem.tabIndex doesn't always return the correct value when it hasn't been explicitly set // http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ var attributeNode = elem.getAttributeNode("tabindex"); return attributeNode && attributeNode.specified ? parseInt( attributeNode.value, 10 ) : rfocusable.test( elem.nodeName ) || rclickable.test( elem.nodeName ) && elem.href ? 0 : undefined; } } } }); // Hook for boolean attributes boolHook = { get: function( elem, name ) { // Align boolean attributes with corresponding properties // Fall back to attribute presence where some booleans are not supported var attrNode, property = jQuery.prop( elem, name ); return property === true || typeof property !== "boolean" && ( attrNode = elem.getAttributeNode(name) ) && attrNode.nodeValue !== false ? 
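/*
 * prop() vs attr() sketch: propFix above translates HTML attribute names to the
 * DOM property names used by jQuery.prop (assumes an illustrative
 * <input id="cb" type="checkbox"> and <td id="cell">):
 *
 *     jQuery("#cb").prop("checked");        // live boolean property
 *     jQuery("#cb").attr("checked");        // serialized attribute: "checked" or undefined
 *     jQuery("#cell").prop("colspan", 2);   // propFix rewrites "colspan" to colSpan
 *     jQuery("#cell").prop("class");        // propFix rewrites "class" to className
 */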
name.toLowerCase() : undefined; }, set: function( elem, value, name ) { var propName; if ( value === false ) { // Remove boolean attributes when set to false jQuery.removeAttr( elem, name ); } else { // value is true since we know at this point it's type boolean and not false // Set boolean attributes to the same name and set the DOM property propName = jQuery.propFix[ name ] || name; if ( propName in elem ) { // Only set the IDL specifically if it already exists on the element elem[ propName ] = true; } elem.setAttribute( name, name.toLowerCase() ); } return name; } }; // IE6/7 do not support getting/setting some attributes with get/setAttribute if ( !getSetAttribute ) { fixSpecified = { name: true, id: true, coords: true }; // Use this for any attribute in IE6/7 // This fixes almost every IE6/7 issue nodeHook = jQuery.valHooks.button = { get: function( elem, name ) { var ret; ret = elem.getAttributeNode( name ); return ret && ( fixSpecified[ name ] ? ret.value !== "" : ret.specified ) ? ret.value : undefined; }, set: function( elem, value, name ) { // Set the existing or create a new attribute node var ret = elem.getAttributeNode( name ); if ( !ret ) { ret = document.createAttribute( name ); elem.setAttributeNode( ret ); } return ( ret.value = value + "" ); } }; // Set width and height to auto instead of 0 on empty string( Bug #8150 ) // This is for removals jQuery.each([ "width", "height" ], function( i, name ) { jQuery.attrHooks[ name ] = jQuery.extend( jQuery.attrHooks[ name ], { set: function( elem, value ) { if ( value === "" ) { elem.setAttribute( name, "auto" ); return value; } } }); }); // Set contenteditable to false on removals(#10429) // Setting to empty string throws an error as an invalid value jQuery.attrHooks.contenteditable = { get: nodeHook.get, set: function( elem, value, name ) { if ( value === "" ) { value = "false"; } nodeHook.set( elem, value, name ); } }; } // Some attributes require a special call on IE if ( !jQuery.support.hrefNormalized ) { jQuery.each([ "href", "src", "width", "height" ], function( i, name ) { jQuery.attrHooks[ name ] = jQuery.extend( jQuery.attrHooks[ name ], { get: function( elem ) { var ret = elem.getAttribute( name, 2 ); return ret === null ? undefined : ret; } }); }); } if ( !jQuery.support.style ) { jQuery.attrHooks.style = { get: function( elem ) { // Return undefined in the case of empty string // Normalize to lowercase since IE uppercases css property names return elem.style.cssText.toLowerCase() || undefined; }, set: function( elem, value ) { return ( elem.style.cssText = value + "" ); } }; } // Safari mis-reports the default selected property of an option // Accessing the parent's selectedIndex property fixes it if ( !jQuery.support.optSelected ) { jQuery.propHooks.selected = jQuery.extend( jQuery.propHooks.selected, { get: function( elem ) { var parent = elem.parentNode; if ( parent ) { parent.selectedIndex; // Make sure that it also works with optgroups, see #5701 if ( parent.parentNode ) { parent.parentNode.selectedIndex; } } return null; } }); } // IE6/7 call enctype encoding if ( !jQuery.support.enctype ) { jQuery.propFix.enctype = "encoding"; } // Radios and checkboxes getter/setter if ( !jQuery.support.checkOn ) { jQuery.each([ "radio", "checkbox" ], function() { jQuery.valHooks[ this ] = { get: function( elem ) { // Handle the case where in Webkit "" is returned instead of "on" if a value isn't specified return elem.getAttribute("value") === null ? 
"on" : elem.value; } }; }); } jQuery.each([ "radio", "checkbox" ], function() { jQuery.valHooks[ this ] = jQuery.extend( jQuery.valHooks[ this ], { set: function( elem, value ) { if ( jQuery.isArray( value ) ) { return ( elem.checked = jQuery.inArray( jQuery(elem).val(), value ) >= 0 ); } } }); }); var rformElems = /^(?:textarea|input|select)$/i, rtypenamespace = /^([^\.]*|)(?:\.(.+)|)$/, rhoverHack = /(?:^|\s)hover(\.\S+|)\b/, rkeyEvent = /^key/, rmouseEvent = /^(?:mouse|contextmenu)|click/, rfocusMorph = /^(?:focusinfocus|focusoutblur)$/, hoverHack = function( events ) { return jQuery.event.special.hover ? events : events.replace( rhoverHack, "mouseenter$1 mouseleave$1" ); }; /* * Helper functions for managing events -- not part of the public interface. * Props to Dean Edwards' addEvent library for many of the ideas. */ jQuery.event = { add: function( elem, types, handler, data, selector ) { var elemData, eventHandle, events, t, tns, type, namespaces, handleObj, handleObjIn, handlers, special; // Don't attach events to noData or text/comment nodes (allow plain objects tho) if ( elem.nodeType === 3 || elem.nodeType === 8 || !types || !handler || !(elemData = jQuery._data( elem )) ) { return; } // Caller can pass in an object of custom data in lieu of the handler if ( handler.handler ) { handleObjIn = handler; handler = handleObjIn.handler; selector = handleObjIn.selector; } // Make sure that the handler has a unique ID, used to find/remove it later if ( !handler.guid ) { handler.guid = jQuery.guid++; } // Init the element's event structure and main handler, if this is the first events = elemData.events; if ( !events ) { elemData.events = events = {}; } eventHandle = elemData.handle; if ( !eventHandle ) { elemData.handle = eventHandle = function( e ) { // Discard the second event of a jQuery.event.trigger() and // when an event is called after a page has unloaded return typeof jQuery !== "undefined" && (!e || jQuery.event.triggered !== e.type) ? jQuery.event.dispatch.apply( eventHandle.elem, arguments ) : undefined; }; // Add elem as a property of the handle fn to prevent a memory leak with IE non-native events eventHandle.elem = elem; } // Handle multiple events separated by a space // jQuery(...).bind("mouseover mouseout", fn); types = jQuery.trim( hoverHack(types) ).split( " " ); for ( t = 0; t < types.length; t++ ) { tns = rtypenamespace.exec( types[t] ) || []; type = tns[1]; namespaces = ( tns[2] || "" ).split( "." ).sort(); // If event changes its type, use the special event handlers for the changed type special = jQuery.event.special[ type ] || {}; // If selector defined, determine special event api type, otherwise given type type = ( selector ? 
special.delegateType : special.bindType ) || type; // Update special based on newly reset type special = jQuery.event.special[ type ] || {}; // handleObj is passed to all event handlers handleObj = jQuery.extend({ type: type, origType: tns[1], data: data, handler: handler, guid: handler.guid, selector: selector, needsContext: selector && jQuery.expr.match.needsContext.test( selector ), namespace: namespaces.join(".") }, handleObjIn ); // Init the event handler queue if we're the first handlers = events[ type ]; if ( !handlers ) { handlers = events[ type ] = []; handlers.delegateCount = 0; // Only use addEventListener/attachEvent if the special events handler returns false if ( !special.setup || special.setup.call( elem, data, namespaces, eventHandle ) === false ) { // Bind the global event handler to the element if ( elem.addEventListener ) { elem.addEventListener( type, eventHandle, false ); } else if ( elem.attachEvent ) { elem.attachEvent( "on" + type, eventHandle ); } } } if ( special.add ) { special.add.call( elem, handleObj ); if ( !handleObj.handler.guid ) { handleObj.handler.guid = handler.guid; } } // Add to the element's handler list, delegates in front if ( selector ) { handlers.splice( handlers.delegateCount++, 0, handleObj ); } else { handlers.push( handleObj ); } // Keep track of which events have ever been used, for event optimization jQuery.event.global[ type ] = true; } // Nullify elem to prevent memory leaks in IE elem = null; }, global: {}, // Detach an event or set of events from an element remove: function( elem, types, handler, selector, mappedTypes ) { var t, tns, type, origType, namespaces, origCount, j, events, special, eventType, handleObj, elemData = jQuery.hasData( elem ) && jQuery._data( elem ); if ( !elemData || !(events = elemData.events) ) { return; } // Once for each type.namespace in types; type may be omitted types = jQuery.trim( hoverHack( types || "" ) ).split(" "); for ( t = 0; t < types.length; t++ ) { tns = rtypenamespace.exec( types[t] ) || []; type = origType = tns[1]; namespaces = tns[2]; // Unbind all events (on this namespace, if provided) for the element if ( !type ) { for ( type in events ) { jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); } continue; } special = jQuery.event.special[ type ] || {}; type = ( selector? special.delegateType : special.bindType ) || type; eventType = events[ type ] || []; origCount = eventType.length; namespaces = namespaces ? 
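/*
 * jQuery.event.add() above is the single entry point used by .on()/.bind()/
 * .delegate(): handlers are stored under jQuery._data( elem ).events and one
 * shared eventHandle per element is attached natively. Sketch (assumes an
 * element #box; the handler is illustrative):
 *
 *     jQuery("#box").on("click.myns", { who: "box" }, function( e ) {
 *         // e.data.who === "box"; "myns" is kept as this handler's namespace
 *     });
 *
 *     jQuery._data( jQuery("#box")[0], "events" );  // -> { click: [ handleObj, ... ] }
 */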
new RegExp("(^|\\.)" + namespaces.split(".").sort().join("\\.(?:.*\\.|)") + "(\\.|$)") : null; // Remove matching events for ( j = 0; j < eventType.length; j++ ) { handleObj = eventType[ j ]; if ( ( mappedTypes || origType === handleObj.origType ) && ( !handler || handler.guid === handleObj.guid ) && ( !namespaces || namespaces.test( handleObj.namespace ) ) && ( !selector || selector === handleObj.selector || selector === "**" && handleObj.selector ) ) { eventType.splice( j--, 1 ); if ( handleObj.selector ) { eventType.delegateCount--; } if ( special.remove ) { special.remove.call( elem, handleObj ); } } } // Remove generic event handler if we removed something and no more handlers exist // (avoids potential for endless recursion during removal of special event handlers) if ( eventType.length === 0 && origCount !== eventType.length ) { if ( !special.teardown || special.teardown.call( elem, namespaces, elemData.handle ) === false ) { jQuery.removeEvent( elem, type, elemData.handle ); } delete events[ type ]; } } // Remove the expando if it's no longer used if ( jQuery.isEmptyObject( events ) ) { delete elemData.handle; // removeData also checks for emptiness and clears the expando if empty // so use it instead of delete jQuery.removeData( elem, "events", true ); } }, // Events that are safe to short-circuit if no handlers are attached. // Native DOM events should not be added, they may have inline handlers. customEvent: { "getData": true, "setData": true, "changeData": true }, trigger: function( event, data, elem, onlyHandlers ) { // Don't do events on text and comment nodes if ( elem && (elem.nodeType === 3 || elem.nodeType === 8) ) { return; } // Event object or event type var cache, exclusive, i, cur, old, ontype, special, handle, eventPath, bubbleType, type = event.type || event, namespaces = []; // focus/blur morphs to focusin/out; ensure we're not firing them right now if ( rfocusMorph.test( type + jQuery.event.triggered ) ) { return; } if ( type.indexOf( "!" ) >= 0 ) { // Exclusive events trigger only for the exact event (no namespaces) type = type.slice(0, -1); exclusive = true; } if ( type.indexOf( "." ) >= 0 ) { // Namespaced trigger; create a regexp to match event type in handle() namespaces = type.split("."); type = namespaces.shift(); namespaces.sort(); } if ( (!elem || jQuery.event.customEvent[ type ]) && !jQuery.event.global[ type ] ) { // No jQuery handlers for this event type, and it can't have inline handlers return; } // Caller can pass in an Event, Object, or just an event type string event = typeof event === "object" ? // jQuery.Event object event[ jQuery.expando ] ? event : // Object literal new jQuery.Event( type, event ) : // Just the event type (string) new jQuery.Event( type ); event.type = type; event.isTrigger = true; event.exclusive = exclusive; event.namespace = namespaces.join( "." ); event.namespace_re = event.namespace? new RegExp("(^|\\.)" + namespaces.join("\\.(?:.*\\.|)") + "(\\.|$)") : null; ontype = type.indexOf( ":" ) < 0 ? 
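/*
 * Namespaced removal and exclusive triggering, matching the namespace RegExp
 * built above (assumes an element #box; onMenuClick/onTrackClick are
 * illustrative handlers):
 *
 *     jQuery("#box").on("click.menu", onMenuClick);
 *     jQuery("#box").on("click.track", onTrackClick);
 *     jQuery("#box").off("click.menu");   // removes only handlers in the ".menu" namespace
 *     jQuery("#box").off(".track");       // type may be omitted: any event bound under ".track"
 *     jQuery("#box").trigger("click!");   // trailing "!": only handlers bound with no namespace run
 */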
"on" + type : ""; // Handle a global trigger if ( !elem ) { // TODO: Stop taunting the data cache; remove global events and always attach to document cache = jQuery.cache; for ( i in cache ) { if ( cache[ i ].events && cache[ i ].events[ type ] ) { jQuery.event.trigger( event, data, cache[ i ].handle.elem, true ); } } return; } // Clean up the event in case it is being reused event.result = undefined; if ( !event.target ) { event.target = elem; } // Clone any incoming data and prepend the event, creating the handler arg list data = data != null ? jQuery.makeArray( data ) : []; data.unshift( event ); // Allow special events to draw outside the lines special = jQuery.event.special[ type ] || {}; if ( special.trigger && special.trigger.apply( elem, data ) === false ) { return; } // Determine event propagation path in advance, per W3C events spec (#9951) // Bubble up to document, then to window; watch for a global ownerDocument var (#9724) eventPath = [[ elem, special.bindType || type ]]; if ( !onlyHandlers && !special.noBubble && !jQuery.isWindow( elem ) ) { bubbleType = special.delegateType || type; cur = rfocusMorph.test( bubbleType + type ) ? elem : elem.parentNode; for ( old = elem; cur; cur = cur.parentNode ) { eventPath.push([ cur, bubbleType ]); old = cur; } // Only add window if we got to document (e.g., not plain obj or detached DOM) if ( old === (elem.ownerDocument || document) ) { eventPath.push([ old.defaultView || old.parentWindow || window, bubbleType ]); } } // Fire handlers on the event path for ( i = 0; i < eventPath.length && !event.isPropagationStopped(); i++ ) { cur = eventPath[i][0]; event.type = eventPath[i][1]; handle = ( jQuery._data( cur, "events" ) || {} )[ event.type ] && jQuery._data( cur, "handle" ); if ( handle ) { handle.apply( cur, data ); } // Note that this is a bare JS function and not a jQuery handler handle = ontype && cur[ ontype ]; if ( handle && jQuery.acceptData( cur ) && handle.apply && handle.apply( cur, data ) === false ) { event.preventDefault(); } } event.type = type; // If nobody prevented the default action, do it now if ( !onlyHandlers && !event.isDefaultPrevented() ) { if ( (!special._default || special._default.apply( elem.ownerDocument, data ) === false) && !(type === "click" && jQuery.nodeName( elem, "a" )) && jQuery.acceptData( elem ) ) { // Call a native DOM method on the target with the same name name as the event. // Can't use an .isFunction() check here because IE6/7 fails that test. 
// Don't do default actions on window, that's where global variables be (#6170) // IE<9 dies on focus/blur to hidden element (#1486) if ( ontype && elem[ type ] && ((type !== "focus" && type !== "blur") || event.target.offsetWidth !== 0) && !jQuery.isWindow( elem ) ) { // Don't re-trigger an onFOO event when we call its FOO() method old = elem[ ontype ]; if ( old ) { elem[ ontype ] = null; } // Prevent re-triggering of the same event, since we already bubbled it above jQuery.event.triggered = type; elem[ type ](); jQuery.event.triggered = undefined; if ( old ) { elem[ ontype ] = old; } } } } return event.result; }, dispatch: function( event ) { // Make a writable jQuery.Event from the native event object event = jQuery.event.fix( event || window.event ); var i, j, cur, ret, selMatch, matched, matches, handleObj, sel, related, handlers = ( (jQuery._data( this, "events" ) || {} )[ event.type ] || []), delegateCount = handlers.delegateCount, args = core_slice.call( arguments ), run_all = !event.exclusive && !event.namespace, special = jQuery.event.special[ event.type ] || {}, handlerQueue = []; // Use the fix-ed jQuery.Event rather than the (read-only) native event args[0] = event; event.delegateTarget = this; // Call the preDispatch hook for the mapped type, and let it bail if desired if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) { return; } // Determine handlers that should run if there are delegated events // Avoid non-left-click bubbling in Firefox (#3861) if ( delegateCount && !(event.button && event.type === "click") ) { for ( cur = event.target; cur != this; cur = cur.parentNode || this ) { // Don't process clicks (ONLY) on disabled elements (#6911, #8165, #11382, #11764) if ( cur.disabled !== true || event.type !== "click" ) { selMatch = {}; matches = []; for ( i = 0; i < delegateCount; i++ ) { handleObj = handlers[ i ]; sel = handleObj.selector; if ( selMatch[ sel ] === undefined ) { selMatch[ sel ] = handleObj.needsContext ? jQuery( sel, this ).index( cur ) >= 0 : jQuery.find( sel, this, null, [ cur ] ).length; } if ( selMatch[ sel ] ) { matches.push( handleObj ); } } if ( matches.length ) { handlerQueue.push({ elem: cur, matches: matches }); } } } } // Add the remaining (directly-bound) handlers if ( handlers.length > delegateCount ) { handlerQueue.push({ elem: this, matches: handlers.slice( delegateCount ) }); } // Run delegates first; they may want to stop propagation beneath us for ( i = 0; i < handlerQueue.length && !event.isPropagationStopped(); i++ ) { matched = handlerQueue[ i ]; event.currentTarget = matched.elem; for ( j = 0; j < matched.matches.length && !event.isImmediatePropagationStopped(); j++ ) { handleObj = matched.matches[ j ]; // Triggered event must either 1) be non-exclusive and have no namespace, or // 2) have namespace(s) a subset or equal to those in the bound event (both can have no namespace). 
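/*
 * .trigger() walks the eventPath computed above (element, its ancestors,
 * document, then window) and finally runs the native default action; sketch
 * (assumes an illustrative <form id="f"> with a bound submit handler):
 *
 *     jQuery("#f").trigger("submit");              // handlers run and bubble, then native form.submit()
 *     jQuery("#f").triggerHandler("submit");       // onlyHandlers: no bubbling, no default action
 *     jQuery("#f").trigger("my:signal", [ 1, 2 ]); // extra args follow the event object in each handler
 */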
if ( run_all || (!event.namespace && !handleObj.namespace) || event.namespace_re && event.namespace_re.test( handleObj.namespace ) ) { event.data = handleObj.data; event.handleObj = handleObj; ret = ( (jQuery.event.special[ handleObj.origType ] || {}).handle || handleObj.handler ) .apply( matched.elem, args ); if ( ret !== undefined ) { event.result = ret; if ( ret === false ) { event.preventDefault(); event.stopPropagation(); } } } } } // Call the postDispatch hook for the mapped type if ( special.postDispatch ) { special.postDispatch.call( this, event ); } return event.result; }, // Includes some event props shared by KeyEvent and MouseEvent // *** attrChange attrName relatedNode srcElement are not normalized, non-W3C, deprecated, will be removed in 1.8 *** props: "attrChange attrName relatedNode srcElement altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "), fixHooks: {}, keyHooks: { props: "char charCode key keyCode".split(" "), filter: function( event, original ) { // Add which for key events if ( event.which == null ) { event.which = original.charCode != null ? original.charCode : original.keyCode; } return event; } }, mouseHooks: { props: "button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "), filter: function( event, original ) { var eventDoc, doc, body, button = original.button, fromElement = original.fromElement; // Calculate pageX/Y if missing and clientX/Y available if ( event.pageX == null && original.clientX != null ) { eventDoc = event.target.ownerDocument || document; doc = eventDoc.documentElement; body = eventDoc.body; event.pageX = original.clientX + ( doc && doc.scrollLeft || body && body.scrollLeft || 0 ) - ( doc && doc.clientLeft || body && body.clientLeft || 0 ); event.pageY = original.clientY + ( doc && doc.scrollTop || body && body.scrollTop || 0 ) - ( doc && doc.clientTop || body && body.clientTop || 0 ); } // Add relatedTarget, if necessary if ( !event.relatedTarget && fromElement ) { event.relatedTarget = fromElement === event.target ? original.toElement : fromElement; } // Add which for click: 1 === left; 2 === middle; 3 === right // Note: button is not normalized, so don't use it if ( !event.which && button !== undefined ) { event.which = ( button & 1 ? 1 : ( button & 2 ? 3 : ( button & 4 ? 2 : 0 ) ) ); } return event; } }, fix: function( event ) { if ( event[ jQuery.expando ] ) { return event; } // Create a writable copy of the event object and normalize some properties var i, prop, originalEvent = event, fixHook = jQuery.event.fixHooks[ event.type ] || {}, copy = fixHook.props ? this.props.concat( fixHook.props ) : this.props; event = jQuery.Event( originalEvent ); for ( i = copy.length; i; ) { prop = copy[ --i ]; event[ prop ] = originalEvent[ prop ]; } // Fix target property, if necessary (#1925, IE 6/7/8 & Safari2) if ( !event.target ) { event.target = originalEvent.srcElement || document; } // Target should not be a text node (#504, Safari) if ( event.target.nodeType === 3 ) { event.target = event.target.parentNode; } // For mouse/key events, metaKey==false if it's undefined (#3368, #11328; IE6/7/8) event.metaKey = !!event.metaKey; return fixHook.filter? 
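/*
 * dispatch() above builds a handlerQueue by testing delegated selectors from
 * event.target upward before the directly-bound handlers run; this is the
 * delegation pattern it serves (assumes an illustrative container #list with
 * <li class="item"> children):
 *
 *     jQuery("#list").on("click", "li.item", function( e ) {
 *         // "this" and e.currentTarget are the matched <li>; e.delegateTarget is #list
 *     });
 */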
fixHook.filter( event, originalEvent ) : event; }, special: { load: { // Prevent triggered image.load events from bubbling to window.load noBubble: true }, focus: { delegateType: "focusin" }, blur: { delegateType: "focusout" }, beforeunload: { setup: function( data, namespaces, eventHandle ) { // We only want to do this special case on windows if ( jQuery.isWindow( this ) ) { this.onbeforeunload = eventHandle; } }, teardown: function( namespaces, eventHandle ) { if ( this.onbeforeunload === eventHandle ) { this.onbeforeunload = null; } } } }, simulate: function( type, elem, event, bubble ) { // Piggyback on a donor event to simulate a different one. // Fake originalEvent to avoid donor's stopPropagation, but if the // simulated event prevents default then we do the same on the donor. var e = jQuery.extend( new jQuery.Event(), event, { type: type, isSimulated: true, originalEvent: {} } ); if ( bubble ) { jQuery.event.trigger( e, null, elem ); } else { jQuery.event.dispatch.call( elem, e ); } if ( e.isDefaultPrevented() ) { event.preventDefault(); } } }; // Some plugins are using, but it's undocumented/deprecated and will be removed. // The 1.7 special event interface should provide all the hooks needed now. jQuery.event.handle = jQuery.event.dispatch; jQuery.removeEvent = document.removeEventListener ? function( elem, type, handle ) { if ( elem.removeEventListener ) { elem.removeEventListener( type, handle, false ); } } : function( elem, type, handle ) { var name = "on" + type; if ( elem.detachEvent ) { // #8545, #7054, preventing memory leaks for custom events in IE6-8 // detachEvent needed property on element, by name of that event, to properly expose it to GC if ( typeof elem[ name ] === "undefined" ) { elem[ name ] = null; } elem.detachEvent( name, handle ); } }; jQuery.Event = function( src, props ) { // Allow instantiation without the 'new' keyword if ( !(this instanceof jQuery.Event) ) { return new jQuery.Event( src, props ); } // Event object if ( src && src.type ) { this.originalEvent = src; this.type = src.type; // Events bubbling up the document may have been marked as prevented // by a handler lower down the tree; reflect the correct value. this.isDefaultPrevented = ( src.defaultPrevented || src.returnValue === false || src.getPreventDefault && src.getPreventDefault() ) ? 
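/*
 * jQuery.Event objects (constructor begun above, continued just below) can be
 * built directly and handed to .trigger() with preset properties; sketch
 * (assumes an element #box):
 *
 *     var e = jQuery.Event( "keydown", { which: 13 } );
 *     jQuery("#box").trigger( e );
 *     if ( e.isDefaultPrevented() ) {
 *         // some handler called e.preventDefault()
 *     }
 */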
returnTrue : returnFalse; // Event type } else { this.type = src; } // Put explicitly provided properties onto the event object if ( props ) { jQuery.extend( this, props ); } // Create a timestamp if incoming event doesn't have one this.timeStamp = src && src.timeStamp || jQuery.now(); // Mark it as fixed this[ jQuery.expando ] = true; }; function returnFalse() { return false; } function returnTrue() { return true; } // jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding // http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html jQuery.Event.prototype = { preventDefault: function() { this.isDefaultPrevented = returnTrue; var e = this.originalEvent; if ( !e ) { return; } // if preventDefault exists run it on the original event if ( e.preventDefault ) { e.preventDefault(); // otherwise set the returnValue property of the original event to false (IE) } else { e.returnValue = false; } }, stopPropagation: function() { this.isPropagationStopped = returnTrue; var e = this.originalEvent; if ( !e ) { return; } // if stopPropagation exists run it on the original event if ( e.stopPropagation ) { e.stopPropagation(); } // otherwise set the cancelBubble property of the original event to true (IE) e.cancelBubble = true; }, stopImmediatePropagation: function() { this.isImmediatePropagationStopped = returnTrue; this.stopPropagation(); }, isDefaultPrevented: returnFalse, isPropagationStopped: returnFalse, isImmediatePropagationStopped: returnFalse }; // Create mouseenter/leave events using mouseover/out and event-time checks jQuery.each({ mouseenter: "mouseover", mouseleave: "mouseout" }, function( orig, fix ) { jQuery.event.special[ orig ] = { delegateType: fix, bindType: fix, handle: function( event ) { var ret, target = this, related = event.relatedTarget, handleObj = event.handleObj, selector = handleObj.selector; // For mousenter/leave call the handler if related is outside the target. // NB: No relatedTarget if the mouse left/entered the browser window if ( !related || (related !== target && !jQuery.contains( target, related )) ) { event.type = handleObj.origType; ret = handleObj.handler.apply( this, arguments ); event.type = fix; } return ret; } }; }); // IE submit delegation if ( !jQuery.support.submitBubbles ) { jQuery.event.special.submit = { setup: function() { // Only need this for delegated form submit events if ( jQuery.nodeName( this, "form" ) ) { return false; } // Lazy-add a submit handler when a descendant form may potentially be submitted jQuery.event.add( this, "click._submit keypress._submit", function( e ) { // Node name check avoids a VML-related crash in IE (#9807) var elem = e.target, form = jQuery.nodeName( elem, "input" ) || jQuery.nodeName( elem, "button" ) ? 
elem.form : undefined; if ( form && !jQuery._data( form, "_submit_attached" ) ) { jQuery.event.add( form, "submit._submit", function( event ) { event._submit_bubble = true; }); jQuery._data( form, "_submit_attached", true ); } }); // return undefined since we don't need an event listener }, postDispatch: function( event ) { // If form was submitted by the user, bubble the event up the tree if ( event._submit_bubble ) { delete event._submit_bubble; if ( this.parentNode && !event.isTrigger ) { jQuery.event.simulate( "submit", this.parentNode, event, true ); } } }, teardown: function() { // Only need this for delegated form submit events if ( jQuery.nodeName( this, "form" ) ) { return false; } // Remove delegated handlers; cleanData eventually reaps submit handlers attached above jQuery.event.remove( this, "._submit" ); } }; } // IE change delegation and checkbox/radio fix if ( !jQuery.support.changeBubbles ) { jQuery.event.special.change = { setup: function() { if ( rformElems.test( this.nodeName ) ) { // IE doesn't fire change on a check/radio until blur; trigger it on click // after a propertychange. Eat the blur-change in special.change.handle. // This still fires onchange a second time for check/radio after blur. if ( this.type === "checkbox" || this.type === "radio" ) { jQuery.event.add( this, "propertychange._change", function( event ) { if ( event.originalEvent.propertyName === "checked" ) { this._just_changed = true; } }); jQuery.event.add( this, "click._change", function( event ) { if ( this._just_changed && !event.isTrigger ) { this._just_changed = false; } // Allow triggered, simulated change events (#11500) jQuery.event.simulate( "change", this, event, true ); }); } return false; } // Delegated event; lazy-add a change handler on descendant inputs jQuery.event.add( this, "beforeactivate._change", function( e ) { var elem = e.target; if ( rformElems.test( elem.nodeName ) && !jQuery._data( elem, "_change_attached" ) ) { jQuery.event.add( elem, "change._change", function( event ) { if ( this.parentNode && !event.isSimulated && !event.isTrigger ) { jQuery.event.simulate( "change", this.parentNode, event, true ); } }); jQuery._data( elem, "_change_attached", true ); } }); }, handle: function( event ) { var elem = event.target; // Swallow native change events from checkbox/radio, we already triggered them above if ( this !== elem || event.isSimulated || event.isTrigger || (elem.type !== "radio" && elem.type !== "checkbox") ) { return event.handleObj.handler.apply( this, arguments ); } }, teardown: function() { jQuery.event.remove( this, "._change" ); return !rformElems.test( this.nodeName ); } }; } // Create "bubbling" focus and blur events if ( !jQuery.support.focusinBubbles ) { jQuery.each({ focus: "focusin", blur: "focusout" }, function( orig, fix ) { // Attach a single capturing handler while someone wants focusin/focusout var attaches = 0, handler = function( event ) { jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ), true ); }; jQuery.event.special[ fix ] = { setup: function() { if ( attaches++ === 0 ) { document.addEventListener( orig, handler, true ); } }, teardown: function() { if ( --attaches === 0 ) { document.removeEventListener( orig, handler, true ); } } }; }); } jQuery.fn.extend({ on: function( types, selector, data, fn, /*INTERNAL*/ one ) { var origFn, type; // Types can be a map of types/handlers if ( typeof types === "object" ) { // ( types-Object, selector, data ) if ( typeof selector !== "string" ) { // && selector != null // ( types-Object, data 
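/*
 * .on() (its argument normalization continues just below) accepts all of these
 * forms; handler/over/out are illustrative functions, #box/#list illustrative
 * elements:
 *
 *     jQuery("#box").on("click", handler);                      // direct binding
 *     jQuery("#list").on("click", "li", handler);               // delegated binding
 *     jQuery("#box").on("click", { n: 1 }, handler);            // passed through as event.data
 *     jQuery("#box").on({ mouseenter: over, mouseleave: out }); // types-object form
 *     jQuery("#box").one("click", handler);                     // unbound after the first call
 *     jQuery("#box").off("click", handler);
 */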
) data = data || selector; selector = undefined; } for ( type in types ) { this.on( type, selector, data, types[ type ], one ); } return this; } if ( data == null && fn == null ) { // ( types, fn ) fn = selector; data = selector = undefined; } else if ( fn == null ) { if ( typeof selector === "string" ) { // ( types, selector, fn ) fn = data; data = undefined; } else { // ( types, data, fn ) fn = data; data = selector; selector = undefined; } } if ( fn === false ) { fn = returnFalse; } else if ( !fn ) { return this; } if ( one === 1 ) { origFn = fn; fn = function( event ) { // Can use an empty set, since event contains the info jQuery().off( event ); return origFn.apply( this, arguments ); }; // Use same guid so caller can remove using origFn fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); } return this.each( function() { jQuery.event.add( this, types, fn, data, selector ); }); }, one: function( types, selector, data, fn ) { return this.on( types, selector, data, fn, 1 ); }, off: function( types, selector, fn ) { var handleObj, type; if ( types && types.preventDefault && types.handleObj ) { // ( event ) dispatched jQuery.Event handleObj = types.handleObj; jQuery( types.delegateTarget ).off( handleObj.namespace ? handleObj.origType + "." + handleObj.namespace : handleObj.origType, handleObj.selector, handleObj.handler ); return this; } if ( typeof types === "object" ) { // ( types-object [, selector] ) for ( type in types ) { this.off( type, selector, types[ type ] ); } return this; } if ( selector === false || typeof selector === "function" ) { // ( types [, fn] ) fn = selector; selector = undefined; } if ( fn === false ) { fn = returnFalse; } return this.each(function() { jQuery.event.remove( this, types, fn, selector ); }); }, bind: function( types, data, fn ) { return this.on( types, null, data, fn ); }, unbind: function( types, fn ) { return this.off( types, null, fn ); }, live: function( types, data, fn ) { jQuery( this.context ).on( types, this.selector, data, fn ); return this; }, die: function( types, fn ) { jQuery( this.context ).off( types, this.selector || "**", fn ); return this; }, delegate: function( selector, types, data, fn ) { return this.on( types, selector, data, fn ); }, undelegate: function( selector, types, fn ) { // ( namespace ) or ( selector, types [, fn] ) return arguments.length === 1 ? 
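/*
 * The legacy binding APIs defined here (.bind/.live/.delegate and their
 * removers) are thin wrappers over .on()/.off():
 *
 *     .bind(types, data, fn)        -> .on(types, null, data, fn)
 *     .delegate(sel, types, d, fn)  -> .on(types, sel, d, fn)
 *     .live(types, data, fn)        -> jQuery(context).on(types, this.selector, data, fn)
 *     .unbind()/.undelegate()/.die()-> the corresponding .off() forms
 *
 * .live()/.die() are deprecated upstream and removed in jQuery 1.9, so
 * .on()/.off() is the preferred form.
 */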
this.off( selector, "**" ) : this.off( types, selector || "**", fn ); }, trigger: function( type, data ) { return this.each(function() { jQuery.event.trigger( type, data, this ); }); }, triggerHandler: function( type, data ) { if ( this[0] ) { return jQuery.event.trigger( type, data, this[0], true ); } }, toggle: function( fn ) { // Save reference to arguments for access in closure var args = arguments, guid = fn.guid || jQuery.guid++, i = 0, toggler = function( event ) { // Figure out which function to execute var lastToggle = ( jQuery._data( this, "lastToggle" + fn.guid ) || 0 ) % i; jQuery._data( this, "lastToggle" + fn.guid, lastToggle + 1 ); // Make sure that clicks stop event.preventDefault(); // and execute the function return args[ lastToggle ].apply( this, arguments ) || false; }; // link all the functions, so any of them can unbind this click handler toggler.guid = guid; while ( i < args.length ) { args[ i++ ].guid = guid; } return this.click( toggler ); }, hover: function( fnOver, fnOut ) { return this.mouseenter( fnOver ).mouseleave( fnOut || fnOver ); } }); jQuery.each( ("blur focus focusin focusout load resize scroll unload click dblclick " + "mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave " + "change select submit keydown keypress keyup error contextmenu").split(" "), function( i, name ) { // Handle event binding jQuery.fn[ name ] = function( data, fn ) { if ( fn == null ) { fn = data; data = null; } return arguments.length > 0 ? this.on( name, null, data, fn ) : this.trigger( name ); }; if ( rkeyEvent.test( name ) ) { jQuery.event.fixHooks[ name ] = jQuery.event.keyHooks; } if ( rmouseEvent.test( name ) ) { jQuery.event.fixHooks[ name ] = jQuery.event.mouseHooks; } }); /*! * Sizzle CSS Selector Engine * Copyright 2012 jQuery Foundation and other contributors * Released under the MIT license * http://sizzlejs.com/ */ (function( window, undefined ) { var cachedruns, assertGetIdNotName, Expr, getText, isXML, contains, compile, sortOrder, hasDuplicate, outermostContext, baseHasDuplicate = true, strundefined = "undefined", expando = ( "sizcache" + Math.random() ).replace( ".", "" ), Token = String, document = window.document, docElem = document.documentElement, dirruns = 0, done = 0, pop = [].pop, push = [].push, slice = [].slice, // Use a stripped-down indexOf if a native one is unavailable indexOf = [].indexOf || function( elem ) { var i = 0, len = this.length; for ( ; i < len; i++ ) { if ( this[i] === elem ) { return i; } } return -1; }, // Augment a function for special use by Sizzle markFunction = function( fn, value ) { fn[ expando ] = value == null || value; return fn; }, createCache = function() { var cache = {}, keys = []; return markFunction(function( key, value ) { // Only keep the most recent entries if ( keys.push( key ) > Expr.cacheLength ) { delete cache[ keys.shift() ]; } // Retrieve with (key + " ") to avoid collision with native Object.prototype properties (see Issue #157) return (cache[ key + " " ] = value); }, cache ); }, classCache = createCache(), tokenCache = createCache(), compilerCache = createCache(), // Regex // Whitespace characters http://www.w3.org/TR/css3-selectors/#whitespace whitespace = "[\\x20\\t\\r\\n\\f]", // http://www.w3.org/TR/css3-syntax/#characters characterEncoding = "(?:\\\\.|[-\\w]|[^\\x00-\\xa0])+", // Loosely modeled on CSS identifier characters // An unquoted value should be a CSS identifier (http://www.w3.org/TR/css3-selectors/#attribute-selectors) // Proper syntax: 
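/*
 * createCache() above backs Sizzle's classCache/tokenCache/compilerCache: keys
 * are remembered in insertion order and the oldest is evicted once more than
 * Expr.cacheLength entries exist, and every key is stored as (key + " ") so
 * selector text can never collide with Object.prototype members such as
 * "toString".
 */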
http://www.w3.org/TR/CSS21/syndata.html#value-def-identifier identifier = characterEncoding.replace( "w", "w#" ), // Acceptable operators http://www.w3.org/TR/selectors/#attribute-selectors operators = "([*^$|!~]?=)", attributes = "\\[" + whitespace + "*(" + characterEncoding + ")" + whitespace + "*(?:" + operators + whitespace + "*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|(" + identifier + ")|)|)" + whitespace + "*\\]", // Prefer arguments not in parens/brackets, // then attribute selectors and non-pseudos (denoted by :), // then anything else // These preferences are here to reduce the number of selectors // needing tokenize in the PSEUDO preFilter pseudos = ":(" + characterEncoding + ")(?:\\((?:(['\"])((?:\\\\.|[^\\\\])*?)\\2|([^()[\\]]*|(?:(?:" + attributes + ")|[^:]|\\\\.)*|.*))\\)|)", // For matchExpr.POS and matchExpr.needsContext pos = ":(even|odd|eq|gt|lt|nth|first|last)(?:\\(" + whitespace + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + whitespace + "+$", "g" ), rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ), rcombinators = new RegExp( "^" + whitespace + "*([\\x20\\t\\r\\n\\f>+~])" + whitespace + "*" ), rpseudo = new RegExp( pseudos ), // Easily-parseable/retrievable ID or TAG or CLASS selectors rquickExpr = /^(?:#([\w\-]+)|(\w+)|\.([\w\-]+))$/, rnot = /^:not/, rsibling = /[\x20\t\r\n\f]*[+~]/, rendsWithNot = /:not\($/, rheader = /h\d/i, rinputs = /input|select|textarea|button/i, rbackslash = /\\(?!\\)/g, matchExpr = { "ID": new RegExp( "^#(" + characterEncoding + ")" ), "CLASS": new RegExp( "^\\.(" + characterEncoding + ")" ), "NAME": new RegExp( "^\\[name=['\"]?(" + characterEncoding + ")['\"]?\\]" ), "TAG": new RegExp( "^(" + characterEncoding.replace( "w", "w*" ) + ")" ), "ATTR": new RegExp( "^" + attributes ), "PSEUDO": new RegExp( "^" + pseudos ), "POS": new RegExp( pos, "i" ), "CHILD": new RegExp( "^:(only|nth|first|last)-child(?:\\(" + whitespace + "*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" + whitespace + "*(\\d+)|))" + whitespace + "*\\)|)", "i" ), // For use in libraries implementing .is() "needsContext": new RegExp( "^" + whitespace + "*[>+~]|" + pos, "i" ) }, // Support // Used for testing something on an element assert = function( fn ) { var div = document.createElement("div"); try { return fn( div ); } catch (e) { return false; } finally { // release memory in IE div = null; } }, // Check if getElementsByTagName("*") returns only elements assertTagNameNoComments = assert(function( div ) { div.appendChild( document.createComment("") ); return !div.getElementsByTagName("*").length; }), // Check if getAttribute returns normalized href attributes assertHrefNotNormalized = assert(function( div ) { div.innerHTML = ""; return div.firstChild && typeof div.firstChild.getAttribute !== strundefined && div.firstChild.getAttribute("href") === "#"; }), // Check if attributes should be retrieved by attribute nodes assertAttributes = assert(function( div ) { div.innerHTML = ""; var type = typeof div.lastChild.getAttribute("multiple"); // IE8 returns a string for some attributes even when not present return type !== "boolean" && type !== "string"; }), // Check if getElementsByClassName can be trusted assertUsableClassName = assert(function( div ) { // Opera can't find a second classname (in 9.6) div.innerHTML = ""; if ( !div.getElementsByClassName || 
!div.getElementsByClassName("e").length ) { return false; } // Safari 3.2 caches class attributes and doesn't catch changes div.lastChild.className = "e"; return div.getElementsByClassName("e").length === 2; }), // Check if getElementById returns elements by name // Check if getElementsByName privileges form controls or returns elements by ID assertUsableName = assert(function( div ) { // Inject content div.id = expando + 0; div.innerHTML = "
"; docElem.insertBefore( div, docElem.firstChild ); // Test var pass = document.getElementsByName && // buggy browsers will return fewer than the correct 2 document.getElementsByName( expando ).length === 2 + // buggy browsers will return more than the correct 0 document.getElementsByName( expando + 0 ).length; assertGetIdNotName = !document.getElementById( expando ); // Cleanup docElem.removeChild( div ); return pass; }); // If slice is not available, provide a backup try { slice.call( docElem.childNodes, 0 )[0].nodeType; } catch ( e ) { slice = function( i ) { var elem, results = []; for ( ; (elem = this[i]); i++ ) { results.push( elem ); } return results; }; } function Sizzle( selector, context, results, seed ) { results = results || []; context = context || document; var match, elem, xml, m, nodeType = context.nodeType; if ( !selector || typeof selector !== "string" ) { return results; } if ( nodeType !== 1 && nodeType !== 9 ) { return []; } xml = isXML( context ); if ( !xml && !seed ) { if ( (match = rquickExpr.exec( selector )) ) { // Speed-up: Sizzle("#ID") if ( (m = match[1]) ) { if ( nodeType === 9 ) { elem = context.getElementById( m ); // Check parentNode to catch when Blackberry 4.6 returns // nodes that are no longer in the document #6963 if ( elem && elem.parentNode ) { // Handle the case where IE, Opera, and Webkit return items // by name instead of ID if ( elem.id === m ) { results.push( elem ); return results; } } else { return results; } } else { // Context is not a document if ( context.ownerDocument && (elem = context.ownerDocument.getElementById( m )) && contains( context, elem ) && elem.id === m ) { results.push( elem ); return results; } } // Speed-up: Sizzle("TAG") } else if ( match[2] ) { push.apply( results, slice.call(context.getElementsByTagName( selector ), 0) ); return results; // Speed-up: Sizzle(".CLASS") } else if ( (m = match[3]) && assertUsableClassName && context.getElementsByClassName ) { push.apply( results, slice.call(context.getElementsByClassName( m ), 0) ); return results; } } } // All others return select( selector.replace( rtrim, "$1" ), context, results, seed, xml ); } Sizzle.matches = function( expr, elements ) { return Sizzle( expr, null, null, elements ); }; Sizzle.matchesSelector = function( elem, expr ) { return Sizzle( expr, null, null, [ elem ] ).length > 0; }; // Returns a function to use in pseudos for input types function createInputPseudo( type ) { return function( elem ) { var name = elem.nodeName.toLowerCase(); return name === "input" && elem.type === type; }; } // Returns a function to use in pseudos for buttons function createButtonPseudo( type ) { return function( elem ) { var name = elem.nodeName.toLowerCase(); return (name === "input" || name === "button") && elem.type === type; }; } // Returns a function to use in pseudos for positionals function createPositionalPseudo( fn ) { return markFunction(function( argument ) { argument = +argument; return markFunction(function( seed, matches ) { var j, matchIndexes = fn( [], seed.length, argument ), i = matchIndexes.length; // Match elements found at the specified indexes while ( i-- ) { if ( seed[ (j = matchIndexes[i]) ] ) { seed[j] = !(matches[j] = seed[j]); } } }); }); } /** * Utility function for retrieving the text value of an array of DOM nodes * @param {Array|Element} elem */ getText = Sizzle.getText = function( elem ) { var node, ret = "", i = 0, nodeType = elem.nodeType; if ( nodeType ) { if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) { // Use textContent for 
elements // innerText usage removed for consistency of new lines (see #11153) if ( typeof elem.textContent === "string" ) { return elem.textContent; } else { // Traverse its children for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { ret += getText( elem ); } } } else if ( nodeType === 3 || nodeType === 4 ) { return elem.nodeValue; } // Do not include comment or processing instruction nodes } else { // If no nodeType, this is expected to be an array for ( ; (node = elem[i]); i++ ) { // Do not traverse comment nodes ret += getText( node ); } } return ret; }; isXML = Sizzle.isXML = function( elem ) { // documentElement is verified for cases where it doesn't yet exist // (such as loading iframes in IE - #4833) var documentElement = elem && (elem.ownerDocument || elem).documentElement; return documentElement ? documentElement.nodeName !== "HTML" : false; }; // Element contains another contains = Sizzle.contains = docElem.contains ? function( a, b ) { var adown = a.nodeType === 9 ? a.documentElement : a, bup = b && b.parentNode; return a === bup || !!( bup && bup.nodeType === 1 && adown.contains && adown.contains(bup) ); } : docElem.compareDocumentPosition ? function( a, b ) { return b && !!( a.compareDocumentPosition( b ) & 16 ); } : function( a, b ) { while ( (b = b.parentNode) ) { if ( b === a ) { return true; } } return false; }; Sizzle.attr = function( elem, name ) { var val, xml = isXML( elem ); if ( !xml ) { name = name.toLowerCase(); } if ( (val = Expr.attrHandle[ name ]) ) { return val( elem ); } if ( xml || assertAttributes ) { return elem.getAttribute( name ); } val = elem.getAttributeNode( name ); return val ? typeof elem[ name ] === "boolean" ? elem[ name ] ? name : null : val.specified ? val.value : null : null; }; Expr = Sizzle.selectors = { // Can be adjusted by the user cacheLength: 50, createPseudo: markFunction, match: matchExpr, // IE6/7 return a modified href attrHandle: assertHrefNotNormalized ? {} : { "href": function( elem ) { return elem.getAttribute( "href", 2 ); }, "type": function( elem ) { return elem.getAttribute("type"); } }, find: { "ID": assertGetIdNotName ? function( id, context, xml ) { if ( typeof context.getElementById !== strundefined && !xml ) { var m = context.getElementById( id ); // Check parentNode to catch when Blackberry 4.6 returns // nodes that are no longer in the document #6963 return m && m.parentNode ? [m] : []; } } : function( id, context, xml ) { if ( typeof context.getElementById !== strundefined && !xml ) { var m = context.getElementById( id ); return m ? m.id === id || typeof m.getAttributeNode !== strundefined && m.getAttributeNode("id").value === id ? [m] : undefined : []; } }, "TAG": assertTagNameNoComments ? 
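/*
 * Sizzle() above takes (selector, context, results, seed) and short-circuits
 * simple selectors before compiling: "#id" via getElementById on a document
 * context, bare tags via getElementsByTagName, and ".class" when
 * getElementsByClassName is usable. Illustrative calls (selectors and elements
 * are examples only):
 *
 *     Sizzle( "#main" );                          // fast ID path
 *     Sizzle( "div", someElement );               // fast TAG path within a context element
 *     Sizzle.matchesSelector( el, "div.item" );   // -> boolean
 *
 * jQuery's own selector matching (.find(), .is(), .filter()) is built on this
 * engine, and the helpers just above (getText, contains, Sizzle.attr) supply
 * its text, containment and attribute reads.
 */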
function( tag, context ) { if ( typeof context.getElementsByTagName !== strundefined ) { return context.getElementsByTagName( tag ); } } : function( tag, context ) { var results = context.getElementsByTagName( tag ); // Filter out possible comments if ( tag === "*" ) { var elem, tmp = [], i = 0; for ( ; (elem = results[i]); i++ ) { if ( elem.nodeType === 1 ) { tmp.push( elem ); } } return tmp; } return results; }, "NAME": assertUsableName && function( tag, context ) { if ( typeof context.getElementsByName !== strundefined ) { return context.getElementsByName( name ); } }, "CLASS": assertUsableClassName && function( className, context, xml ) { if ( typeof context.getElementsByClassName !== strundefined && !xml ) { return context.getElementsByClassName( className ); } } }, relative: { ">": { dir: "parentNode", first: true }, " ": { dir: "parentNode" }, "+": { dir: "previousSibling", first: true }, "~": { dir: "previousSibling" } }, preFilter: { "ATTR": function( match ) { match[1] = match[1].replace( rbackslash, "" ); // Move the given value to match[3] whether quoted or unquoted match[3] = ( match[4] || match[5] || "" ).replace( rbackslash, "" ); if ( match[2] === "~=" ) { match[3] = " " + match[3] + " "; } return match.slice( 0, 4 ); }, "CHILD": function( match ) { /* matches from matchExpr["CHILD"] 1 type (only|nth|...) 2 argument (even|odd|\d*|\d*n([+-]\d+)?|...) 3 xn-component of xn+y argument ([+-]?\d*n|) 4 sign of xn-component 5 x of xn-component 6 sign of y-component 7 y of y-component */ match[1] = match[1].toLowerCase(); if ( match[1] === "nth" ) { // nth-child requires argument if ( !match[2] ) { Sizzle.error( match[0] ); } // numeric x and y parameters for Expr.filter.CHILD // remember that false/true cast respectively to 0/1 match[3] = +( match[3] ? match[4] + (match[5] || 1) : 2 * ( match[2] === "even" || match[2] === "odd" ) ); match[4] = +( ( match[6] + match[7] ) || match[2] === "odd" ); // other types prohibit arguments } else if ( match[2] ) { Sizzle.error( match[0] ); } return match; }, "PSEUDO": function( match ) { var unquoted, excess; if ( matchExpr["CHILD"].test( match[0] ) ) { return null; } if ( match[3] ) { match[2] = match[3]; } else if ( (unquoted = match[4]) ) { // Only check arguments that contain a pseudo if ( rpseudo.test(unquoted) && // Get excess from tokenize (recursively) (excess = tokenize( unquoted, true )) && // advance to the next closing parenthesis (excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length) ) { // excess is a negative index unquoted = unquoted.slice( 0, excess ); match[0] = match[0].slice( 0, excess ); } match[2] = unquoted; } // Return only captures needed by the pseudo filter method (type and argument) return match.slice( 0, 3 ); } }, filter: { "ID": assertGetIdNotName ? 
function( id ) { id = id.replace( rbackslash, "" ); return function( elem ) { return elem.getAttribute("id") === id; }; } : function( id ) { id = id.replace( rbackslash, "" ); return function( elem ) { var node = typeof elem.getAttributeNode !== strundefined && elem.getAttributeNode("id"); return node && node.value === id; }; }, "TAG": function( nodeName ) { if ( nodeName === "*" ) { return function() { return true; }; } nodeName = nodeName.replace( rbackslash, "" ).toLowerCase(); return function( elem ) { return elem.nodeName && elem.nodeName.toLowerCase() === nodeName; }; }, "CLASS": function( className ) { var pattern = classCache[ expando ][ className + " " ]; return pattern || (pattern = new RegExp( "(^|" + whitespace + ")" + className + "(" + whitespace + "|$)" )) && classCache( className, function( elem ) { return pattern.test( elem.className || (typeof elem.getAttribute !== strundefined && elem.getAttribute("class")) || "" ); }); }, "ATTR": function( name, operator, check ) { return function( elem, context ) { var result = Sizzle.attr( elem, name ); if ( result == null ) { return operator === "!="; } if ( !operator ) { return true; } result += ""; return operator === "=" ? result === check : operator === "!=" ? result !== check : operator === "^=" ? check && result.indexOf( check ) === 0 : operator === "*=" ? check && result.indexOf( check ) > -1 : operator === "$=" ? check && result.substr( result.length - check.length ) === check : operator === "~=" ? ( " " + result + " " ).indexOf( check ) > -1 : operator === "|=" ? result === check || result.substr( 0, check.length + 1 ) === check + "-" : false; }; }, "CHILD": function( type, argument, first, last ) { if ( type === "nth" ) { return function( elem ) { var node, diff, parent = elem.parentNode; if ( first === 1 && last === 0 ) { return true; } if ( parent ) { diff = 0; for ( node = parent.firstChild; node; node = node.nextSibling ) { if ( node.nodeType === 1 ) { diff++; if ( elem === node ) { break; } } } } // Incorporate the offset (or cast to NaN), then check against cycle size diff -= last; return diff === first || ( diff % first === 0 && diff / first >= 0 ); }; } return function( elem ) { var node = elem; switch ( type ) { case "only": case "first": while ( (node = node.previousSibling) ) { if ( node.nodeType === 1 ) { return false; } } if ( type === "first" ) { return true; } node = elem; /* falls through */ case "last": while ( (node = node.nextSibling) ) { if ( node.nodeType === 1 ) { return false; } } return true; } }; }, "PSEUDO": function( pseudo, argument ) { // pseudo-class names are case-insensitive // http://www.w3.org/TR/selectors/#pseudo-classes // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters // Remember that setFilters inherits from pseudos var args, fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] || Sizzle.error( "unsupported pseudo: " + pseudo ); // The user may use createPseudo to indicate that // arguments are needed to create the filter function // just as Sizzle does if ( fn[ expando ] ) { return fn( argument ); } // But maintain support for old signatures if ( fn.length > 1 ) { args = [ pseudo, pseudo, "", argument ]; return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ? 
markFunction(function( seed, matches ) { var idx, matched = fn( seed, argument ), i = matched.length; while ( i-- ) { idx = indexOf.call( seed, matched[i] ); seed[ idx ] = !( matches[ idx ] = matched[i] ); } }) : function( elem ) { return fn( elem, 0, args ); }; } return fn; } }, pseudos: { "not": markFunction(function( selector ) { // Trim the selector passed to compile // to avoid treating leading and trailing // spaces as combinators var input = [], results = [], matcher = compile( selector.replace( rtrim, "$1" ) ); return matcher[ expando ] ? markFunction(function( seed, matches, context, xml ) { var elem, unmatched = matcher( seed, null, xml, [] ), i = seed.length; // Match elements unmatched by `matcher` while ( i-- ) { if ( (elem = unmatched[i]) ) { seed[i] = !(matches[i] = elem); } } }) : function( elem, context, xml ) { input[0] = elem; matcher( input, null, xml, results ); return !results.pop(); }; }), "has": markFunction(function( selector ) { return function( elem ) { return Sizzle( selector, elem ).length > 0; }; }), "contains": markFunction(function( text ) { return function( elem ) { return ( elem.textContent || elem.innerText || getText( elem ) ).indexOf( text ) > -1; }; }), "enabled": function( elem ) { return elem.disabled === false; }, "disabled": function( elem ) { return elem.disabled === true; }, "checked": function( elem ) { // In CSS3, :checked should return both checked and selected elements // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked var nodeName = elem.nodeName.toLowerCase(); return (nodeName === "input" && !!elem.checked) || (nodeName === "option" && !!elem.selected); }, "selected": function( elem ) { // Accessing this property makes selected-by-default // options in Safari work properly if ( elem.parentNode ) { elem.parentNode.selectedIndex; } return elem.selected === true; }, "parent": function( elem ) { return !Expr.pseudos["empty"]( elem ); }, "empty": function( elem ) { // http://www.w3.org/TR/selectors/#empty-pseudo // :empty is only affected by element nodes and content nodes(including text(3), cdata(4)), // not comment, processing instructions, or others // Thanks to Diego Perini for the nodeName shortcut // Greater than "@" means alpha characters (specifically not starting with "#" or "?") var nodeType; elem = elem.firstChild; while ( elem ) { if ( elem.nodeName > "@" || (nodeType = elem.nodeType) === 3 || nodeType === 4 ) { return false; } elem = elem.nextSibling; } return true; }, "header": function( elem ) { return rheader.test( elem.nodeName ); }, "text": function( elem ) { var type, attr; // IE6 and 7 will map elem.type to 'text' for new HTML5 types (search, etc) // use getAttribute instead to test this case return elem.nodeName.toLowerCase() === "input" && (type = elem.type) === "text" && ( (attr = elem.getAttribute("type")) == null || attr.toLowerCase() === type ); }, // Input types "radio": createInputPseudo("radio"), "checkbox": createInputPseudo("checkbox"), "file": createInputPseudo("file"), "password": createInputPseudo("password"), "image": createInputPseudo("image"), "submit": createButtonPseudo("submit"), "reset": createButtonPseudo("reset"), "button": function( elem ) { var name = elem.nodeName.toLowerCase(); return name === "input" && elem.type === "button" || name === "button"; }, "input": function( elem ) { return rinputs.test( elem.nodeName ); }, "focus": function( elem ) { var doc = elem.ownerDocument; return elem === doc.activeElement && (!doc.hasFocus || doc.hasFocus()) && !!(elem.type || elem.href || 
~elem.tabIndex); }, "active": function( elem ) { return elem === elem.ownerDocument.activeElement; }, // Positional types "first": createPositionalPseudo(function() { return [ 0 ]; }), "last": createPositionalPseudo(function( matchIndexes, length ) { return [ length - 1 ]; }), "eq": createPositionalPseudo(function( matchIndexes, length, argument ) { return [ argument < 0 ? argument + length : argument ]; }), "even": createPositionalPseudo(function( matchIndexes, length ) { for ( var i = 0; i < length; i += 2 ) { matchIndexes.push( i ); } return matchIndexes; }), "odd": createPositionalPseudo(function( matchIndexes, length ) { for ( var i = 1; i < length; i += 2 ) { matchIndexes.push( i ); } return matchIndexes; }), "lt": createPositionalPseudo(function( matchIndexes, length, argument ) { for ( var i = argument < 0 ? argument + length : argument; --i >= 0; ) { matchIndexes.push( i ); } return matchIndexes; }), "gt": createPositionalPseudo(function( matchIndexes, length, argument ) { for ( var i = argument < 0 ? argument + length : argument; ++i < length; ) { matchIndexes.push( i ); } return matchIndexes; }) } }; function siblingCheck( a, b, ret ) { if ( a === b ) { return ret; } var cur = a.nextSibling; while ( cur ) { if ( cur === b ) { return -1; } cur = cur.nextSibling; } return 1; } sortOrder = docElem.compareDocumentPosition ? function( a, b ) { if ( a === b ) { hasDuplicate = true; return 0; } return ( !a.compareDocumentPosition || !b.compareDocumentPosition ? a.compareDocumentPosition : a.compareDocumentPosition(b) & 4 ) ? -1 : 1; } : function( a, b ) { // The nodes are identical, we can exit early if ( a === b ) { hasDuplicate = true; return 0; // Fallback to using sourceIndex (in IE) if it's available on both nodes } else if ( a.sourceIndex && b.sourceIndex ) { return a.sourceIndex - b.sourceIndex; } var al, bl, ap = [], bp = [], aup = a.parentNode, bup = b.parentNode, cur = aup; // If the nodes are siblings (or identical) we can do a quick check if ( aup === bup ) { return siblingCheck( a, b ); // If no parents were found then the nodes are disconnected } else if ( !aup ) { return -1; } else if ( !bup ) { return 1; } // Otherwise they're somewhere else in the tree so we need // to build up a full list of the parentNodes for comparison while ( cur ) { ap.unshift( cur ); cur = cur.parentNode; } cur = bup; while ( cur ) { bp.unshift( cur ); cur = cur.parentNode; } al = ap.length; bl = bp.length; // Start walking down the tree looking for a discrepancy for ( var i = 0; i < al && i < bl; i++ ) { if ( ap[i] !== bp[i] ) { return siblingCheck( ap[i], bp[i] ); } } // We ended someplace up the tree so do a sibling check return i === al ? siblingCheck( a, bp[i], -1 ) : siblingCheck( ap[i], b, 1 ); }; // Always assume the presence of duplicates if sort doesn't // pass them to our comparison function (as in Google Chrome). 
[0, 0].sort( sortOrder ); baseHasDuplicate = !hasDuplicate; // Document sorting and removing duplicates Sizzle.uniqueSort = function( results ) { var elem, duplicates = [], i = 1, j = 0; hasDuplicate = baseHasDuplicate; results.sort( sortOrder ); if ( hasDuplicate ) { for ( ; (elem = results[i]); i++ ) { if ( elem === results[ i - 1 ] ) { j = duplicates.push( i ); } } while ( j-- ) { results.splice( duplicates[ j ], 1 ); } } return results; }; Sizzle.error = function( msg ) { throw new Error( "Syntax error, unrecognized expression: " + msg ); }; function tokenize( selector, parseOnly ) { var matched, match, tokens, type, soFar, groups, preFilters, cached = tokenCache[ expando ][ selector + " " ]; if ( cached ) { return parseOnly ? 0 : cached.slice( 0 ); } soFar = selector; groups = []; preFilters = Expr.preFilter; while ( soFar ) { // Comma and first run if ( !matched || (match = rcomma.exec( soFar )) ) { if ( match ) { // Don't consume trailing commas as valid soFar = soFar.slice( match[0].length ) || soFar; } groups.push( tokens = [] ); } matched = false; // Combinators if ( (match = rcombinators.exec( soFar )) ) { tokens.push( matched = new Token( match.shift() ) ); soFar = soFar.slice( matched.length ); // Cast descendant combinators to space matched.type = match[0].replace( rtrim, " " ); } // Filters for ( type in Expr.filter ) { if ( (match = matchExpr[ type ].exec( soFar )) && (!preFilters[ type ] || (match = preFilters[ type ]( match ))) ) { tokens.push( matched = new Token( match.shift() ) ); soFar = soFar.slice( matched.length ); matched.type = type; matched.matches = match; } } if ( !matched ) { break; } } // Return the length of the invalid excess // if we're just parsing // Otherwise, throw an error or return tokens return parseOnly ? soFar.length : soFar ? Sizzle.error( selector ) : // Cache the tokens tokenCache( selector, groups ).slice( 0 ); } function addCombinator( matcher, combinator, base ) { var dir = combinator.dir, checkNonElements = base && combinator.dir === "parentNode", doneName = done++; return combinator.first ? // Check against closest ancestor/preceding element function( elem, context, xml ) { while ( (elem = elem[ dir ]) ) { if ( checkNonElements || elem.nodeType === 1 ) { return matcher( elem, context, xml ); } } } : // Check against all ancestor/preceding elements function( elem, context, xml ) { // We can't set arbitrary data on XML nodes, so they don't benefit from dir caching if ( !xml ) { var cache, dirkey = dirruns + " " + doneName + " ", cachedkey = dirkey + cachedruns; while ( (elem = elem[ dir ]) ) { if ( checkNonElements || elem.nodeType === 1 ) { if ( (cache = elem[ expando ]) === cachedkey ) { return elem.sizset; } else if ( typeof cache === "string" && cache.indexOf(dirkey) === 0 ) { if ( elem.sizset ) { return elem; } } else { elem[ expando ] = cachedkey; if ( matcher( elem, context, xml ) ) { elem.sizset = true; return elem; } elem.sizset = false; } } } } else { while ( (elem = elem[ dir ]) ) { if ( checkNonElements || elem.nodeType === 1 ) { if ( matcher( elem, context, xml ) ) { return elem; } } } } }; } function elementMatcher( matchers ) { return matchers.length > 1 ? 
function( elem, context, xml ) { var i = matchers.length; while ( i-- ) { if ( !matchers[i]( elem, context, xml ) ) { return false; } } return true; } : matchers[0]; } function condense( unmatched, map, filter, context, xml ) { var elem, newUnmatched = [], i = 0, len = unmatched.length, mapped = map != null; for ( ; i < len; i++ ) { if ( (elem = unmatched[i]) ) { if ( !filter || filter( elem, context, xml ) ) { newUnmatched.push( elem ); if ( mapped ) { map.push( i ); } } } } return newUnmatched; } function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) { if ( postFilter && !postFilter[ expando ] ) { postFilter = setMatcher( postFilter ); } if ( postFinder && !postFinder[ expando ] ) { postFinder = setMatcher( postFinder, postSelector ); } return markFunction(function( seed, results, context, xml ) { var temp, i, elem, preMap = [], postMap = [], preexisting = results.length, // Get initial elements from seed or context elems = seed || multipleContexts( selector || "*", context.nodeType ? [ context ] : context, [] ), // Prefilter to get matcher input, preserving a map for seed-results synchronization matcherIn = preFilter && ( seed || !selector ) ? condense( elems, preMap, preFilter, context, xml ) : elems, matcherOut = matcher ? // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results, postFinder || ( seed ? preFilter : preexisting || postFilter ) ? // ...intermediate processing is necessary [] : // ...otherwise use results directly results : matcherIn; // Find primary matches if ( matcher ) { matcher( matcherIn, matcherOut, context, xml ); } // Apply postFilter if ( postFilter ) { temp = condense( matcherOut, postMap ); postFilter( temp, [], context, xml ); // Un-match failing elements by moving them back to matcherIn i = temp.length; while ( i-- ) { if ( (elem = temp[i]) ) { matcherOut[ postMap[i] ] = !(matcherIn[ postMap[i] ] = elem); } } } if ( seed ) { if ( postFinder || preFilter ) { if ( postFinder ) { // Get the final matcherOut by condensing this intermediate into postFinder contexts temp = []; i = matcherOut.length; while ( i-- ) { if ( (elem = matcherOut[i]) ) { // Restore matcherIn since elem is not yet a final match temp.push( (matcherIn[i] = elem) ); } } postFinder( null, (matcherOut = []), temp, xml ); } // Move matched elements from seed to results to keep them synchronized i = matcherOut.length; while ( i-- ) { if ( (elem = matcherOut[i]) && (temp = postFinder ? indexOf.call( seed, elem ) : preMap[i]) > -1 ) { seed[temp] = !(results[temp] = elem); } } } // Add elements to results, through postFinder if defined } else { matcherOut = condense( matcherOut === results ? matcherOut.splice( preexisting, matcherOut.length ) : matcherOut ); if ( postFinder ) { postFinder( null, results, matcherOut, xml ); } else { push.apply( results, matcherOut ); } } }); } function matcherFromTokens( tokens ) { var checkContext, matcher, j, len = tokens.length, leadingRelative = Expr.relative[ tokens[0].type ], implicitRelative = leadingRelative || Expr.relative[" "], i = leadingRelative ? 
1 : 0, // The foundational matcher ensures that elements are reachable from top-level context(s) matchContext = addCombinator( function( elem ) { return elem === checkContext; }, implicitRelative, true ), matchAnyContext = addCombinator( function( elem ) { return indexOf.call( checkContext, elem ) > -1; }, implicitRelative, true ), matchers = [ function( elem, context, xml ) { return ( !leadingRelative && ( xml || context !== outermostContext ) ) || ( (checkContext = context).nodeType ? matchContext( elem, context, xml ) : matchAnyContext( elem, context, xml ) ); } ]; for ( ; i < len; i++ ) { if ( (matcher = Expr.relative[ tokens[i].type ]) ) { matchers = [ addCombinator( elementMatcher( matchers ), matcher ) ]; } else { matcher = Expr.filter[ tokens[i].type ].apply( null, tokens[i].matches ); // Return special upon seeing a positional matcher if ( matcher[ expando ] ) { // Find the next relative operator (if any) for proper handling j = ++i; for ( ; j < len; j++ ) { if ( Expr.relative[ tokens[j].type ] ) { break; } } return setMatcher( i > 1 && elementMatcher( matchers ), i > 1 && tokens.slice( 0, i - 1 ).join("").replace( rtrim, "$1" ), matcher, i < j && matcherFromTokens( tokens.slice( i, j ) ), j < len && matcherFromTokens( (tokens = tokens.slice( j )) ), j < len && tokens.join("") ); } matchers.push( matcher ); } } return elementMatcher( matchers ); } function matcherFromGroupMatchers( elementMatchers, setMatchers ) { var bySet = setMatchers.length > 0, byElement = elementMatchers.length > 0, superMatcher = function( seed, context, xml, results, expandContext ) { var elem, j, matcher, setMatched = [], matchedCount = 0, i = "0", unmatched = seed && [], outermost = expandContext != null, contextBackup = outermostContext, // We must always have either seed elements or context elems = seed || byElement && Expr.find["TAG"]( "*", expandContext && context.parentNode || context ), // Nested matchers should use non-integer dirruns dirrunsUnique = (dirruns += contextBackup == null ? 
1 : Math.E); if ( outermost ) { outermostContext = context !== document && context; cachedruns = superMatcher.el; } // Add elements passing elementMatchers directly to results for ( ; (elem = elems[i]) != null; i++ ) { if ( byElement && elem ) { for ( j = 0; (matcher = elementMatchers[j]); j++ ) { if ( matcher( elem, context, xml ) ) { results.push( elem ); break; } } if ( outermost ) { dirruns = dirrunsUnique; cachedruns = ++superMatcher.el; } } // Track unmatched elements for set filters if ( bySet ) { // They will have gone through all possible matchers if ( (elem = !matcher && elem) ) { matchedCount--; } // Lengthen the array for every element, matched or not if ( seed ) { unmatched.push( elem ); } } } // Apply set filters to unmatched elements matchedCount += i; if ( bySet && i !== matchedCount ) { for ( j = 0; (matcher = setMatchers[j]); j++ ) { matcher( unmatched, setMatched, context, xml ); } if ( seed ) { // Reintegrate element matches to eliminate the need for sorting if ( matchedCount > 0 ) { while ( i-- ) { if ( !(unmatched[i] || setMatched[i]) ) { setMatched[i] = pop.call( results ); } } } // Discard index placeholder values to get only actual matches setMatched = condense( setMatched ); } // Add matches to results push.apply( results, setMatched ); // Seedless set matches succeeding multiple successful matchers stipulate sorting if ( outermost && !seed && setMatched.length > 0 && ( matchedCount + setMatchers.length ) > 1 ) { Sizzle.uniqueSort( results ); } } // Override manipulation of globals by nested matchers if ( outermost ) { dirruns = dirrunsUnique; outermostContext = contextBackup; } return unmatched; }; superMatcher.el = 0; return bySet ? markFunction( superMatcher ) : superMatcher; } compile = Sizzle.compile = function( selector, group /* Internal Use Only */ ) { var i, setMatchers = [], elementMatchers = [], cached = compilerCache[ expando ][ selector + " " ]; if ( !cached ) { // Generate a function of recursive functions that can be used to check each element if ( !group ) { group = tokenize( selector ); } i = group.length; while ( i-- ) { cached = matcherFromTokens( group[i] ); if ( cached[ expando ] ) { setMatchers.push( cached ); } else { elementMatchers.push( cached ); } } // Cache the compiled function cached = compilerCache( selector, matcherFromGroupMatchers( elementMatchers, setMatchers ) ); } return cached; }; function multipleContexts( selector, contexts, results ) { var i = 0, len = contexts.length; for ( ; i < len; i++ ) { Sizzle( selector, contexts[i], results ); } return results; } function select( selector, context, results, seed, xml ) { var i, tokens, token, type, find, match = tokenize( selector ), j = match.length; if ( !seed ) { // Try to minimize operations if there is only one group if ( match.length === 1 ) { // Take a shortcut and set the context if the root selector is an ID tokens = match[0] = match[0].slice( 0 ); if ( tokens.length > 2 && (token = tokens[0]).type === "ID" && context.nodeType === 9 && !xml && Expr.relative[ tokens[1].type ] ) { context = Expr.find["ID"]( token.matches[0].replace( rbackslash, "" ), context, xml )[0]; if ( !context ) { return results; } selector = selector.slice( tokens.shift().length ); } // Fetch a seed set for right-to-left matching for ( i = matchExpr["POS"].test( selector ) ? 
-1 : tokens.length - 1; i >= 0; i-- ) { token = tokens[i]; // Abort if we hit a combinator if ( Expr.relative[ (type = token.type) ] ) { break; } if ( (find = Expr.find[ type ]) ) { // Search, expanding context for leading sibling combinators if ( (seed = find( token.matches[0].replace( rbackslash, "" ), rsibling.test( tokens[0].type ) && context.parentNode || context, xml )) ) { // If seed is empty or no tokens remain, we can return early tokens.splice( i, 1 ); selector = seed.length && tokens.join(""); if ( !selector ) { push.apply( results, slice.call( seed, 0 ) ); return results; } break; } } } } } // Compile and execute a filtering function // Provide `match` to avoid retokenization if we modified the selector above compile( selector, match )( seed, context, xml, results, rsibling.test( selector ) ); return results; } if ( document.querySelectorAll ) { (function() { var disconnectedMatch, oldSelect = select, rescape = /'|\\/g, rattributeQuotes = /\=[\x20\t\r\n\f]*([^'"\]]*)[\x20\t\r\n\f]*\]/g, // qSa(:focus) reports false when true (Chrome 21), no need to also add to buggyMatches since matches checks buggyQSA // A support test would require too much code (would include document ready) rbuggyQSA = [ ":focus" ], // matchesSelector(:active) reports false when true (IE9/Opera 11.5) // A support test would require too much code (would include document ready) // just skip matchesSelector for :active rbuggyMatches = [ ":active" ], matches = docElem.matchesSelector || docElem.mozMatchesSelector || docElem.webkitMatchesSelector || docElem.oMatchesSelector || docElem.msMatchesSelector; // Build QSA regex // Regex strategy adopted from Diego Perini assert(function( div ) { // Select is set to empty string on purpose // This is to test IE's treatment of not explictly // setting a boolean content attribute, // since its presence should be enough // http://bugs.jquery.com/ticket/12359 div.innerHTML = ""; // IE8 - Some boolean attributes are not treated correctly if ( !div.querySelectorAll("[selected]").length ) { rbuggyQSA.push( "\\[" + whitespace + "*(?:checked|disabled|ismap|multiple|readonly|selected|value)" ); } // Webkit/Opera - :checked should return selected option elements // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked // IE8 throws error here (do not put tests after this one) if ( !div.querySelectorAll(":checked").length ) { rbuggyQSA.push(":checked"); } }); assert(function( div ) { // Opera 10-12/IE9 - ^= $= *= and empty values // Should not select anything div.innerHTML = "
<p test=''></p>
"; if ( div.querySelectorAll("[test^='']").length ) { rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:\"\"|'')" ); } // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled) // IE8 throws error here (do not put tests after this one) div.innerHTML = ""; if ( !div.querySelectorAll(":enabled").length ) { rbuggyQSA.push(":enabled", ":disabled"); } }); // rbuggyQSA always contains :focus, so no need for a length check rbuggyQSA = /* rbuggyQSA.length && */ new RegExp( rbuggyQSA.join("|") ); select = function( selector, context, results, seed, xml ) { // Only use querySelectorAll when not filtering, // when this is not xml, // and when no QSA bugs apply if ( !seed && !xml && !rbuggyQSA.test( selector ) ) { var groups, i, old = true, nid = expando, newContext = context, newSelector = context.nodeType === 9 && selector; // qSA works strangely on Element-rooted queries // We can work around this by specifying an extra ID on the root // and working up from there (Thanks to Andrew Dupont for the technique) // IE 8 doesn't work on object elements if ( context.nodeType === 1 && context.nodeName.toLowerCase() !== "object" ) { groups = tokenize( selector ); if ( (old = context.getAttribute("id")) ) { nid = old.replace( rescape, "\\$&" ); } else { context.setAttribute( "id", nid ); } nid = "[id='" + nid + "'] "; i = groups.length; while ( i-- ) { groups[i] = nid + groups[i].join(""); } newContext = rsibling.test( selector ) && context.parentNode || context; newSelector = groups.join(","); } if ( newSelector ) { try { push.apply( results, slice.call( newContext.querySelectorAll( newSelector ), 0 ) ); return results; } catch(qsaError) { } finally { if ( !old ) { context.removeAttribute("id"); } } } } return oldSelect( selector, context, results, seed, xml ); }; if ( matches ) { assert(function( div ) { // Check to see if it's possible to do matchesSelector // on a disconnected node (IE 9) disconnectedMatch = matches.call( div, "div" ); // This should fail with an exception // Gecko does not error, returns false instead try { matches.call( div, "[test!='']:sizzle" ); rbuggyMatches.push( "!=", pseudos ); } catch ( e ) {} }); // rbuggyMatches always contains :active and :focus, so no need for a length check rbuggyMatches = /* rbuggyMatches.length && */ new RegExp( rbuggyMatches.join("|") ); Sizzle.matchesSelector = function( elem, expr ) { // Make sure that attribute selectors are quoted expr = expr.replace( rattributeQuotes, "='$1']" ); // rbuggyMatches always contains :active, so no need for an existence check if ( !isXML( elem ) && !rbuggyMatches.test( expr ) && !rbuggyQSA.test( expr ) ) { try { var ret = matches.call( elem, expr ); // IE 9's matchesSelector returns false on disconnected nodes if ( ret || disconnectedMatch || // As well, disconnected nodes are said to be in a document // fragment in IE 9 elem.document && elem.document.nodeType !== 11 ) { return ret; } } catch(e) {} } return Sizzle( expr, null, null, [ elem ] ).length > 0; }; } })(); } // Deprecated Expr.pseudos["nth"] = Expr.pseudos["eq"]; // Back-compat function setFilters() {} Expr.filters = setFilters.prototype = Expr.pseudos; Expr.setFilters = new setFilters(); // Override sizzle attribute retrieval Sizzle.attr = jQuery.attr; jQuery.find = Sizzle; jQuery.expr = Sizzle.selectors; jQuery.expr[":"] = jQuery.expr.pseudos; jQuery.unique = Sizzle.uniqueSort; jQuery.text = Sizzle.getText; jQuery.isXMLDoc = Sizzle.isXML; jQuery.contains = Sizzle.contains; })( window ); var runtil = /Until$/, rparentsprev = 
/^(?:parents|prev(?:Until|All))/, isSimple = /^.[^:#\[\.,]*$/, rneedsContext = jQuery.expr.match.needsContext, // methods guaranteed to produce a unique set when starting from a unique set guaranteedUnique = { children: true, contents: true, next: true, prev: true }; jQuery.fn.extend({ find: function( selector ) { var i, l, length, n, r, ret, self = this; if ( typeof selector !== "string" ) { return jQuery( selector ).filter(function() { for ( i = 0, l = self.length; i < l; i++ ) { if ( jQuery.contains( self[ i ], this ) ) { return true; } } }); } ret = this.pushStack( "", "find", selector ); for ( i = 0, l = this.length; i < l; i++ ) { length = ret.length; jQuery.find( selector, this[i], ret ); if ( i > 0 ) { // Make sure that the results are unique for ( n = length; n < ret.length; n++ ) { for ( r = 0; r < length; r++ ) { if ( ret[r] === ret[n] ) { ret.splice(n--, 1); break; } } } } } return ret; }, has: function( target ) { var i, targets = jQuery( target, this ), len = targets.length; return this.filter(function() { for ( i = 0; i < len; i++ ) { if ( jQuery.contains( this, targets[i] ) ) { return true; } } }); }, not: function( selector ) { return this.pushStack( winnow(this, selector, false), "not", selector); }, filter: function( selector ) { return this.pushStack( winnow(this, selector, true), "filter", selector ); }, is: function( selector ) { return !!selector && ( typeof selector === "string" ? // If this is a positional/relative selector, check membership in the returned set // so $("p:first").is("p:last") won't return true for a doc with two "p". rneedsContext.test( selector ) ? jQuery( selector, this.context ).index( this[0] ) >= 0 : jQuery.filter( selector, this ).length > 0 : this.filter( selector ).length > 0 ); }, closest: function( selectors, context ) { var cur, i = 0, l = this.length, ret = [], pos = rneedsContext.test( selectors ) || typeof selectors !== "string" ? jQuery( selectors, context || this.context ) : 0; for ( ; i < l; i++ ) { cur = this[i]; while ( cur && cur.ownerDocument && cur !== context && cur.nodeType !== 11 ) { if ( pos ? pos.index(cur) > -1 : jQuery.find.matchesSelector(cur, selectors) ) { ret.push( cur ); break; } cur = cur.parentNode; } } ret = ret.length > 1 ? jQuery.unique( ret ) : ret; return this.pushStack( ret, "closest", selectors ); }, // Determine the position of an element within // the matched set of elements index: function( elem ) { // No argument, return index in parent if ( !elem ) { return ( this[0] && this[0].parentNode ) ? this.prevAll().length : -1; } // index in selector if ( typeof elem === "string" ) { return jQuery.inArray( this[0], jQuery( elem ) ); } // Locate the position of the desired element return jQuery.inArray( // If it receives a jQuery object, the first element is used elem.jquery ? elem[0] : elem, this ); }, add: function( selector, context ) { var set = typeof selector === "string" ? jQuery( selector, context ) : jQuery.makeArray( selector && selector.nodeType ? [ selector ] : selector ), all = jQuery.merge( this.get(), set ); return this.pushStack( isDisconnected( set[0] ) || isDisconnected( all[0] ) ? all : jQuery.unique( all ) ); }, addBack: function( selector ) { return this.add( selector == null ? this.prevObject : this.prevObject.filter(selector) ); } }); jQuery.fn.andSelf = jQuery.fn.addBack; // A painfully simple check to see if an element is disconnected // from a document (should be improved, where feasible). 
function isDisconnected( node ) { return !node || !node.parentNode || node.parentNode.nodeType === 11; } function sibling( cur, dir ) { do { cur = cur[ dir ]; } while ( cur && cur.nodeType !== 1 ); return cur; } jQuery.each({ parent: function( elem ) { var parent = elem.parentNode; return parent && parent.nodeType !== 11 ? parent : null; }, parents: function( elem ) { return jQuery.dir( elem, "parentNode" ); }, parentsUntil: function( elem, i, until ) { return jQuery.dir( elem, "parentNode", until ); }, next: function( elem ) { return sibling( elem, "nextSibling" ); }, prev: function( elem ) { return sibling( elem, "previousSibling" ); }, nextAll: function( elem ) { return jQuery.dir( elem, "nextSibling" ); }, prevAll: function( elem ) { return jQuery.dir( elem, "previousSibling" ); }, nextUntil: function( elem, i, until ) { return jQuery.dir( elem, "nextSibling", until ); }, prevUntil: function( elem, i, until ) { return jQuery.dir( elem, "previousSibling", until ); }, siblings: function( elem ) { return jQuery.sibling( ( elem.parentNode || {} ).firstChild, elem ); }, children: function( elem ) { return jQuery.sibling( elem.firstChild ); }, contents: function( elem ) { return jQuery.nodeName( elem, "iframe" ) ? elem.contentDocument || elem.contentWindow.document : jQuery.merge( [], elem.childNodes ); } }, function( name, fn ) { jQuery.fn[ name ] = function( until, selector ) { var ret = jQuery.map( this, fn, until ); if ( !runtil.test( name ) ) { selector = until; } if ( selector && typeof selector === "string" ) { ret = jQuery.filter( selector, ret ); } ret = this.length > 1 && !guaranteedUnique[ name ] ? jQuery.unique( ret ) : ret; if ( this.length > 1 && rparentsprev.test( name ) ) { ret = ret.reverse(); } return this.pushStack( ret, name, core_slice.call( arguments ).join(",") ); }; }); jQuery.extend({ filter: function( expr, elems, not ) { if ( not ) { expr = ":not(" + expr + ")"; } return elems.length === 1 ? jQuery.find.matchesSelector(elems[0], expr) ? 
[ elems[0] ] : [] : jQuery.find.matches(expr, elems); }, dir: function( elem, dir, until ) { var matched = [], cur = elem[ dir ]; while ( cur && cur.nodeType !== 9 && (until === undefined || cur.nodeType !== 1 || !jQuery( cur ).is( until )) ) { if ( cur.nodeType === 1 ) { matched.push( cur ); } cur = cur[dir]; } return matched; }, sibling: function( n, elem ) { var r = []; for ( ; n; n = n.nextSibling ) { if ( n.nodeType === 1 && n !== elem ) { r.push( n ); } } return r; } }); // Implement the identical functionality for filter and not function winnow( elements, qualifier, keep ) { // Can't pass null or undefined to indexOf in Firefox 4 // Set to 0 to skip string check qualifier = qualifier || 0; if ( jQuery.isFunction( qualifier ) ) { return jQuery.grep(elements, function( elem, i ) { var retVal = !!qualifier.call( elem, i, elem ); return retVal === keep; }); } else if ( qualifier.nodeType ) { return jQuery.grep(elements, function( elem, i ) { return ( elem === qualifier ) === keep; }); } else if ( typeof qualifier === "string" ) { var filtered = jQuery.grep(elements, function( elem ) { return elem.nodeType === 1; }); if ( isSimple.test( qualifier ) ) { return jQuery.filter(qualifier, filtered, !keep); } else { qualifier = jQuery.filter( qualifier, filtered ); } } return jQuery.grep(elements, function( elem, i ) { return ( jQuery.inArray( elem, qualifier ) >= 0 ) === keep; }); } function createSafeFragment( document ) { var list = nodeNames.split( "|" ), safeFrag = document.createDocumentFragment(); if ( safeFrag.createElement ) { while ( list.length ) { safeFrag.createElement( list.pop() ); } } return safeFrag; } var nodeNames = "abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|" + "header|hgroup|mark|meter|nav|output|progress|section|summary|time|video", rinlinejQuery = / jQuery\d+="(?:null|\d+)"/g, rleadingWhitespace = /^\s+/, rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi, rtagName = /<([\w:]+)/, rtbody = /]", "i"), rcheckableType = /^(?:checkbox|radio)$/, // checked="checked" or checked rchecked = /checked\s*(?:[^=]|=\s*.checked.)/i, rscriptType = /\/(java|ecma)script/i, rcleanScript = /^\s*\s*$/g, wrapMap = { option: [ 1, "" ], legend: [ 1, "
", "
" ], thead: [ 1, "", "
" ], tr: [ 2, "", "
" ], td: [ 3, "", "
" ], col: [ 2, "", "
" ], area: [ 1, "", "" ], _default: [ 0, "", "" ] }, safeFragment = createSafeFragment( document ), fragmentDiv = safeFragment.appendChild( document.createElement("div") ); wrapMap.optgroup = wrapMap.option; wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; wrapMap.th = wrapMap.td; // IE6-8 can't serialize link, script, style, or any html5 (NoScope) tags, // unless wrapped in a div with non-breaking characters in front of it. if ( !jQuery.support.htmlSerialize ) { wrapMap._default = [ 1, "X
", "
" ]; } jQuery.fn.extend({ text: function( value ) { return jQuery.access( this, function( value ) { return value === undefined ? jQuery.text( this ) : this.empty().append( ( this[0] && this[0].ownerDocument || document ).createTextNode( value ) ); }, null, value, arguments.length ); }, wrapAll: function( html ) { if ( jQuery.isFunction( html ) ) { return this.each(function(i) { jQuery(this).wrapAll( html.call(this, i) ); }); } if ( this[0] ) { // The elements to wrap the target around var wrap = jQuery( html, this[0].ownerDocument ).eq(0).clone(true); if ( this[0].parentNode ) { wrap.insertBefore( this[0] ); } wrap.map(function() { var elem = this; while ( elem.firstChild && elem.firstChild.nodeType === 1 ) { elem = elem.firstChild; } return elem; }).append( this ); } return this; }, wrapInner: function( html ) { if ( jQuery.isFunction( html ) ) { return this.each(function(i) { jQuery(this).wrapInner( html.call(this, i) ); }); } return this.each(function() { var self = jQuery( this ), contents = self.contents(); if ( contents.length ) { contents.wrapAll( html ); } else { self.append( html ); } }); }, wrap: function( html ) { var isFunction = jQuery.isFunction( html ); return this.each(function(i) { jQuery( this ).wrapAll( isFunction ? html.call(this, i) : html ); }); }, unwrap: function() { return this.parent().each(function() { if ( !jQuery.nodeName( this, "body" ) ) { jQuery( this ).replaceWith( this.childNodes ); } }).end(); }, append: function() { return this.domManip(arguments, true, function( elem ) { if ( this.nodeType === 1 || this.nodeType === 11 ) { this.appendChild( elem ); } }); }, prepend: function() { return this.domManip(arguments, true, function( elem ) { if ( this.nodeType === 1 || this.nodeType === 11 ) { this.insertBefore( elem, this.firstChild ); } }); }, before: function() { if ( !isDisconnected( this[0] ) ) { return this.domManip(arguments, false, function( elem ) { this.parentNode.insertBefore( elem, this ); }); } if ( arguments.length ) { var set = jQuery.clean( arguments ); return this.pushStack( jQuery.merge( set, this ), "before", this.selector ); } }, after: function() { if ( !isDisconnected( this[0] ) ) { return this.domManip(arguments, false, function( elem ) { this.parentNode.insertBefore( elem, this.nextSibling ); }); } if ( arguments.length ) { var set = jQuery.clean( arguments ); return this.pushStack( jQuery.merge( this, set ), "after", this.selector ); } }, // keepData is for internal use only--do not document remove: function( selector, keepData ) { var elem, i = 0; for ( ; (elem = this[i]) != null; i++ ) { if ( !selector || jQuery.filter( selector, [ elem ] ).length ) { if ( !keepData && elem.nodeType === 1 ) { jQuery.cleanData( elem.getElementsByTagName("*") ); jQuery.cleanData( [ elem ] ); } if ( elem.parentNode ) { elem.parentNode.removeChild( elem ); } } } return this; }, empty: function() { var elem, i = 0; for ( ; (elem = this[i]) != null; i++ ) { // Remove element nodes and prevent memory leaks if ( elem.nodeType === 1 ) { jQuery.cleanData( elem.getElementsByTagName("*") ); } // Remove any remaining nodes while ( elem.firstChild ) { elem.removeChild( elem.firstChild ); } } return this; }, clone: function( dataAndEvents, deepDataAndEvents ) { dataAndEvents = dataAndEvents == null ? false : dataAndEvents; deepDataAndEvents = deepDataAndEvents == null ? 
dataAndEvents : deepDataAndEvents; return this.map( function () { return jQuery.clone( this, dataAndEvents, deepDataAndEvents ); }); }, html: function( value ) { return jQuery.access( this, function( value ) { var elem = this[0] || {}, i = 0, l = this.length; if ( value === undefined ) { return elem.nodeType === 1 ? elem.innerHTML.replace( rinlinejQuery, "" ) : undefined; } // See if we can take a shortcut and just use innerHTML if ( typeof value === "string" && !rnoInnerhtml.test( value ) && ( jQuery.support.htmlSerialize || !rnoshimcache.test( value ) ) && ( jQuery.support.leadingWhitespace || !rleadingWhitespace.test( value ) ) && !wrapMap[ ( rtagName.exec( value ) || ["", ""] )[1].toLowerCase() ] ) { value = value.replace( rxhtmlTag, "<$1>" ); try { for (; i < l; i++ ) { // Remove element nodes and prevent memory leaks elem = this[i] || {}; if ( elem.nodeType === 1 ) { jQuery.cleanData( elem.getElementsByTagName( "*" ) ); elem.innerHTML = value; } } elem = 0; // If using innerHTML throws an exception, use the fallback method } catch(e) {} } if ( elem ) { this.empty().append( value ); } }, null, value, arguments.length ); }, replaceWith: function( value ) { if ( !isDisconnected( this[0] ) ) { // Make sure that the elements are removed from the DOM before they are inserted // this can help fix replacing a parent with child elements if ( jQuery.isFunction( value ) ) { return this.each(function(i) { var self = jQuery(this), old = self.html(); self.replaceWith( value.call( this, i, old ) ); }); } if ( typeof value !== "string" ) { value = jQuery( value ).detach(); } return this.each(function() { var next = this.nextSibling, parent = this.parentNode; jQuery( this ).remove(); if ( next ) { jQuery(next).before( value ); } else { jQuery(parent).append( value ); } }); } return this.length ? this.pushStack( jQuery(jQuery.isFunction(value) ? value() : value), "replaceWith", value ) : this; }, detach: function( selector ) { return this.remove( selector, true ); }, domManip: function( args, table, callback ) { // Flatten any nested arrays args = [].concat.apply( [], args ); var results, first, fragment, iNoClone, i = 0, value = args[0], scripts = [], l = this.length; // We can't cloneNode fragments that contain checked, in WebKit if ( !jQuery.support.checkClone && l > 1 && typeof value === "string" && rchecked.test( value ) ) { return this.each(function() { jQuery(this).domManip( args, table, callback ); }); } if ( jQuery.isFunction(value) ) { return this.each(function(i) { var self = jQuery(this); args[0] = value.call( this, i, table ? self.html() : undefined ); self.domManip( args, table, callback ); }); } if ( this[0] ) { results = jQuery.buildFragment( args, this, scripts ); fragment = results.fragment; first = fragment.firstChild; if ( fragment.childNodes.length === 1 ) { fragment = first; } if ( first ) { table = table && jQuery.nodeName( first, "tr" ); // Use the original fragment for the last item instead of the first because it can end up // being emptied incorrectly in certain situations (#8070). // Fragments from the fragment cache must always be cloned and never used in place. for ( iNoClone = results.cacheable || l - 1; i < l; i++ ) { callback.call( table && jQuery.nodeName( this[i], "table" ) ? findOrAppend( this[i], "tbody" ) : this[i], i === iNoClone ? 
fragment : jQuery.clone( fragment, true, true ) ); } } // Fix #11809: Avoid leaking memory fragment = first = null; if ( scripts.length ) { jQuery.each( scripts, function( i, elem ) { if ( elem.src ) { if ( jQuery.ajax ) { jQuery.ajax({ url: elem.src, type: "GET", dataType: "script", async: false, global: false, "throws": true }); } else { jQuery.error("no ajax"); } } else { jQuery.globalEval( ( elem.text || elem.textContent || elem.innerHTML || "" ).replace( rcleanScript, "" ) ); } if ( elem.parentNode ) { elem.parentNode.removeChild( elem ); } }); } } return this; } }); function findOrAppend( elem, tag ) { return elem.getElementsByTagName( tag )[0] || elem.appendChild( elem.ownerDocument.createElement( tag ) ); } function cloneCopyEvent( src, dest ) { if ( dest.nodeType !== 1 || !jQuery.hasData( src ) ) { return; } var type, i, l, oldData = jQuery._data( src ), curData = jQuery._data( dest, oldData ), events = oldData.events; if ( events ) { delete curData.handle; curData.events = {}; for ( type in events ) { for ( i = 0, l = events[ type ].length; i < l; i++ ) { jQuery.event.add( dest, type, events[ type ][ i ] ); } } } // make the cloned public data object a copy from the original if ( curData.data ) { curData.data = jQuery.extend( {}, curData.data ); } } function cloneFixAttributes( src, dest ) { var nodeName; // We do not need to do anything for non-Elements if ( dest.nodeType !== 1 ) { return; } // clearAttributes removes the attributes, which we don't want, // but also removes the attachEvent events, which we *do* want if ( dest.clearAttributes ) { dest.clearAttributes(); } // mergeAttributes, in contrast, only merges back on the // original attributes, not the events if ( dest.mergeAttributes ) { dest.mergeAttributes( src ); } nodeName = dest.nodeName.toLowerCase(); if ( nodeName === "object" ) { // IE6-10 improperly clones children of object elements using classid. // IE10 throws NoModificationAllowedError if parent is null, #12132. if ( dest.parentNode ) { dest.outerHTML = src.outerHTML; } // This path appears unavoidable for IE9. When cloning an object // element in IE9, the outerHTML strategy above is not sufficient. // If the src has innerHTML and the destination does not, // copy the src.innerHTML into the dest.innerHTML. #10324 if ( jQuery.support.html5Clone && (src.innerHTML && !jQuery.trim(dest.innerHTML)) ) { dest.innerHTML = src.innerHTML; } } else if ( nodeName === "input" && rcheckableType.test( src.type ) ) { // IE6-8 fails to persist the checked state of a cloned checkbox // or radio button. 
Worse, IE6-7 fail to give the cloned element // a checked appearance if the defaultChecked value isn't also set dest.defaultChecked = dest.checked = src.checked; // IE6-7 get confused and end up setting the value of a cloned // checkbox/radio button to an empty string instead of "on" if ( dest.value !== src.value ) { dest.value = src.value; } // IE6-8 fails to return the selected option to the default selected // state when cloning options } else if ( nodeName === "option" ) { dest.selected = src.defaultSelected; // IE6-8 fails to set the defaultValue to the correct value when // cloning other types of input fields } else if ( nodeName === "input" || nodeName === "textarea" ) { dest.defaultValue = src.defaultValue; // IE blanks contents when cloning scripts } else if ( nodeName === "script" && dest.text !== src.text ) { dest.text = src.text; } // Event data gets referenced instead of copied if the expando // gets copied too dest.removeAttribute( jQuery.expando ); } jQuery.buildFragment = function( args, context, scripts ) { var fragment, cacheable, cachehit, first = args[ 0 ]; // Set context from what may come in as undefined or a jQuery collection or a node // Updated to fix #12266 where accessing context[0] could throw an exception in IE9/10 & // also doubles as fix for #8950 where plain objects caused createDocumentFragment exception context = context || document; context = !context.nodeType && context[0] || context; context = context.ownerDocument || context; // Only cache "small" (1/2 KB) HTML strings that are associated with the main document // Cloning options loses the selected state, so don't cache them // IE 6 doesn't like it when you put or elements in a fragment // Also, WebKit does not clone 'checked' attributes on cloneNode, so don't cache // Lastly, IE6,7,8 will not correctly reuse cached fragments that were created from unknown elems #10501 if ( args.length === 1 && typeof first === "string" && first.length < 512 && context === document && first.charAt(0) === "<" && !rnocache.test( first ) && (jQuery.support.checkClone || !rchecked.test( first )) && (jQuery.support.html5Clone || !rnoshimcache.test( first )) ) { // Mark cacheable and look for a hit cacheable = true; fragment = jQuery.fragments[ first ]; cachehit = fragment !== undefined; } if ( !fragment ) { fragment = context.createDocumentFragment(); jQuery.clean( args, context, fragment, scripts ); // Update the cache, but only store false // unless this is a second parsing of the same content if ( cacheable ) { jQuery.fragments[ first ] = cachehit && fragment; } } return { fragment: fragment, cacheable: cacheable }; }; jQuery.fragments = {}; jQuery.each({ appendTo: "append", prependTo: "prepend", insertBefore: "before", insertAfter: "after", replaceAll: "replaceWith" }, function( name, original ) { jQuery.fn[ name ] = function( selector ) { var elems, i = 0, ret = [], insert = jQuery( selector ), l = insert.length, parent = this.length === 1 && this[0].parentNode; if ( (parent == null || parent && parent.nodeType === 11 && parent.childNodes.length === 1) && l === 1 ) { insert[ original ]( this[0] ); return this; } else { for ( ; i < l; i++ ) { elems = ( i > 0 ? 
this.clone(true) : this ).get(); jQuery( insert[i] )[ original ]( elems ); ret = ret.concat( elems ); } return this.pushStack( ret, name, insert.selector ); } }; }); function getAll( elem ) { if ( typeof elem.getElementsByTagName !== "undefined" ) { return elem.getElementsByTagName( "*" ); } else if ( typeof elem.querySelectorAll !== "undefined" ) { return elem.querySelectorAll( "*" ); } else { return []; } } // Used in clean, fixes the defaultChecked property function fixDefaultChecked( elem ) { if ( rcheckableType.test( elem.type ) ) { elem.defaultChecked = elem.checked; } } jQuery.extend({ clone: function( elem, dataAndEvents, deepDataAndEvents ) { var srcElements, destElements, i, clone; if ( jQuery.support.html5Clone || jQuery.isXMLDoc(elem) || !rnoshimcache.test( "<" + elem.nodeName + ">" ) ) { clone = elem.cloneNode( true ); // IE<=8 does not properly clone detached, unknown element nodes } else { fragmentDiv.innerHTML = elem.outerHTML; fragmentDiv.removeChild( clone = fragmentDiv.firstChild ); } if ( (!jQuery.support.noCloneEvent || !jQuery.support.noCloneChecked) && (elem.nodeType === 1 || elem.nodeType === 11) && !jQuery.isXMLDoc(elem) ) { // IE copies events bound via attachEvent when using cloneNode. // Calling detachEvent on the clone will also remove the events // from the original. In order to get around this, we use some // proprietary methods to clear the events. Thanks to MooTools // guys for this hotness. cloneFixAttributes( elem, clone ); // Using Sizzle here is crazy slow, so we use getElementsByTagName instead srcElements = getAll( elem ); destElements = getAll( clone ); // Weird iteration because IE will replace the length property // with an element if you are cloning the body and one of the // elements on the page has a name or id of "length" for ( i = 0; srcElements[i]; ++i ) { // Ensure that the destination node is not null; Fixes #9587 if ( destElements[i] ) { cloneFixAttributes( srcElements[i], destElements[i] ); } } } // Copy the events from the original to the clone if ( dataAndEvents ) { cloneCopyEvent( elem, clone ); if ( deepDataAndEvents ) { srcElements = getAll( elem ); destElements = getAll( clone ); for ( i = 0; srcElements[i]; ++i ) { cloneCopyEvent( srcElements[i], destElements[i] ); } } } srcElements = destElements = null; // Return the cloned set return clone; }, clean: function( elems, context, fragment, scripts ) { var i, j, elem, tag, wrap, depth, div, hasBody, tbody, len, handleScript, jsTags, safe = context === document && safeFragment, ret = []; // Ensure that context is a document if ( !context || typeof context.createDocumentFragment === "undefined" ) { context = document; } // Use the already-created safe fragment if context permits for ( i = 0; (elem = elems[i]) != null; i++ ) { if ( typeof elem === "number" ) { elem += ""; } if ( !elem ) { continue; } // Convert html string into DOM nodes if ( typeof elem === "string" ) { if ( !rhtml.test( elem ) ) { elem = context.createTextNode( elem ); } else { // Ensure a safe container in which to render the html safe = safe || createSafeFragment( context ); div = context.createElement("div"); safe.appendChild( div ); // Fix "XHTML"-style tags in all browsers elem = elem.replace(rxhtmlTag, "<$1>"); // Go to html and back, then peel off extra wrappers tag = ( rtagName.exec( elem ) || ["", ""] )[1].toLowerCase(); wrap = wrapMap[ tag ] || wrapMap._default; depth = wrap[0]; div.innerHTML = wrap[1] + elem + wrap[2]; // Move to the right depth while ( depth-- ) { div = div.lastChild; } // Remove IE's 
autoinserted <tbody> from table fragments if ( !jQuery.support.tbody ) { // String was a <table>, *may* have spurious <tbody> hasBody = rtbody.test(elem); tbody = tag === "table" && !hasBody ? div.firstChild && div.firstChild.childNodes : // String was a bare <thead> or <tfoot> wrap[1] === "<table>
" && !hasBody ? div.childNodes : []; for ( j = tbody.length - 1; j >= 0 ; --j ) { if ( jQuery.nodeName( tbody[ j ], "tbody" ) && !tbody[ j ].childNodes.length ) { tbody[ j ].parentNode.removeChild( tbody[ j ] ); } } } // IE completely kills leading whitespace when innerHTML is used if ( !jQuery.support.leadingWhitespace && rleadingWhitespace.test( elem ) ) { div.insertBefore( context.createTextNode( rleadingWhitespace.exec(elem)[0] ), div.firstChild ); } elem = div.childNodes; // Take out of fragment container (we need a fresh div each time) div.parentNode.removeChild( div ); } } if ( elem.nodeType ) { ret.push( elem ); } else { jQuery.merge( ret, elem ); } } // Fix #11356: Clear elements from safeFragment if ( div ) { elem = div = safe = null; } // Reset defaultChecked for any radios and checkboxes // about to be appended to the DOM in IE 6/7 (#8060) if ( !jQuery.support.appendChecked ) { for ( i = 0; (elem = ret[i]) != null; i++ ) { if ( jQuery.nodeName( elem, "input" ) ) { fixDefaultChecked( elem ); } else if ( typeof elem.getElementsByTagName !== "undefined" ) { jQuery.grep( elem.getElementsByTagName("input"), fixDefaultChecked ); } } } // Append elements to a provided document fragment if ( fragment ) { // Special handling of each script element handleScript = function( elem ) { // Check if we consider it executable if ( !elem.type || rscriptType.test( elem.type ) ) { // Detach the script and store it in the scripts array (if provided) or the fragment // Return truthy to indicate that it has been handled return scripts ? scripts.push( elem.parentNode ? elem.parentNode.removeChild( elem ) : elem ) : fragment.appendChild( elem ); } }; for ( i = 0; (elem = ret[i]) != null; i++ ) { // Check if we're done after handling an executable script if ( !( jQuery.nodeName( elem, "script" ) && handleScript( elem ) ) ) { // Append to fragment and handle embedded scripts fragment.appendChild( elem ); if ( typeof elem.getElementsByTagName !== "undefined" ) { // handleScript alters the DOM, so use jQuery.merge to ensure snapshot iteration jsTags = jQuery.grep( jQuery.merge( [], elem.getElementsByTagName("script") ), handleScript ); // Splice the scripts into ret after their former ancestor and advance our index beyond them ret.splice.apply( ret, [i + 1, 0].concat( jsTags ) ); i += jsTags.length; } } } } return ret; }, cleanData: function( elems, /* internal */ acceptData ) { var data, id, elem, type, i = 0, internalKey = jQuery.expando, cache = jQuery.cache, deleteExpando = jQuery.support.deleteExpando, special = jQuery.event.special; for ( ; (elem = elems[i]) != null; i++ ) { if ( acceptData || jQuery.acceptData( elem ) ) { id = elem[ internalKey ]; data = id && cache[ id ]; if ( data ) { if ( data.events ) { for ( type in data.events ) { if ( special[ type ] ) { jQuery.event.remove( elem, type ); // This is a shortcut to avoid jQuery.event.remove's overhead } else { jQuery.removeEvent( elem, type, data.handle ); } } } // Remove cache only if it was not already removed by jQuery.event.remove if ( cache[ id ] ) { delete cache[ id ]; // IE does not allow us to delete expando properties from nodes, // nor does it have a removeAttribute function on Document nodes; // we must handle all of these cases if ( deleteExpando ) { delete elem[ internalKey ]; } else if ( elem.removeAttribute ) { elem.removeAttribute( internalKey ); } else { elem[ internalKey ] = null; } jQuery.deletedIds.push( id ); } } } } } }); // Limit scope pollution from any deprecated API (function() { var matched, browser; // Use of 
jQuery.browser is frowned upon. // More details: http://api.jquery.com/jQuery.browser // jQuery.uaMatch maintained for back-compat jQuery.uaMatch = function( ua ) { ua = ua.toLowerCase(); var match = /(chrome)[ \/]([\w.]+)/.exec( ua ) || /(webkit)[ \/]([\w.]+)/.exec( ua ) || /(opera)(?:.*version|)[ \/]([\w.]+)/.exec( ua ) || /(msie) ([\w.]+)/.exec( ua ) || ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec( ua ) || []; return { browser: match[ 1 ] || "", version: match[ 2 ] || "0" }; }; matched = jQuery.uaMatch( navigator.userAgent ); browser = {}; if ( matched.browser ) { browser[ matched.browser ] = true; browser.version = matched.version; } // Chrome is Webkit, but Webkit is also Safari. if ( browser.chrome ) { browser.webkit = true; } else if ( browser.webkit ) { browser.safari = true; } jQuery.browser = browser; jQuery.sub = function() { function jQuerySub( selector, context ) { return new jQuerySub.fn.init( selector, context ); } jQuery.extend( true, jQuerySub, this ); jQuerySub.superclass = this; jQuerySub.fn = jQuerySub.prototype = this(); jQuerySub.fn.constructor = jQuerySub; jQuerySub.sub = this.sub; jQuerySub.fn.init = function init( selector, context ) { if ( context && context instanceof jQuery && !(context instanceof jQuerySub) ) { context = jQuerySub( context ); } return jQuery.fn.init.call( this, selector, context, rootjQuerySub ); }; jQuerySub.fn.init.prototype = jQuerySub.fn; var rootjQuerySub = jQuerySub(document); return jQuerySub; }; })(); var curCSS, iframe, iframeDoc, ralpha = /alpha\([^)]*\)/i, ropacity = /opacity=([^)]*)/, rposition = /^(top|right|bottom|left)$/, // swappable if display is none or starts with table except "table", "table-cell", or "table-caption" // see here for display values: https://developer.mozilla.org/en-US/docs/CSS/display rdisplayswap = /^(none|table(?!-c[ea]).+)/, rmargin = /^margin/, rnumsplit = new RegExp( "^(" + core_pnum + ")(.*)$", "i" ), rnumnonpx = new RegExp( "^(" + core_pnum + ")(?!px)[a-z%]+$", "i" ), rrelNum = new RegExp( "^([-+])=(" + core_pnum + ")", "i" ), elemdisplay = { BODY: "block" }, cssShow = { position: "absolute", visibility: "hidden", display: "block" }, cssNormalTransform = { letterSpacing: 0, fontWeight: 400 }, cssExpand = [ "Top", "Right", "Bottom", "Left" ], cssPrefixes = [ "Webkit", "O", "Moz", "ms" ], eventsToggle = jQuery.fn.toggle; // return a css property mapped to a potentially vendor prefixed property function vendorPropName( style, name ) { // shortcut for names that are not vendor prefixed if ( name in style ) { return name; } // check for vendor prefixed names var capName = name.charAt(0).toUpperCase() + name.slice(1), origName = name, i = cssPrefixes.length; while ( i-- ) { name = cssPrefixes[ i ] + capName; if ( name in style ) { return name; } } return origName; } function isHidden( elem, el ) { elem = el || elem; return jQuery.css( elem, "display" ) === "none" || !jQuery.contains( elem.ownerDocument, elem ); } function showHide( elements, show ) { var elem, display, values = [], index = 0, length = elements.length; for ( ; index < length; index++ ) { elem = elements[ index ]; if ( !elem.style ) { continue; } values[ index ] = jQuery._data( elem, "olddisplay" ); if ( show ) { // Reset the inline display of this element to learn if it is // being hidden by cascaded rules or not if ( !values[ index ] && elem.style.display === "none" ) { elem.style.display = ""; } // Set elements which have been overridden with display: none // in a stylesheet to whatever the default browser style is 
// for such an element if ( elem.style.display === "" && isHidden( elem ) ) { values[ index ] = jQuery._data( elem, "olddisplay", css_defaultDisplay(elem.nodeName) ); } } else { display = curCSS( elem, "display" ); if ( !values[ index ] && display !== "none" ) { jQuery._data( elem, "olddisplay", display ); } } } // Set the display of most of the elements in a second loop // to avoid the constant reflow for ( index = 0; index < length; index++ ) { elem = elements[ index ]; if ( !elem.style ) { continue; } if ( !show || elem.style.display === "none" || elem.style.display === "" ) { elem.style.display = show ? values[ index ] || "" : "none"; } } return elements; } jQuery.fn.extend({ css: function( name, value ) { return jQuery.access( this, function( elem, name, value ) { return value !== undefined ? jQuery.style( elem, name, value ) : jQuery.css( elem, name ); }, name, value, arguments.length > 1 ); }, show: function() { return showHide( this, true ); }, hide: function() { return showHide( this ); }, toggle: function( state, fn2 ) { var bool = typeof state === "boolean"; if ( jQuery.isFunction( state ) && jQuery.isFunction( fn2 ) ) { return eventsToggle.apply( this, arguments ); } return this.each(function() { if ( bool ? state : isHidden( this ) ) { jQuery( this ).show(); } else { jQuery( this ).hide(); } }); } }); jQuery.extend({ // Add in style property hooks for overriding the default // behavior of getting and setting a style property cssHooks: { opacity: { get: function( elem, computed ) { if ( computed ) { // We should always get a number back from opacity var ret = curCSS( elem, "opacity" ); return ret === "" ? "1" : ret; } } } }, // Exclude the following css properties to add px cssNumber: { "fillOpacity": true, "fontWeight": true, "lineHeight": true, "opacity": true, "orphans": true, "widows": true, "zIndex": true, "zoom": true }, // Add in properties whose names you wish to fix before // setting or getting the value cssProps: { // normalize float css property "float": jQuery.support.cssFloat ? "cssFloat" : "styleFloat" }, // Get and set the style property on a DOM Node style: function( elem, name, value, extra ) { // Don't set styles on text and comment nodes if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) { return; } // Make sure that we're working with the right name var ret, type, hooks, origName = jQuery.camelCase( name ), style = elem.style; name = jQuery.cssProps[ origName ] || ( jQuery.cssProps[ origName ] = vendorPropName( style, origName ) ); // gets hook for the prefixed version // followed by the unprefixed version hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; // Check if we're setting a value if ( value !== undefined ) { type = typeof value; // convert relative number strings (+= or -=) to relative numbers. #7345 if ( type === "string" && (ret = rrelNum.exec( value )) ) { value = ( ret[1] + 1 ) * ret[2] + parseFloat( jQuery.css( elem, name ) ); // Fixes bug #9237 type = "number"; } // Make sure that NaN and null values aren't set. 
See: #7116 if ( value == null || type === "number" && isNaN( value ) ) { return; } // If a number was passed in, add 'px' to the (except for certain CSS properties) if ( type === "number" && !jQuery.cssNumber[ origName ] ) { value += "px"; } // If a hook was provided, use that value, otherwise just set the specified value if ( !hooks || !("set" in hooks) || (value = hooks.set( elem, value, extra )) !== undefined ) { // Wrapped to prevent IE from throwing errors when 'invalid' values are provided // Fixes bug #5509 try { style[ name ] = value; } catch(e) {} } } else { // If a hook was provided get the non-computed value from there if ( hooks && "get" in hooks && (ret = hooks.get( elem, false, extra )) !== undefined ) { return ret; } // Otherwise just get the value from the style object return style[ name ]; } }, css: function( elem, name, numeric, extra ) { var val, num, hooks, origName = jQuery.camelCase( name ); // Make sure that we're working with the right name name = jQuery.cssProps[ origName ] || ( jQuery.cssProps[ origName ] = vendorPropName( elem.style, origName ) ); // gets hook for the prefixed version // followed by the unprefixed version hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; // If a hook was provided get the computed value from there if ( hooks && "get" in hooks ) { val = hooks.get( elem, true, extra ); } // Otherwise, if a way to get the computed value exists, use that if ( val === undefined ) { val = curCSS( elem, name ); } //convert "normal" to computed value if ( val === "normal" && name in cssNormalTransform ) { val = cssNormalTransform[ name ]; } // Return, converting to number if forced or a qualifier was provided and val looks numeric if ( numeric || extra !== undefined ) { num = parseFloat( val ); return numeric || jQuery.isNumeric( num ) ? num || 0 : val; } return val; }, // A method for quickly swapping in/out CSS properties to get correct calculations swap: function( elem, options, callback ) { var ret, name, old = {}; // Remember the old values, and insert the new ones for ( name in options ) { old[ name ] = elem.style[ name ]; elem.style[ name ] = options[ name ]; } ret = callback.call( elem ); // Revert the old values for ( name in options ) { elem.style[ name ] = old[ name ]; } return ret; } }); // NOTE: To any future maintainer, we've window.getComputedStyle // because jsdom on node.js will break without it. 
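// Illustrative usage sketch (added annotation, not part of the jQuery source):
// jQuery.style/jQuery.css above back the public .css() getter/setter, and
// jQuery.cssHooks lets callers intercept how a property is read or written.
// The "#box" selector and the borderRadius hook below are hypothetical.
function exampleCssUsage() {
	// Setter and getter through the public API; plain numbers get "px"
	// appended unless the property is listed in jQuery.cssNumber.
	jQuery( "#box" ).css( "width", 200 );    // sets style.width = "200px"
	jQuery( "#box" ).css( "opacity", 0.5 );  // opacity is unitless (cssNumber)
	var width = jQuery( "#box" ).css( "width" ); // computed value, e.g. "200px"

	// A custom hook: expose border-radius reads as a number of pixels.
	jQuery.cssHooks.borderRadius = {
		get: function( elem, computed, extra ) {
			return parseFloat( jQuery.css( elem, "borderTopLeftRadius" ) ) || 0;
		}
	};
	return width;
}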
if ( window.getComputedStyle ) { curCSS = function( elem, name ) { var ret, width, minWidth, maxWidth, computed = window.getComputedStyle( elem, null ), style = elem.style; if ( computed ) { // getPropertyValue is only needed for .css('filter') in IE9, see #12537 ret = computed.getPropertyValue( name ) || computed[ name ]; if ( ret === "" && !jQuery.contains( elem.ownerDocument, elem ) ) { ret = jQuery.style( elem, name ); } // A tribute to the "awesome hack by Dean Edwards" // Chrome < 17 and Safari 5.0 uses "computed value" instead of "used value" for margin-right // Safari 5.1.7 (at least) returns percentage for a larger set of values, but width seems to be reliably pixels // this is against the CSSOM draft spec: http://dev.w3.org/csswg/cssom/#resolved-values if ( rnumnonpx.test( ret ) && rmargin.test( name ) ) { width = style.width; minWidth = style.minWidth; maxWidth = style.maxWidth; style.minWidth = style.maxWidth = style.width = ret; ret = computed.width; style.width = width; style.minWidth = minWidth; style.maxWidth = maxWidth; } } return ret; }; } else if ( document.documentElement.currentStyle ) { curCSS = function( elem, name ) { var left, rsLeft, ret = elem.currentStyle && elem.currentStyle[ name ], style = elem.style; // Avoid setting ret to empty string here // so we don't default to auto if ( ret == null && style && style[ name ] ) { ret = style[ name ]; } // From the awesome hack by Dean Edwards // http://erik.eae.net/archives/2007/07/27/18.54.15/#comment-102291 // If we're not dealing with a regular pixel number // but a number that has a weird ending, we need to convert it to pixels // but not position css attributes, as those are proportional to the parent element instead // and we can't measure the parent instead because it might trigger a "stacking dolls" problem if ( rnumnonpx.test( ret ) && !rposition.test( name ) ) { // Remember the original values left = style.left; rsLeft = elem.runtimeStyle && elem.runtimeStyle.left; // Put in the new values to get a computed value out if ( rsLeft ) { elem.runtimeStyle.left = elem.currentStyle.left; } style.left = name === "fontSize" ? "1em" : ret; ret = style.pixelLeft + "px"; // Revert the changed values style.left = left; if ( rsLeft ) { elem.runtimeStyle.left = rsLeft; } } return ret === "" ? "auto" : ret; }; } function setPositiveNumber( elem, value, subtract ) { var matches = rnumsplit.exec( value ); return matches ? Math.max( 0, matches[ 1 ] - ( subtract || 0 ) ) + ( matches[ 2 ] || "px" ) : value; } function augmentWidthOrHeight( elem, name, extra, isBorderBox ) { var i = extra === ( isBorderBox ? "border" : "content" ) ? // If we already have the right measurement, avoid augmentation 4 : // Otherwise initialize for horizontal or vertical properties name === "width" ? 1 : 0, val = 0; for ( ; i < 4; i += 2 ) { // both box models exclude margin, so add it if we want it if ( extra === "margin" ) { // we use jQuery.css instead of curCSS here // because of the reliableMarginRight CSS hook! 
val += jQuery.css( elem, extra + cssExpand[ i ], true ); } // From this point on we use curCSS for maximum performance (relevant in animations) if ( isBorderBox ) { // border-box includes padding, so remove it if we want content if ( extra === "content" ) { val -= parseFloat( curCSS( elem, "padding" + cssExpand[ i ] ) ) || 0; } // at this point, extra isn't border nor margin, so remove border if ( extra !== "margin" ) { val -= parseFloat( curCSS( elem, "border" + cssExpand[ i ] + "Width" ) ) || 0; } } else { // at this point, extra isn't content, so add padding val += parseFloat( curCSS( elem, "padding" + cssExpand[ i ] ) ) || 0; // at this point, extra isn't content nor padding, so add border if ( extra !== "padding" ) { val += parseFloat( curCSS( elem, "border" + cssExpand[ i ] + "Width" ) ) || 0; } } } return val; } function getWidthOrHeight( elem, name, extra ) { // Start with offset property, which is equivalent to the border-box value var val = name === "width" ? elem.offsetWidth : elem.offsetHeight, valueIsBorderBox = true, isBorderBox = jQuery.support.boxSizing && jQuery.css( elem, "boxSizing" ) === "border-box"; // some non-html elements return undefined for offsetWidth, so check for null/undefined // svg - https://bugzilla.mozilla.org/show_bug.cgi?id=649285 // MathML - https://bugzilla.mozilla.org/show_bug.cgi?id=491668 if ( val <= 0 || val == null ) { // Fall back to computed then uncomputed css if necessary val = curCSS( elem, name ); if ( val < 0 || val == null ) { val = elem.style[ name ]; } // Computed unit is not pixels. Stop here and return. if ( rnumnonpx.test(val) ) { return val; } // we need the check for style in case a browser which returns unreliable values // for getComputedStyle silently falls back to the reliable elem.style valueIsBorderBox = isBorderBox && ( jQuery.support.boxSizingReliable || val === elem.style[ name ] ); // Normalize "", auto, and prepare for extra val = parseFloat( val ) || 0; } // use the active box-sizing model to add/subtract irrelevant styles return ( val + augmentWidthOrHeight( elem, name, extra || ( isBorderBox ? "border" : "content" ), valueIsBorderBox ) ) + "px"; } // Try to determine the default display value of an element function css_defaultDisplay( nodeName ) { if ( elemdisplay[ nodeName ] ) { return elemdisplay[ nodeName ]; } var elem = jQuery( "<" + nodeName + ">" ).appendTo( document.body ), display = elem.css("display"); elem.remove(); // If the simple way fails, // get element's real default display by attaching it to a temp iframe if ( display === "none" || display === "" ) { // Use the already-created iframe if possible iframe = document.body.appendChild( iframe || jQuery.extend( document.createElement("iframe"), { frameBorder: 0, width: 0, height: 0 }) ); // Create a cacheable copy of the iframe document on first call. // IE and Opera will allow us to reuse the iframeDoc without re-writing the fake HTML // document to it; WebKit & Firefox won't allow reusing the iframe document. 
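// Illustrative note (added annotation, not part of the jQuery source): the
// getWidthOrHeight/augmentWidthOrHeight helpers above are what the public
// dimension getters build on -- the "extra" argument decides how much of the
// box model is included. Assuming a content-box element with width:100px,
// 10px padding, 2px borders and 5px margins on each side:
//
//   jQuery( elem ).width();            // 100  (content only)
//   jQuery( elem ).innerWidth();       // 120  (content + padding)
//   jQuery( elem ).outerWidth();       // 124  (content + padding + border)
//   jQuery( elem ).outerWidth( true ); // 134  (content + padding + border + margin)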
if ( !iframeDoc || !iframe.createElement ) { iframeDoc = ( iframe.contentWindow || iframe.contentDocument ).document; iframeDoc.write(""); iframeDoc.close(); } elem = iframeDoc.body.appendChild( iframeDoc.createElement(nodeName) ); display = curCSS( elem, "display" ); document.body.removeChild( iframe ); } // Store the correct default display elemdisplay[ nodeName ] = display; return display; } jQuery.each([ "height", "width" ], function( i, name ) { jQuery.cssHooks[ name ] = { get: function( elem, computed, extra ) { if ( computed ) { // certain elements can have dimension info if we invisibly show them // however, it must have a current display style that would benefit from this if ( elem.offsetWidth === 0 && rdisplayswap.test( curCSS( elem, "display" ) ) ) { return jQuery.swap( elem, cssShow, function() { return getWidthOrHeight( elem, name, extra ); }); } else { return getWidthOrHeight( elem, name, extra ); } } }, set: function( elem, value, extra ) { return setPositiveNumber( elem, value, extra ? augmentWidthOrHeight( elem, name, extra, jQuery.support.boxSizing && jQuery.css( elem, "boxSizing" ) === "border-box" ) : 0 ); } }; }); if ( !jQuery.support.opacity ) { jQuery.cssHooks.opacity = { get: function( elem, computed ) { // IE uses filters for opacity return ropacity.test( (computed && elem.currentStyle ? elem.currentStyle.filter : elem.style.filter) || "" ) ? ( 0.01 * parseFloat( RegExp.$1 ) ) + "" : computed ? "1" : ""; }, set: function( elem, value ) { var style = elem.style, currentStyle = elem.currentStyle, opacity = jQuery.isNumeric( value ) ? "alpha(opacity=" + value * 100 + ")" : "", filter = currentStyle && currentStyle.filter || style.filter || ""; // IE has trouble with opacity if it does not have layout // Force it by setting the zoom level style.zoom = 1; // if setting opacity to 1, and no other filters exist - attempt to remove filter attribute #6652 if ( value >= 1 && jQuery.trim( filter.replace( ralpha, "" ) ) === "" && style.removeAttribute ) { // Setting style.filter to null, "" & " " still leave "filter:" in the cssText // if "filter:" is present at all, clearType is disabled, we want to avoid this // style.removeAttribute is IE Only, but so apparently is this code path... style.removeAttribute( "filter" ); // if there there is no filter style applied in a css rule, we are done if ( currentStyle && !currentStyle.filter ) { return; } } // otherwise, set new filter values style.filter = ralpha.test( filter ) ? 
filter.replace( ralpha, opacity ) : filter + " " + opacity; } }; } // These hooks cannot be added until DOM ready because the support test // for it is not run until after DOM ready jQuery(function() { if ( !jQuery.support.reliableMarginRight ) { jQuery.cssHooks.marginRight = { get: function( elem, computed ) { // WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right // Work around by temporarily setting element display to inline-block return jQuery.swap( elem, { "display": "inline-block" }, function() { if ( computed ) { return curCSS( elem, "marginRight" ); } }); } }; } // Webkit bug: https://bugs.webkit.org/show_bug.cgi?id=29084 // getComputedStyle returns percent when specified for top/left/bottom/right // rather than make the css module depend on the offset module, we just check for it here if ( !jQuery.support.pixelPosition && jQuery.fn.position ) { jQuery.each( [ "top", "left" ], function( i, prop ) { jQuery.cssHooks[ prop ] = { get: function( elem, computed ) { if ( computed ) { var ret = curCSS( elem, prop ); // if curCSS returns percentage, fallback to offset return rnumnonpx.test( ret ) ? jQuery( elem ).position()[ prop ] + "px" : ret; } } }; }); } }); if ( jQuery.expr && jQuery.expr.filters ) { jQuery.expr.filters.hidden = function( elem ) { return ( elem.offsetWidth === 0 && elem.offsetHeight === 0 ) || (!jQuery.support.reliableHiddenOffsets && ((elem.style && elem.style.display) || curCSS( elem, "display" )) === "none"); }; jQuery.expr.filters.visible = function( elem ) { return !jQuery.expr.filters.hidden( elem ); }; } // These hooks are used by animate to expand properties jQuery.each({ margin: "", padding: "", border: "Width" }, function( prefix, suffix ) { jQuery.cssHooks[ prefix + suffix ] = { expand: function( value ) { var i, // assumes a single number if not a string parts = typeof value === "string" ? value.split(" ") : [ value ], expanded = {}; for ( i = 0; i < 4; i++ ) { expanded[ prefix + cssExpand[ i ] + suffix ] = parts[ i ] || parts[ i - 2 ] || parts[ 0 ]; } return expanded; } }; if ( !rmargin.test( prefix ) ) { jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber; } }); var r20 = /%20/g, rbracket = /\[\]$/, rCRLF = /\r?\n/g, rinput = /^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i, rselectTextarea = /^(?:select|textarea)/i; jQuery.fn.extend({ serialize: function() { return jQuery.param( this.serializeArray() ); }, serializeArray: function() { return this.map(function(){ return this.elements ? jQuery.makeArray( this.elements ) : this; }) .filter(function(){ return this.name && !this.disabled && ( this.checked || rselectTextarea.test( this.nodeName ) || rinput.test( this.type ) ); }) .map(function( i, elem ){ var val = jQuery( this ).val(); return val == null ? null : jQuery.isArray( val ) ? jQuery.map( val, function( val, i ){ return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; }) : { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; }).get(); } }); //Serialize an array of form elements or a set of //key/values into a query string jQuery.param = function( a, traditional ) { var prefix, s = [], add = function( key, value ) { // If value is a function, invoke it and return its value value = jQuery.isFunction( value ) ? value() : ( value == null ? "" : value ); s[ s.length ] = encodeURIComponent( key ) + "=" + encodeURIComponent( value ); }; // Set traditional to true for jQuery <= 1.3.2 behavior. 
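// Illustrative examples (added annotation, not part of the jQuery source) of
// what jQuery.param produces; the default "deep" mode brackets array and
// object keys, while traditional mode repeats the bare key:
//
//   jQuery.param({ a: [ 1, 2 ], b: { c: 3 } });
//     // -> "a%5B%5D=1&a%5B%5D=2&b%5Bc%5D=3"  (decoded: a[]=1&a[]=2&b[c]=3)
//   jQuery.param({ a: [ 1, 2 ] }, true);
//     // -> "a=1&a=2"
//
// jQuery.fn.serialize() above is simply jQuery.param( this.serializeArray() ).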
if ( traditional === undefined ) { traditional = jQuery.ajaxSettings && jQuery.ajaxSettings.traditional; } // If an array was passed in, assume that it is an array of form elements. if ( jQuery.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) { // Serialize the form elements jQuery.each( a, function() { add( this.name, this.value ); }); } else { // If traditional, encode the "old" way (the way 1.3.2 or older // did it), otherwise encode params recursively. for ( prefix in a ) { buildParams( prefix, a[ prefix ], traditional, add ); } } // Return the resulting serialization return s.join( "&" ).replace( r20, "+" ); }; function buildParams( prefix, obj, traditional, add ) { var name; if ( jQuery.isArray( obj ) ) { // Serialize array item. jQuery.each( obj, function( i, v ) { if ( traditional || rbracket.test( prefix ) ) { // Treat each array item as a scalar. add( prefix, v ); } else { // If array item is non-scalar (array or object), encode its // numeric index to resolve deserialization ambiguity issues. // Note that rack (as of 1.0.0) can't currently deserialize // nested arrays properly, and attempting to do so may cause // a server error. Possible fixes are to modify rack's // deserialization algorithm or to provide an option or flag // to force array serialization to be shallow. buildParams( prefix + "[" + ( typeof v === "object" ? i : "" ) + "]", v, traditional, add ); } }); } else if ( !traditional && jQuery.type( obj ) === "object" ) { // Serialize object item. for ( name in obj ) { buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add ); } } else { // Serialize scalar item. add( prefix, obj ); } } var // Document location ajaxLocParts, ajaxLocation, rhash = /#.*$/, rheaders = /^(.*?):[ \t]*([^\r\n]*)\r?$/mg, // IE leaves an \r character at EOL // #7653, #8125, #8152: local protocol detection rlocalProtocol = /^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/, rnoContent = /^(?:GET|HEAD)$/, rprotocol = /^\/\//, rquery = /\?/, rscript = /)<[^<]*)*<\/script>/gi, rts = /([?&])_=[^&]*/, rurl = /^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/, // Keep a copy of the old load method _load = jQuery.fn.load, /* Prefilters * 1) They are useful to introduce custom dataTypes (see ajax/jsonp.js for an example) * 2) These are called: * - BEFORE asking for a transport * - AFTER param serialization (s.data is a string if s.processData is true) * 3) key is the dataType * 4) the catchall symbol "*" can be used * 5) execution will start with transport dataType and THEN continue down to "*" if needed */ prefilters = {}, /* Transports bindings * 1) key is the dataType * 2) the catchall symbol "*" can be used * 3) selection will start with transport dataType and THEN go to "*" if needed */ transports = {}, // Avoid comment-prolog char sequence (#10098); must appease lint and evade compression allTypes = ["*/"] + ["*"]; // #8138, IE may throw an exception when accessing // a field from window.location if document.domain has been set try { ajaxLocation = location.href; } catch( e ) { // Use the href attribute of an A element // since IE will modify it given document.location ajaxLocation = document.createElement( "a" ); ajaxLocation.href = ""; ajaxLocation = ajaxLocation.href; } // Segment location into parts ajaxLocParts = rurl.exec( ajaxLocation.toLowerCase() ) || []; // Base "constructor" for jQuery.ajaxPrefilter and jQuery.ajaxTransport function addToPrefiltersOrTransports( structure ) { // dataTypeExpression is optional and defaults to "*" return function( 
dataTypeExpression, func ) { if ( typeof dataTypeExpression !== "string" ) { func = dataTypeExpression; dataTypeExpression = "*"; } var dataType, list, placeBefore, dataTypes = dataTypeExpression.toLowerCase().split( core_rspace ), i = 0, length = dataTypes.length; if ( jQuery.isFunction( func ) ) { // For each dataType in the dataTypeExpression for ( ; i < length; i++ ) { dataType = dataTypes[ i ]; // We control if we're asked to add before // any existing element placeBefore = /^\+/.test( dataType ); if ( placeBefore ) { dataType = dataType.substr( 1 ) || "*"; } list = structure[ dataType ] = structure[ dataType ] || []; // then we add to the structure accordingly list[ placeBefore ? "unshift" : "push" ]( func ); } } }; } // Base inspection function for prefilters and transports function inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR, dataType /* internal */, inspected /* internal */ ) { dataType = dataType || options.dataTypes[ 0 ]; inspected = inspected || {}; inspected[ dataType ] = true; var selection, list = structure[ dataType ], i = 0, length = list ? list.length : 0, executeOnly = ( structure === prefilters ); for ( ; i < length && ( executeOnly || !selection ); i++ ) { selection = list[ i ]( options, originalOptions, jqXHR ); // If we got redirected to another dataType // we try there if executing only and not done already if ( typeof selection === "string" ) { if ( !executeOnly || inspected[ selection ] ) { selection = undefined; } else { options.dataTypes.unshift( selection ); selection = inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR, selection, inspected ); } } } // If we're only executing or nothing was selected // we try the catchall dataType if not done already if ( ( executeOnly || !selection ) && !inspected[ "*" ] ) { selection = inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR, "*", inspected ); } // unnecessary when only executing (prefilters) // but it'll be ignored by the caller in that case return selection; } // A special extend for ajax options // that takes "flat" options (not to be deep extended) // Fixes #9887 function ajaxExtend( target, src ) { var key, deep, flatOptions = jQuery.ajaxSettings.flatOptions || {}; for ( key in src ) { if ( src[ key ] !== undefined ) { ( flatOptions[ key ] ? target : ( deep || ( deep = {} ) ) )[ key ] = src[ key ]; } } if ( deep ) { jQuery.extend( true, target, deep ); } } jQuery.fn.load = function( url, params, callback ) { if ( typeof url !== "string" && _load ) { return _load.apply( this, arguments ); } // Don't do a request if no elements are being requested if ( !this.length ) { return this; } var selector, type, response, self = this, off = url.indexOf(" "); if ( off >= 0 ) { selector = url.slice( off, url.length ); url = url.slice( 0, off ); } // If it's a function if ( jQuery.isFunction( params ) ) { // We assume that it's the callback callback = params; params = undefined; // Otherwise, build a param string } else if ( params && typeof params === "object" ) { type = "POST"; } // Request the remote document jQuery.ajax({ url: url, // if "type" variable is undefined, then "GET" method will be used type: type, dataType: "html", data: params, complete: function( jqXHR, status ) { if ( callback ) { self.each( callback, response || [ jqXHR.responseText, status, jqXHR ] ); } } }).done(function( responseText ) { // Save response for use in complete callback response = arguments; // See if a selector was specified self.html( selector ? 
// Create a dummy div to hold the results jQuery("<div>
") // inject the contents of the document in, removing the scripts // to avoid any 'Permission Denied' errors in IE .append( responseText.replace( rscript, "" ) ) // Locate the specified elements .find( selector ) : // If not, just inject the full result responseText ); }); return this; }; // Attach a bunch of functions for handling common AJAX events jQuery.each( "ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split( " " ), function( i, o ){ jQuery.fn[ o ] = function( f ){ return this.on( o, f ); }; }); jQuery.each( [ "get", "post" ], function( i, method ) { jQuery[ method ] = function( url, data, callback, type ) { // shift arguments if data argument was omitted if ( jQuery.isFunction( data ) ) { type = type || callback; callback = data; data = undefined; } return jQuery.ajax({ type: method, url: url, data: data, success: callback, dataType: type }); }; }); jQuery.extend({ getScript: function( url, callback ) { return jQuery.get( url, undefined, callback, "script" ); }, getJSON: function( url, data, callback ) { return jQuery.get( url, data, callback, "json" ); }, // Creates a full fledged settings object into target // with both ajaxSettings and settings fields. // If target is omitted, writes into ajaxSettings. ajaxSetup: function( target, settings ) { if ( settings ) { // Building a settings object ajaxExtend( target, jQuery.ajaxSettings ); } else { // Extending ajaxSettings settings = target; target = jQuery.ajaxSettings; } ajaxExtend( target, settings ); return target; }, ajaxSettings: { url: ajaxLocation, isLocal: rlocalProtocol.test( ajaxLocParts[ 1 ] ), global: true, type: "GET", contentType: "application/x-www-form-urlencoded; charset=UTF-8", processData: true, async: true, /* timeout: 0, data: null, dataType: null, username: null, password: null, cache: null, throws: false, traditional: false, headers: {}, */ accepts: { xml: "application/xml, text/xml", html: "text/html", text: "text/plain", json: "application/json, text/javascript", "*": allTypes }, contents: { xml: /xml/, html: /html/, json: /json/ }, responseFields: { xml: "responseXML", text: "responseText" }, // List of data converters // 1) key format is "source_type destination_type" (a single space in-between) // 2) the catchall symbol "*" can be used for source_type converters: { // Convert anything to text "* text": window.String, // Text to html (true = no transformation) "text html": true, // Evaluate text as a json expression "text json": jQuery.parseJSON, // Parse text as xml "text xml": jQuery.parseXML }, // For options that shouldn't be deep extended: // you can add your own custom options here if // and when you create one that shouldn't be // deep extended (see ajaxExtend) flatOptions: { context: true, url: true } }, ajaxPrefilter: addToPrefiltersOrTransports( prefilters ), ajaxTransport: addToPrefiltersOrTransports( transports ), // Main method ajax: function( url, options ) { // If url is an object, simulate pre-1.5 signature if ( typeof url === "object" ) { options = url; url = undefined; } // Force options to be an object options = options || {}; var // ifModified key ifModifiedKey, // Response headers responseHeadersString, responseHeaders, // transport transport, // timeout handle timeoutTimer, // Cross-domain detection vars parts, // To know if global events are to be dispatched fireGlobals, // Loop variable i, // Create the final options object s = jQuery.ajaxSetup( {}, options ), // Callbacks context callbackContext = s.context || s, // Context for global events // It's the 
callbackContext if one was provided in the options // and if it's a DOM node or a jQuery collection globalEventContext = callbackContext !== s && ( callbackContext.nodeType || callbackContext instanceof jQuery ) ? jQuery( callbackContext ) : jQuery.event, // Deferreds deferred = jQuery.Deferred(), completeDeferred = jQuery.Callbacks( "once memory" ), // Status-dependent callbacks statusCode = s.statusCode || {}, // Headers (they are sent all at once) requestHeaders = {}, requestHeadersNames = {}, // The jqXHR state state = 0, // Default abort message strAbort = "canceled", // Fake xhr jqXHR = { readyState: 0, // Caches the header setRequestHeader: function( name, value ) { if ( !state ) { var lname = name.toLowerCase(); name = requestHeadersNames[ lname ] = requestHeadersNames[ lname ] || name; requestHeaders[ name ] = value; } return this; }, // Raw string getAllResponseHeaders: function() { return state === 2 ? responseHeadersString : null; }, // Builds headers hashtable if needed getResponseHeader: function( key ) { var match; if ( state === 2 ) { if ( !responseHeaders ) { responseHeaders = {}; while( ( match = rheaders.exec( responseHeadersString ) ) ) { responseHeaders[ match[1].toLowerCase() ] = match[ 2 ]; } } match = responseHeaders[ key.toLowerCase() ]; } return match === undefined ? null : match; }, // Overrides response content-type header overrideMimeType: function( type ) { if ( !state ) { s.mimeType = type; } return this; }, // Cancel the request abort: function( statusText ) { statusText = statusText || strAbort; if ( transport ) { transport.abort( statusText ); } done( 0, statusText ); return this; } }; // Callback for when everything is done // It is defined here because jslint complains if it is declared // at the end of the function (which would be more logical and readable) function done( status, nativeStatusText, responses, headers ) { var isSuccess, success, error, response, modified, statusText = nativeStatusText; // Called once if ( state === 2 ) { return; } // State is "done" now state = 2; // Clear timeout if it exists if ( timeoutTimer ) { clearTimeout( timeoutTimer ); } // Dereference transport for early garbage collection // (no matter how long the jqXHR object will be used) transport = undefined; // Cache response headers responseHeadersString = headers || ""; // Set readyState jqXHR.readyState = status > 0 ? 4 : 0; // Get response data if ( responses ) { response = ajaxHandleResponses( s, jqXHR, responses ); } // If successful, handle type chaining if ( status >= 200 && status < 300 || status === 304 ) { // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. 
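// Illustrative caller-side sketch (added annotation, not part of the jQuery
// source): the jqXHR object assembled above is what a jQuery.ajax() call
// returns, e.g. (the URL and header name are hypothetical)
//
//   var xhr = jQuery.ajax({
//       url: "/api/items",
//       dataType: "json",
//       ifModified: true,                 // cache Last-Modified/Etag for the next request
//       headers: { "X-Trace": "1" },
//       statusCode: { 404: function() { /* missing resource */ } }
//   });
//   xhr.done(function( data, statusText, jqXHR ) {
//       jqXHR.getResponseHeader( "Etag" );
//   });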
if ( s.ifModified ) { modified = jqXHR.getResponseHeader("Last-Modified"); if ( modified ) { jQuery.lastModified[ ifModifiedKey ] = modified; } modified = jqXHR.getResponseHeader("Etag"); if ( modified ) { jQuery.etag[ ifModifiedKey ] = modified; } } // If not modified if ( status === 304 ) { statusText = "notmodified"; isSuccess = true; // If we have data } else { isSuccess = ajaxConvert( s, response ); statusText = isSuccess.state; success = isSuccess.data; error = isSuccess.error; isSuccess = !error; } } else { // We extract error from statusText // then normalize statusText and status for non-aborts error = statusText; if ( !statusText || status ) { statusText = "error"; if ( status < 0 ) { status = 0; } } } // Set data for the fake xhr object jqXHR.status = status; jqXHR.statusText = ( nativeStatusText || statusText ) + ""; // Success/Error if ( isSuccess ) { deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] ); } else { deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] ); } // Status-dependent callbacks jqXHR.statusCode( statusCode ); statusCode = undefined; if ( fireGlobals ) { globalEventContext.trigger( "ajax" + ( isSuccess ? "Success" : "Error" ), [ jqXHR, s, isSuccess ? success : error ] ); } // Complete completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] ); if ( fireGlobals ) { globalEventContext.trigger( "ajaxComplete", [ jqXHR, s ] ); // Handle the global AJAX counter if ( !( --jQuery.active ) ) { jQuery.event.trigger( "ajaxStop" ); } } } // Attach deferreds deferred.promise( jqXHR ); jqXHR.success = jqXHR.done; jqXHR.error = jqXHR.fail; jqXHR.complete = completeDeferred.add; // Status-dependent callbacks jqXHR.statusCode = function( map ) { if ( map ) { var tmp; if ( state < 2 ) { for ( tmp in map ) { statusCode[ tmp ] = [ statusCode[tmp], map[tmp] ]; } } else { tmp = map[ jqXHR.status ]; jqXHR.always( tmp ); } } return this; }; // Remove hash character (#7531: and string promotion) // Add protocol if not provided (#5866: IE7 issue with protocol-less urls) // We also use the url parameter if available s.url = ( ( url || s.url ) + "" ).replace( rhash, "" ).replace( rprotocol, ajaxLocParts[ 1 ] + "//" ); // Extract dataTypes list s.dataTypes = jQuery.trim( s.dataType || "*" ).toLowerCase().split( core_rspace ); // A cross-domain request is in order when we have a protocol:host:port mismatch if ( s.crossDomain == null ) { parts = rurl.exec( s.url.toLowerCase() ); s.crossDomain = !!( parts && ( parts[ 1 ] !== ajaxLocParts[ 1 ] || parts[ 2 ] !== ajaxLocParts[ 2 ] || ( parts[ 3 ] || ( parts[ 1 ] === "http:" ? 80 : 443 ) ) != ( ajaxLocParts[ 3 ] || ( ajaxLocParts[ 1 ] === "http:" ? 80 : 443 ) ) ) ); } // Convert data if not already a string if ( s.data && s.processData && typeof s.data !== "string" ) { s.data = jQuery.param( s.data, s.traditional ); } // Apply prefilters inspectPrefiltersOrTransports( prefilters, s, options, jqXHR ); // If request was aborted inside a prefilter, stop there if ( state === 2 ) { return jqXHR; } // We can fire global events as of now if asked to fireGlobals = s.global; // Uppercase the type s.type = s.type.toUpperCase(); // Determine if request has content s.hasContent = !rnoContent.test( s.type ); // Watch for a new set of requests if ( fireGlobals && jQuery.active++ === 0 ) { jQuery.event.trigger( "ajaxStart" ); } // More options handling for requests with no content if ( !s.hasContent ) { // If data is available, append data to url if ( s.data ) { s.url += ( rquery.test( s.url ) ? "&" : "?" 
) + s.data; // #9682: remove data so that it's not used in an eventual retry delete s.data; } // Get ifModifiedKey before adding the anti-cache parameter ifModifiedKey = s.url; // Add anti-cache in url if needed if ( s.cache === false ) { var ts = jQuery.now(), // try replacing _= if it is there ret = s.url.replace( rts, "$1_=" + ts ); // if nothing was replaced, add timestamp to the end s.url = ret + ( ( ret === s.url ) ? ( rquery.test( s.url ) ? "&" : "?" ) + "_=" + ts : "" ); } } // Set the correct header, if data is being sent if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) { jqXHR.setRequestHeader( "Content-Type", s.contentType ); } // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. if ( s.ifModified ) { ifModifiedKey = ifModifiedKey || s.url; if ( jQuery.lastModified[ ifModifiedKey ] ) { jqXHR.setRequestHeader( "If-Modified-Since", jQuery.lastModified[ ifModifiedKey ] ); } if ( jQuery.etag[ ifModifiedKey ] ) { jqXHR.setRequestHeader( "If-None-Match", jQuery.etag[ ifModifiedKey ] ); } } // Set the Accepts header for the server, depending on the dataType jqXHR.setRequestHeader( "Accept", s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[0] ] ? s.accepts[ s.dataTypes[0] ] + ( s.dataTypes[ 0 ] !== "*" ? ", " + allTypes + "; q=0.01" : "" ) : s.accepts[ "*" ] ); // Check for headers option for ( i in s.headers ) { jqXHR.setRequestHeader( i, s.headers[ i ] ); } // Allow custom headers/mimetypes and early abort if ( s.beforeSend && ( s.beforeSend.call( callbackContext, jqXHR, s ) === false || state === 2 ) ) { // Abort if not done already and return return jqXHR.abort(); } // aborting is no longer a cancellation strAbort = "abort"; // Install callbacks on deferreds for ( i in { success: 1, error: 1, complete: 1 } ) { jqXHR[ i ]( s[ i ] ); } // Get transport transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR ); // If no transport, we auto-abort if ( !transport ) { done( -1, "No Transport" ); } else { jqXHR.readyState = 1; // Send global event if ( fireGlobals ) { globalEventContext.trigger( "ajaxSend", [ jqXHR, s ] ); } // Timeout if ( s.async && s.timeout > 0 ) { timeoutTimer = setTimeout( function(){ jqXHR.abort( "timeout" ); }, s.timeout ); } try { state = 1; transport.send( requestHeaders, done ); } catch (e) { // Propagate exception as error if not done if ( state < 2 ) { done( -1, e ); // Simply rethrow otherwise } else { throw e; } } } return jqXHR; }, // Counter for holding the number of active queries active: 0, // Last-Modified header cache for next request lastModified: {}, etag: {} }); /* Handles responses to an ajax request: * - sets all responseXXX fields accordingly * - finds the right dataType (mediates between content-type and expected dataType) * - returns the corresponding response */ function ajaxHandleResponses( s, jqXHR, responses ) { var ct, type, finalDataType, firstDataType, contents = s.contents, dataTypes = s.dataTypes, responseFields = s.responseFields; // Fill responseXXX fields for ( type in responseFields ) { if ( type in responses ) { jqXHR[ responseFields[type] ] = responses[ type ]; } } // Remove auto dataType and get content-type in the process while( dataTypes[ 0 ] === "*" ) { dataTypes.shift(); if ( ct === undefined ) { ct = s.mimeType || jqXHR.getResponseHeader( "content-type" ); } } // Check if we're dealing with a known content-type if ( ct ) { for ( type in contents ) { if ( contents[ type ] && contents[ type ].test( ct ) ) { dataTypes.unshift( type ); break; } } } // Check to 
see if we have a response for the expected dataType if ( dataTypes[ 0 ] in responses ) { finalDataType = dataTypes[ 0 ]; } else { // Try convertible dataTypes for ( type in responses ) { if ( !dataTypes[ 0 ] || s.converters[ type + " " + dataTypes[0] ] ) { finalDataType = type; break; } if ( !firstDataType ) { firstDataType = type; } } // Or just use first one finalDataType = finalDataType || firstDataType; } // If we found a dataType // We add the dataType to the list if needed // and return the corresponding response if ( finalDataType ) { if ( finalDataType !== dataTypes[ 0 ] ) { dataTypes.unshift( finalDataType ); } return responses[ finalDataType ]; } } // Chain conversions given the request and the original response function ajaxConvert( s, response ) { var conv, conv2, current, tmp, // Work with a copy of dataTypes in case we need to modify it for conversion dataTypes = s.dataTypes.slice(), prev = dataTypes[ 0 ], converters = {}, i = 0; // Apply the dataFilter if provided if ( s.dataFilter ) { response = s.dataFilter( response, s.dataType ); } // Create converters map with lowercased keys if ( dataTypes[ 1 ] ) { for ( conv in s.converters ) { converters[ conv.toLowerCase() ] = s.converters[ conv ]; } } // Convert to each sequential dataType, tolerating list modification for ( ; (current = dataTypes[++i]); ) { // There's only work to do if current dataType is non-auto if ( current !== "*" ) { // Convert response if prev dataType is non-auto and differs from current if ( prev !== "*" && prev !== current ) { // Seek a direct converter conv = converters[ prev + " " + current ] || converters[ "* " + current ]; // If none found, seek a pair if ( !conv ) { for ( conv2 in converters ) { // If conv2 outputs current tmp = conv2.split(" "); if ( tmp[ 1 ] === current ) { // If prev can be converted to accepted input conv = converters[ prev + " " + tmp[ 0 ] ] || converters[ "* " + tmp[ 0 ] ]; if ( conv ) { // Condense equivalence converters if ( conv === true ) { conv = converters[ conv2 ]; // Otherwise, insert the intermediate dataType } else if ( converters[ conv2 ] !== true ) { current = tmp[ 0 ]; dataTypes.splice( i--, 0, current ); } break; } } } } // Apply converter (if not an equivalence) if ( conv !== true ) { // Unless errors are allowed to bubble, catch and return them if ( conv && s["throws"] ) { response = conv( response ); } else { try { response = conv( response ); } catch ( e ) { return { state: "parsererror", error: conv ? 
e : "No conversion from " + prev + " to " + current }; } } } } // Update prev for next iteration prev = current; } } return { state: "success", data: response }; } var oldCallbacks = [], rquestion = /\?/, rjsonp = /(=)\?(?=&|$)|\?\?/, nonce = jQuery.now(); // Default jsonp settings jQuery.ajaxSetup({ jsonp: "callback", jsonpCallback: function() { var callback = oldCallbacks.pop() || ( jQuery.expando + "_" + ( nonce++ ) ); this[ callback ] = true; return callback; } }); // Detect, normalize options and install callbacks for jsonp requests jQuery.ajaxPrefilter( "json jsonp", function( s, originalSettings, jqXHR ) { var callbackName, overwritten, responseContainer, data = s.data, url = s.url, hasCallback = s.jsonp !== false, replaceInUrl = hasCallback && rjsonp.test( url ), replaceInData = hasCallback && !replaceInUrl && typeof data === "string" && !( s.contentType || "" ).indexOf("application/x-www-form-urlencoded") && rjsonp.test( data ); // Handle iff the expected data type is "jsonp" or we have a parameter to set if ( s.dataTypes[ 0 ] === "jsonp" || replaceInUrl || replaceInData ) { // Get callback name, remembering preexisting value associated with it callbackName = s.jsonpCallback = jQuery.isFunction( s.jsonpCallback ) ? s.jsonpCallback() : s.jsonpCallback; overwritten = window[ callbackName ]; // Insert callback into url or form data if ( replaceInUrl ) { s.url = url.replace( rjsonp, "$1" + callbackName ); } else if ( replaceInData ) { s.data = data.replace( rjsonp, "$1" + callbackName ); } else if ( hasCallback ) { s.url += ( rquestion.test( url ) ? "&" : "?" ) + s.jsonp + "=" + callbackName; } // Use data converter to retrieve json after script execution s.converters["script json"] = function() { if ( !responseContainer ) { jQuery.error( callbackName + " was not called" ); } return responseContainer[ 0 ]; }; // force json dataType s.dataTypes[ 0 ] = "json"; // Install callback window[ callbackName ] = function() { responseContainer = arguments; }; // Clean-up function (fires after converters) jqXHR.always(function() { // Restore preexisting value window[ callbackName ] = overwritten; // Save back as free if ( s[ callbackName ] ) { // make sure that re-using the options doesn't screw things around s.jsonpCallback = originalSettings.jsonpCallback; // save the callback name for future use oldCallbacks.push( callbackName ); } // Call if it was a function and we have a response if ( responseContainer && jQuery.isFunction( overwritten ) ) { overwritten( responseContainer[ 0 ] ); } responseContainer = overwritten = undefined; }); // Delegate to script return "script"; } }); // Install script dataType jQuery.ajaxSetup({ accepts: { script: "text/javascript, application/javascript, application/ecmascript, application/x-ecmascript" }, contents: { script: /javascript|ecmascript/ }, converters: { "text script": function( text ) { jQuery.globalEval( text ); return text; } } }); // Handle cache's special case and global jQuery.ajaxPrefilter( "script", function( s ) { if ( s.cache === undefined ) { s.cache = false; } if ( s.crossDomain ) { s.type = "GET"; s.global = false; } }); // Bind script tag hack transport jQuery.ajaxTransport( "script", function(s) { // This transport only deals with cross domain requests if ( s.crossDomain ) { var script, head = document.head || document.getElementsByTagName( "head" )[0] || document.documentElement; return { send: function( _, callback ) { script = document.createElement( "script" ); script.async = "async"; if ( s.scriptCharset ) { script.charset = 
s.scriptCharset; } script.src = s.url; // Attach handlers for all browsers script.onload = script.onreadystatechange = function( _, isAbort ) { if ( isAbort || !script.readyState || /loaded|complete/.test( script.readyState ) ) { // Handle memory leak in IE script.onload = script.onreadystatechange = null; // Remove the script if ( head && script.parentNode ) { head.removeChild( script ); } // Dereference the script script = undefined; // Callback if not abort if ( !isAbort ) { callback( 200, "success" ); } } }; // Use insertBefore instead of appendChild to circumvent an IE6 bug. // This arises when a base node is used (#2709 and #4378). head.insertBefore( script, head.firstChild ); }, abort: function() { if ( script ) { script.onload( 0, 1 ); } } }; } }); var xhrCallbacks, // #5280: Internet Explorer will keep connections alive if we don't abort on unload xhrOnUnloadAbort = window.ActiveXObject ? function() { // Abort all pending requests for ( var key in xhrCallbacks ) { xhrCallbacks[ key ]( 0, 1 ); } } : false, xhrId = 0; // Functions to create xhrs function createStandardXHR() { try { return new window.XMLHttpRequest(); } catch( e ) {} } function createActiveXHR() { try { return new window.ActiveXObject( "Microsoft.XMLHTTP" ); } catch( e ) {} } // Create the request object // (This is still attached to ajaxSettings for backward compatibility) jQuery.ajaxSettings.xhr = window.ActiveXObject ? /* Microsoft failed to properly * implement the XMLHttpRequest in IE7 (can't request local files), * so we use the ActiveXObject when it is available * Additionally XMLHttpRequest can be disabled in IE7/IE8 so * we need a fallback. */ function() { return !this.isLocal && createStandardXHR() || createActiveXHR(); } : // For all other browsers, use the standard XMLHttpRequest object createStandardXHR; // Determine support properties (function( xhr ) { jQuery.extend( jQuery.support, { ajax: !!xhr, cors: !!xhr && ( "withCredentials" in xhr ) }); })( jQuery.ajaxSettings.xhr() ); // Create transport if the browser can provide an xhr if ( jQuery.support.ajax ) { jQuery.ajaxTransport(function( s ) { // Cross domain only allowed if supported through XMLHttpRequest if ( !s.crossDomain || jQuery.support.cors ) { var callback; return { send: function( headers, complete ) { // Get a new xhr var handle, i, xhr = s.xhr(); // Open the socket // Passing null username, generates a login popup on Opera (#2865) if ( s.username ) { xhr.open( s.type, s.url, s.async, s.username, s.password ); } else { xhr.open( s.type, s.url, s.async ); } // Apply custom fields if provided if ( s.xhrFields ) { for ( i in s.xhrFields ) { xhr[ i ] = s.xhrFields[ i ]; } } // Override mime type if needed if ( s.mimeType && xhr.overrideMimeType ) { xhr.overrideMimeType( s.mimeType ); } // X-Requested-With header // For cross-domain requests, seeing as conditions for a preflight are // akin to a jigsaw puzzle, we simply never set it to be sure. // (it can always be set on a per-request basis or even using ajaxSetup) // For same-domain requests, won't change header if already provided. 
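// Illustrative caller-side sketch (added annotation, not part of the jQuery
// source): the s.xhrFields loop above is how callers reach raw XHR properties,
// e.g. sending cookies with a CORS request (honoured only when
// jQuery.support.cors is true; the URL is hypothetical):
//
//   jQuery.ajax({
//       url: "https://other.example.com/api",
//       xhrFields: { withCredentials: true },
//       crossDomain: true
//   });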
if ( !s.crossDomain && !headers["X-Requested-With"] ) { headers[ "X-Requested-With" ] = "XMLHttpRequest"; } // Need an extra try/catch for cross domain requests in Firefox 3 try { for ( i in headers ) { xhr.setRequestHeader( i, headers[ i ] ); } } catch( _ ) {} // Do send the request // This may raise an exception which is actually // handled in jQuery.ajax (so no try/catch here) xhr.send( ( s.hasContent && s.data ) || null ); // Listener callback = function( _, isAbort ) { var status, statusText, responseHeaders, responses, xml; // Firefox throws exceptions when accessing properties // of an xhr when a network error occurred // http://helpful.knobs-dials.com/index.php/Component_returned_failure_code:_0x80040111_(NS_ERROR_NOT_AVAILABLE) try { // Was never called and is aborted or complete if ( callback && ( isAbort || xhr.readyState === 4 ) ) { // Only called once callback = undefined; // Do not keep as active anymore if ( handle ) { xhr.onreadystatechange = jQuery.noop; if ( xhrOnUnloadAbort ) { delete xhrCallbacks[ handle ]; } } // If it's an abort if ( isAbort ) { // Abort it manually if needed if ( xhr.readyState !== 4 ) { xhr.abort(); } } else { status = xhr.status; responseHeaders = xhr.getAllResponseHeaders(); responses = {}; xml = xhr.responseXML; // Construct response list if ( xml && xml.documentElement /* #4958 */ ) { responses.xml = xml; } // When requesting binary data, IE6-9 will throw an exception // on any attempt to access responseText (#11426) try { responses.text = xhr.responseText; } catch( e ) { } // Firefox throws an exception when accessing // statusText for faulty cross-domain requests try { statusText = xhr.statusText; } catch( e ) { // We normalize with Webkit giving an empty statusText statusText = ""; } // Filter status for non standard behaviors // If the request is local and we have data: assume a success // (success with no data won't get notified, that's the best we // can do given current implementations) if ( !status && s.isLocal && !s.crossDomain ) { status = responses.text ? 200 : 404; // IE - #1450: sometimes returns 1223 when it should be 204 } else if ( status === 1223 ) { status = 204; } } } } catch( firefoxAccessException ) { if ( !isAbort ) { complete( -1, firefoxAccessException ); } } // Call complete if needed if ( responses ) { complete( status, statusText, responses, responseHeaders ); } }; if ( !s.async ) { // if we're in sync mode we fire the callback callback(); } else if ( xhr.readyState === 4 ) { // (IE6 & IE7) if it's in cache and has been // retrieved directly we need to fire the callback setTimeout( callback, 0 ); } else { handle = ++xhrId; if ( xhrOnUnloadAbort ) { // Create the active xhrs callbacks list if needed // and attach the unload handler if ( !xhrCallbacks ) { xhrCallbacks = {}; jQuery( window ).unload( xhrOnUnloadAbort ); } // Add to list of active xhrs callbacks xhrCallbacks[ handle ] = callback; } xhr.onreadystatechange = callback; } }, abort: function() { if ( callback ) { callback(0,1); } } }; } }); } var fxNow, timerId, rfxtypes = /^(?:toggle|show|hide)$/, rfxnum = new RegExp( "^(?:([-+])=|)(" + core_pnum + ")([a-z%]*)$", "i" ), rrun = /queueHooks$/, animationPrefilters = [ defaultPrefilter ], tweeners = { "*": [function( prop, value ) { var end, unit, tween = this.createTween( prop, value ), parts = rfxnum.exec( value ), target = tween.cur(), start = +target || 0, scale = 1, maxIterations = 20; if ( parts ) { end = +parts[2]; unit = parts[3] || ( jQuery.cssNumber[ prop ] ? 
"" : "px" ); // We need to compute starting value if ( unit !== "px" && start ) { // Iteratively approximate from a nonzero starting point // Prefer the current property, because this process will be trivial if it uses the same units // Fallback to end or a simple constant start = jQuery.css( tween.elem, prop, true ) || end || 1; do { // If previous iteration zeroed out, double until we get *something* // Use a string for doubling factor so we don't accidentally see scale as unchanged below scale = scale || ".5"; // Adjust and apply start = start / scale; jQuery.style( tween.elem, prop, start + unit ); // Update scale, tolerating zero or NaN from tween.cur() // And breaking the loop if scale is unchanged or perfect, or if we've just had enough } while ( scale !== (scale = tween.cur() / target) && scale !== 1 && --maxIterations ); } tween.unit = unit; tween.start = start; // If a +=/-= token was provided, we're doing a relative animation tween.end = parts[1] ? start + ( parts[1] + 1 ) * end : end; } return tween; }] }; // Animations created synchronously will run synchronously function createFxNow() { setTimeout(function() { fxNow = undefined; }, 0 ); return ( fxNow = jQuery.now() ); } function createTweens( animation, props ) { jQuery.each( props, function( prop, value ) { var collection = ( tweeners[ prop ] || [] ).concat( tweeners[ "*" ] ), index = 0, length = collection.length; for ( ; index < length; index++ ) { if ( collection[ index ].call( animation, prop, value ) ) { // we're done with this property return; } } }); } function Animation( elem, properties, options ) { var result, index = 0, tweenerIndex = 0, length = animationPrefilters.length, deferred = jQuery.Deferred().always( function() { // don't match elem in the :animated selector delete tick.elem; }), tick = function() { var currentTime = fxNow || createFxNow(), remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ), // archaic crash bug won't allow us to use 1 - ( 0.5 || 0 ) (#12497) temp = remaining / animation.duration || 0, percent = 1 - temp, index = 0, length = animation.tweens.length; for ( ; index < length ; index++ ) { animation.tweens[ index ].run( percent ); } deferred.notifyWith( elem, [ animation, percent, remaining ]); if ( percent < 1 && length ) { return remaining; } else { deferred.resolveWith( elem, [ animation ] ); return false; } }, animation = deferred.promise({ elem: elem, props: jQuery.extend( {}, properties ), opts: jQuery.extend( true, { specialEasing: {} }, options ), originalProperties: properties, originalOptions: options, startTime: fxNow || createFxNow(), duration: options.duration, tweens: [], createTween: function( prop, end, easing ) { var tween = jQuery.Tween( elem, animation.opts, prop, end, animation.opts.specialEasing[ prop ] || animation.opts.easing ); animation.tweens.push( tween ); return tween; }, stop: function( gotoEnd ) { var index = 0, // if we are going to the end, we want to run all the tweens // otherwise we skip this part length = gotoEnd ? 
animation.tweens.length : 0; for ( ; index < length ; index++ ) { animation.tweens[ index ].run( 1 ); } // resolve when we played the last frame // otherwise, reject if ( gotoEnd ) { deferred.resolveWith( elem, [ animation, gotoEnd ] ); } else { deferred.rejectWith( elem, [ animation, gotoEnd ] ); } return this; } }), props = animation.props; propFilter( props, animation.opts.specialEasing ); for ( ; index < length ; index++ ) { result = animationPrefilters[ index ].call( animation, elem, props, animation.opts ); if ( result ) { return result; } } createTweens( animation, props ); if ( jQuery.isFunction( animation.opts.start ) ) { animation.opts.start.call( elem, animation ); } jQuery.fx.timer( jQuery.extend( tick, { anim: animation, queue: animation.opts.queue, elem: elem }) ); // attach callbacks from options return animation.progress( animation.opts.progress ) .done( animation.opts.done, animation.opts.complete ) .fail( animation.opts.fail ) .always( animation.opts.always ); } function propFilter( props, specialEasing ) { var index, name, easing, value, hooks; // camelCase, specialEasing and expand cssHook pass for ( index in props ) { name = jQuery.camelCase( index ); easing = specialEasing[ name ]; value = props[ index ]; if ( jQuery.isArray( value ) ) { easing = value[ 1 ]; value = props[ index ] = value[ 0 ]; } if ( index !== name ) { props[ name ] = value; delete props[ index ]; } hooks = jQuery.cssHooks[ name ]; if ( hooks && "expand" in hooks ) { value = hooks.expand( value ); delete props[ name ]; // not quite $.extend, this wont overwrite keys already present. // also - reusing 'index' from above because we have the correct "name" for ( index in value ) { if ( !( index in props ) ) { props[ index ] = value[ index ]; specialEasing[ index ] = easing; } } } else { specialEasing[ name ] = easing; } } } jQuery.Animation = jQuery.extend( Animation, { tweener: function( props, callback ) { if ( jQuery.isFunction( props ) ) { callback = props; props = [ "*" ]; } else { props = props.split(" "); } var prop, index = 0, length = props.length; for ( ; index < length ; index++ ) { prop = props[ index ]; tweeners[ prop ] = tweeners[ prop ] || []; tweeners[ prop ].unshift( callback ); } }, prefilter: function( callback, prepend ) { if ( prepend ) { animationPrefilters.unshift( callback ); } else { animationPrefilters.push( callback ); } } }); function defaultPrefilter( elem, props, opts ) { var index, prop, value, length, dataShow, toggle, tween, hooks, oldfire, anim = this, style = elem.style, orig = {}, handled = [], hidden = elem.nodeType && isHidden( elem ); // handle queue: false promises if ( !opts.queue ) { hooks = jQuery._queueHooks( elem, "fx" ); if ( hooks.unqueued == null ) { hooks.unqueued = 0; oldfire = hooks.empty.fire; hooks.empty.fire = function() { if ( !hooks.unqueued ) { oldfire(); } }; } hooks.unqueued++; anim.always(function() { // doing this makes sure that the complete handler will be called // before this completes anim.always(function() { hooks.unqueued--; if ( !jQuery.queue( elem, "fx" ).length ) { hooks.empty.fire(); } }); }); } // height/width overflow pass if ( elem.nodeType === 1 && ( "height" in props || "width" in props ) ) { // Make sure that nothing sneaks out // Record all 3 overflow attributes because IE does not // change the overflow attribute when overflowX and // overflowY are set to the same value opts.overflow = [ style.overflow, style.overflowX, style.overflowY ]; // Set display property to inline-block for height/width // animations on inline 
elements that are having width/height animated if ( jQuery.css( elem, "display" ) === "inline" && jQuery.css( elem, "float" ) === "none" ) { // inline-level elements accept inline-block; // block-level elements need to be inline with layout if ( !jQuery.support.inlineBlockNeedsLayout || css_defaultDisplay( elem.nodeName ) === "inline" ) { style.display = "inline-block"; } else { style.zoom = 1; } } } if ( opts.overflow ) { style.overflow = "hidden"; if ( !jQuery.support.shrinkWrapBlocks ) { anim.done(function() { style.overflow = opts.overflow[ 0 ]; style.overflowX = opts.overflow[ 1 ]; style.overflowY = opts.overflow[ 2 ]; }); } } // show/hide pass for ( index in props ) { value = props[ index ]; if ( rfxtypes.exec( value ) ) { delete props[ index ]; toggle = toggle || value === "toggle"; if ( value === ( hidden ? "hide" : "show" ) ) { continue; } handled.push( index ); } } length = handled.length; if ( length ) { dataShow = jQuery._data( elem, "fxshow" ) || jQuery._data( elem, "fxshow", {} ); if ( "hidden" in dataShow ) { hidden = dataShow.hidden; } // store state if its toggle - enables .stop().toggle() to "reverse" if ( toggle ) { dataShow.hidden = !hidden; } if ( hidden ) { jQuery( elem ).show(); } else { anim.done(function() { jQuery( elem ).hide(); }); } anim.done(function() { var prop; jQuery.removeData( elem, "fxshow", true ); for ( prop in orig ) { jQuery.style( elem, prop, orig[ prop ] ); } }); for ( index = 0 ; index < length ; index++ ) { prop = handled[ index ]; tween = anim.createTween( prop, hidden ? dataShow[ prop ] : 0 ); orig[ prop ] = dataShow[ prop ] || jQuery.style( elem, prop ); if ( !( prop in dataShow ) ) { dataShow[ prop ] = tween.start; if ( hidden ) { tween.end = tween.start; tween.start = prop === "width" || prop === "height" ? 1 : 0; } } } } } function Tween( elem, options, prop, end, easing ) { return new Tween.prototype.init( elem, options, prop, end, easing ); } jQuery.Tween = Tween; Tween.prototype = { constructor: Tween, init: function( elem, options, prop, end, easing, unit ) { this.elem = elem; this.prop = prop; this.easing = easing || "swing"; this.options = options; this.start = this.now = this.cur(); this.end = end; this.unit = unit || ( jQuery.cssNumber[ prop ] ? "" : "px" ); }, cur: function() { var hooks = Tween.propHooks[ this.prop ]; return hooks && hooks.get ? hooks.get( this ) : Tween.propHooks._default.get( this ); }, run: function( percent ) { var eased, hooks = Tween.propHooks[ this.prop ]; if ( this.options.duration ) { this.pos = eased = jQuery.easing[ this.easing ]( percent, this.options.duration * percent, 0, 1, this.options.duration ); } else { this.pos = eased = percent; } this.now = ( this.end - this.start ) * eased + this.start; if ( this.options.step ) { this.options.step.call( this.elem, this.now, this ); } if ( hooks && hooks.set ) { hooks.set( this ); } else { Tween.propHooks._default.set( this ); } return this; } }; Tween.prototype.init.prototype = Tween.prototype; Tween.propHooks = { _default: { get: function( tween ) { var result; if ( tween.elem[ tween.prop ] != null && (!tween.elem.style || tween.elem.style[ tween.prop ] == null) ) { return tween.elem[ tween.prop ]; } // passing any value as a 4th parameter to .css will automatically // attempt a parseFloat and fallback to a string if the parse fails // so, simple values such as "10px" are parsed to Float. // complex values such as "rotate(1rad)" are returned as is. 
result = jQuery.css( tween.elem, tween.prop, false, "" ); // Empty strings, null, undefined and "auto" are converted to 0. return !result || result === "auto" ? 0 : result; }, set: function( tween ) { // use step hook for back compat - use cssHook if its there - use .style if its // available and use plain properties where available if ( jQuery.fx.step[ tween.prop ] ) { jQuery.fx.step[ tween.prop ]( tween ); } else if ( tween.elem.style && ( tween.elem.style[ jQuery.cssProps[ tween.prop ] ] != null || jQuery.cssHooks[ tween.prop ] ) ) { jQuery.style( tween.elem, tween.prop, tween.now + tween.unit ); } else { tween.elem[ tween.prop ] = tween.now; } } } }; // Remove in 2.0 - this supports IE8's panic based approach // to setting things on disconnected nodes Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = { set: function( tween ) { if ( tween.elem.nodeType && tween.elem.parentNode ) { tween.elem[ tween.prop ] = tween.now; } } }; jQuery.each([ "toggle", "show", "hide" ], function( i, name ) { var cssFn = jQuery.fn[ name ]; jQuery.fn[ name ] = function( speed, easing, callback ) { return speed == null || typeof speed === "boolean" || // special check for .toggle( handler, handler, ... ) ( !i && jQuery.isFunction( speed ) && jQuery.isFunction( easing ) ) ? cssFn.apply( this, arguments ) : this.animate( genFx( name, true ), speed, easing, callback ); }; }); jQuery.fn.extend({ fadeTo: function( speed, to, easing, callback ) { // show any hidden elements after setting opacity to 0 return this.filter( isHidden ).css( "opacity", 0 ).show() // animate to the value specified .end().animate({ opacity: to }, speed, easing, callback ); }, animate: function( prop, speed, easing, callback ) { var empty = jQuery.isEmptyObject( prop ), optall = jQuery.speed( speed, easing, callback ), doAnimation = function() { // Operate on a copy of prop so per-property easing won't be lost var anim = Animation( this, jQuery.extend( {}, prop ), optall ); // Empty animations resolve immediately if ( empty ) { anim.stop( true ); } }; return empty || optall.queue === false ? 
this.each( doAnimation ) : this.queue( optall.queue, doAnimation ); }, stop: function( type, clearQueue, gotoEnd ) { var stopQueue = function( hooks ) { var stop = hooks.stop; delete hooks.stop; stop( gotoEnd ); }; if ( typeof type !== "string" ) { gotoEnd = clearQueue; clearQueue = type; type = undefined; } if ( clearQueue && type !== false ) { this.queue( type || "fx", [] ); } return this.each(function() { var dequeue = true, index = type != null && type + "queueHooks", timers = jQuery.timers, data = jQuery._data( this ); if ( index ) { if ( data[ index ] && data[ index ].stop ) { stopQueue( data[ index ] ); } } else { for ( index in data ) { if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) { stopQueue( data[ index ] ); } } } for ( index = timers.length; index--; ) { if ( timers[ index ].elem === this && (type == null || timers[ index ].queue === type) ) { timers[ index ].anim.stop( gotoEnd ); dequeue = false; timers.splice( index, 1 ); } } // start the next in the queue if the last step wasn't forced // timers currently will call their complete callbacks, which will dequeue // but only if they were gotoEnd if ( dequeue || !gotoEnd ) { jQuery.dequeue( this, type ); } }); } }); // Generate parameters to create a standard animation function genFx( type, includeWidth ) { var which, attrs = { height: type }, i = 0; // if we include width, step value is 1 to do all cssExpand values, // if we don't include width, step value is 2 to skip over Left and Right includeWidth = includeWidth? 1 : 0; for( ; i < 4 ; i += 2 - includeWidth ) { which = cssExpand[ i ]; attrs[ "margin" + which ] = attrs[ "padding" + which ] = type; } if ( includeWidth ) { attrs.opacity = attrs.width = type; } return attrs; } // Generate shortcuts for custom animations jQuery.each({ slideDown: genFx("show"), slideUp: genFx("hide"), slideToggle: genFx("toggle"), fadeIn: { opacity: "show" }, fadeOut: { opacity: "hide" }, fadeToggle: { opacity: "toggle" } }, function( name, props ) { jQuery.fn[ name ] = function( speed, easing, callback ) { return this.animate( props, speed, easing, callback ); }; }); jQuery.speed = function( speed, easing, fn ) { var opt = speed && typeof speed === "object" ? jQuery.extend( {}, speed ) : { complete: fn || !fn && easing || jQuery.isFunction( speed ) && speed, duration: speed, easing: fn && easing || easing && !jQuery.isFunction( easing ) && easing }; opt.duration = jQuery.fx.off ? 0 : typeof opt.duration === "number" ? opt.duration : opt.duration in jQuery.fx.speeds ? 
jQuery.fx.speeds[ opt.duration ] : jQuery.fx.speeds._default; // normalize opt.queue - true/undefined/null -> "fx" if ( opt.queue == null || opt.queue === true ) { opt.queue = "fx"; } // Queueing opt.old = opt.complete; opt.complete = function() { if ( jQuery.isFunction( opt.old ) ) { opt.old.call( this ); } if ( opt.queue ) { jQuery.dequeue( this, opt.queue ); } }; return opt; }; jQuery.easing = { linear: function( p ) { return p; }, swing: function( p ) { return 0.5 - Math.cos( p*Math.PI ) / 2; } }; jQuery.timers = []; jQuery.fx = Tween.prototype.init; jQuery.fx.tick = function() { var timer, timers = jQuery.timers, i = 0; fxNow = jQuery.now(); for ( ; i < timers.length; i++ ) { timer = timers[ i ]; // Checks the timer has not already been removed if ( !timer() && timers[ i ] === timer ) { timers.splice( i--, 1 ); } } if ( !timers.length ) { jQuery.fx.stop(); } fxNow = undefined; }; jQuery.fx.timer = function( timer ) { if ( timer() && jQuery.timers.push( timer ) && !timerId ) { timerId = setInterval( jQuery.fx.tick, jQuery.fx.interval ); } }; jQuery.fx.interval = 13; jQuery.fx.stop = function() { clearInterval( timerId ); timerId = null; }; jQuery.fx.speeds = { slow: 600, fast: 200, // Default speed _default: 400 }; // Back Compat <1.8 extension point jQuery.fx.step = {}; if ( jQuery.expr && jQuery.expr.filters ) { jQuery.expr.filters.animated = function( elem ) { return jQuery.grep(jQuery.timers, function( fn ) { return elem === fn.elem; }).length; }; } var rroot = /^(?:body|html)$/i; jQuery.fn.offset = function( options ) { if ( arguments.length ) { return options === undefined ? this : this.each(function( i ) { jQuery.offset.setOffset( this, options, i ); }); } var docElem, body, win, clientTop, clientLeft, scrollTop, scrollLeft, box = { top: 0, left: 0 }, elem = this[ 0 ], doc = elem && elem.ownerDocument; if ( !doc ) { return; } if ( (body = doc.body) === elem ) { return jQuery.offset.bodyOffset( elem ); } docElem = doc.documentElement; // Make sure it's not a disconnected DOM node if ( !jQuery.contains( docElem, elem ) ) { return box; } // If we don't have gBCR, just use 0,0 rather than error // BlackBerry 5, iOS 3 (original iPhone) if ( typeof elem.getBoundingClientRect !== "undefined" ) { box = elem.getBoundingClientRect(); } win = getWindow( doc ); clientTop = docElem.clientTop || body.clientTop || 0; clientLeft = docElem.clientLeft || body.clientLeft || 0; scrollTop = win.pageYOffset || docElem.scrollTop; scrollLeft = win.pageXOffset || docElem.scrollLeft; return { top: box.top + scrollTop - clientTop, left: box.left + scrollLeft - clientLeft }; }; jQuery.offset = { bodyOffset: function( body ) { var top = body.offsetTop, left = body.offsetLeft; if ( jQuery.support.doesNotIncludeMarginInBodyOffset ) { top += parseFloat( jQuery.css(body, "marginTop") ) || 0; left += parseFloat( jQuery.css(body, "marginLeft") ) || 0; } return { top: top, left: left }; }, setOffset: function( elem, options, i ) { var position = jQuery.css( elem, "position" ); // set position first, in-case top/left are set even on static elem if ( position === "static" ) { elem.style.position = "relative"; } var curElem = jQuery( elem ), curOffset = curElem.offset(), curCSSTop = jQuery.css( elem, "top" ), curCSSLeft = jQuery.css( elem, "left" ), calculatePosition = ( position === "absolute" || position === "fixed" ) && jQuery.inArray("auto", [curCSSTop, curCSSLeft]) > -1, props = {}, curPosition = {}, curTop, curLeft; // need to be able to calculate position if either top or left is auto and position is either 
absolute or fixed if ( calculatePosition ) { curPosition = curElem.position(); curTop = curPosition.top; curLeft = curPosition.left; } else { curTop = parseFloat( curCSSTop ) || 0; curLeft = parseFloat( curCSSLeft ) || 0; } if ( jQuery.isFunction( options ) ) { options = options.call( elem, i, curOffset ); } if ( options.top != null ) { props.top = ( options.top - curOffset.top ) + curTop; } if ( options.left != null ) { props.left = ( options.left - curOffset.left ) + curLeft; } if ( "using" in options ) { options.using.call( elem, props ); } else { curElem.css( props ); } } }; jQuery.fn.extend({ position: function() { if ( !this[0] ) { return; } var elem = this[0], // Get *real* offsetParent offsetParent = this.offsetParent(), // Get correct offsets offset = this.offset(), parentOffset = rroot.test(offsetParent[0].nodeName) ? { top: 0, left: 0 } : offsetParent.offset(); // Subtract element margins // note: when an element has margin: auto the offsetLeft and marginLeft // are the same in Safari causing offset.left to incorrectly be 0 offset.top -= parseFloat( jQuery.css(elem, "marginTop") ) || 0; offset.left -= parseFloat( jQuery.css(elem, "marginLeft") ) || 0; // Add offsetParent borders parentOffset.top += parseFloat( jQuery.css(offsetParent[0], "borderTopWidth") ) || 0; parentOffset.left += parseFloat( jQuery.css(offsetParent[0], "borderLeftWidth") ) || 0; // Subtract the two offsets return { top: offset.top - parentOffset.top, left: offset.left - parentOffset.left }; }, offsetParent: function() { return this.map(function() { var offsetParent = this.offsetParent || document.body; while ( offsetParent && (!rroot.test(offsetParent.nodeName) && jQuery.css(offsetParent, "position") === "static") ) { offsetParent = offsetParent.offsetParent; } return offsetParent || document.body; }); } }); // Create scrollLeft and scrollTop methods jQuery.each( {scrollLeft: "pageXOffset", scrollTop: "pageYOffset"}, function( method, prop ) { var top = /Y/.test( prop ); jQuery.fn[ method ] = function( val ) { return jQuery.access( this, function( elem, method, val ) { var win = getWindow( elem ); if ( val === undefined ) { return win ? (prop in win) ? win[ prop ] : win.document.documentElement[ method ] : elem[ method ]; } if ( win ) { win.scrollTo( !top ? val : jQuery( win ).scrollLeft(), top ? val : jQuery( win ).scrollTop() ); } else { elem[ method ] = val; } }, method, val, arguments.length, null ); }; }); function getWindow( elem ) { return jQuery.isWindow( elem ) ? elem : elem.nodeType === 9 ? elem.defaultView || elem.parentWindow : false; } // Create innerHeight, innerWidth, height, width, outerHeight and outerWidth methods jQuery.each( { Height: "height", Width: "width" }, function( name, type ) { jQuery.each( { padding: "inner" + name, content: type, "": "outer" + name }, function( defaultExtra, funcName ) { // margin is only for outerHeight, outerWidth jQuery.fn[ funcName ] = function( margin, value ) { var chainable = arguments.length && ( defaultExtra || typeof margin !== "boolean" ), extra = defaultExtra || ( margin === true || value === true ? "margin" : "border" ); return jQuery.access( this, function( elem, type, value ) { var doc; if ( jQuery.isWindow( elem ) ) { // As of 5/8/2012 this will yield incorrect results for Mobile Safari, but there // isn't a whole lot we can do. 
See pull request at this URL for discussion: // https://github.com/jquery/jquery/pull/764 return elem.document.documentElement[ "client" + name ]; } // Get document width or height if ( elem.nodeType === 9 ) { doc = elem.documentElement; // Either scroll[Width/Height] or offset[Width/Height] or client[Width/Height], whichever is greatest // unfortunately, this causes bug #3838 in IE6/8 only, but there is currently no good, small way to fix it. return Math.max( elem.body[ "scroll" + name ], doc[ "scroll" + name ], elem.body[ "offset" + name ], doc[ "offset" + name ], doc[ "client" + name ] ); } return value === undefined ? // Get width or height on the element, requesting but not forcing parseFloat jQuery.css( elem, type, value, extra ) : // Set width or height on the element jQuery.style( elem, type, value, extra ); }, type, chainable ? margin : undefined, chainable, null ); }; }); }); // Expose jQuery to the global object window.jQuery = window.$ = jQuery; // Expose jQuery as an AMD module, but only for AMD loaders that // understand the issues with loading multiple versions of jQuery // in a page that all might call define(). The loader will indicate // they have special allowances for multiple jQuery versions by // specifying define.amd.jQuery = true. Register as a named module, // since jQuery can be concatenated with other files that may use define, // but not use a proper concatenation script that understands anonymous // AMD modules. A named AMD is safest and most robust way to register. // Lowercase jquery is used because AMD module names are derived from // file names, and jQuery is normally delivered in a lowercase file name. // Do this after creating the global so that if an AMD module wants to call // noConflict to hide this version of jQuery, it will work. if ( typeof define === "function" && define.amd && define.amd.jQuery ) { define( "jquery", [], function () { return jQuery; } ); } })( window ); ceilometer-2014.1.5/ceilometer/api/v1/static/LICENSE.d3.v2.min.js0000664000567000056700000000262012540642615025014 0ustar jenkinsjenkins00000000000000Copyright (c) 2012, Michael Bostock All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * The name Michael Bostock may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
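The vendored jQuery build above ends with its effects (fx), offset/position and dimensions modules and the named AMD registration. As a quick orientation for readers skimming this bundled file, the following JavaScript sketch shows how the public entry points defined there (.animate(), .stop(), jQuery.Animation.tweener(), .offset(), .position(), and the "jquery" AMD module name) are typically exercised. The "#chart" selector and the require() loader call are illustrative assumptions and are not part of the Ceilometer tree.

// Illustrative sketch only -- it exercises APIs defined in the vendored jQuery source
// above. The "#chart" selector and the AMD require() call are assumptions, not part of
// this tree.
require(["jquery"], function ($) {

    // .animate() routes through jQuery.speed() and Animation(); the string "slow"
    // resolves to jQuery.fx.speeds.slow (600 ms) and "swing" is the default easing.
    $("#chart").animate({ opacity: 0.5, height: 200 }, "slow", "swing", function () {
        // .offset() returns document-relative coordinates; .position() is relative
        // to the offsetParent (both defined in the offset module above).
        var off = $(this).offset();
        var pos = $(this).position();
        console.log(off.top, off.left, pos.top, pos.left);
    });

    // Called later (e.g. from a separate handler), .stop(true, true) clears the "fx"
    // queue and runs every remaining tween to its end value -- the gotoEnd branch of
    // Animation.stop() above.
    $("#chart").stop(true, true);

    // jQuery.Animation.tweener() is the extension point shown in the code above: it
    // registers a per-property tween factory (here a plain pass-through).
    $.Animation.tweener("borderSpacing", function (prop, value) {
        return this.createTween(prop, value);
    });
});

Note that the named "jquery" module registration only happens when the AMD loader sets define.amd.jQuery, as the closing comment in the file explains.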
ceilometer-2014.1.5/ceilometer/api/v1/static/rickshaw.css0000664000567000056700000001421412540642615024306 0ustar jenkinsjenkins00000000000000.rickshaw_graph .detail { pointer-events: none; position: absolute; top: 0; z-index: 2; background: rgba(0, 0, 0, 0.1); bottom: 0; width: 1px; transition: opacity 0.25s linear; -moz-transition: opacity 0.25s linear; -o-transition: opacity 0.25s linear; -webkit-transition: opacity 0.25s linear; } .rickshaw_graph .detail.inactive { opacity: 0; } .rickshaw_graph .detail .item.active { opacity: 1; } .rickshaw_graph .detail .x_label { font-family: Arial, sans-serif; border-radius: 3px; padding: 6px; opacity: 0.5; border: 1px solid #e0e0e0; font-size: 12px; position: absolute; background: white; white-space: nowrap; } .rickshaw_graph .detail .item { position: absolute; z-index: 2; border-radius: 3px; padding: 0.25em; font-size: 12px; font-family: Arial, sans-serif; opacity: 0; background: rgba(0, 0, 0, 0.4); color: white; border: 1px solid rgba(0, 0, 0, 0.4); margin-left: 1em; margin-top: -1em; white-space: nowrap; } .rickshaw_graph .detail .item.active { opacity: 1; background: rgba(0, 0, 0, 0.8); } .rickshaw_graph .detail .item:before { content: "\25c2"; position: absolute; left: -0.5em; color: rgba(0, 0, 0, 0.7); width: 0; } .rickshaw_graph .detail .dot { width: 4px; height: 4px; margin-left: -4px; margin-top: -3px; border-radius: 5px; position: absolute; box-shadow: 0 0 2px rgba(0, 0, 0, 0.6); background: white; border-width: 2px; border-style: solid; display: none; background-clip: padding-box; } .rickshaw_graph .detail .dot.active { display: block; } /* graph */ .rickshaw_graph { position: relative; } .rickshaw_graph svg { display: block; overflow: hidden; } /* ticks */ .rickshaw_graph .x_tick { position: absolute; top: 0; bottom: 0; width: 0px; border-left: 1px dotted rgba(0, 0, 0, 0.2); pointer-events: none; } .rickshaw_graph .x_tick .title { position: absolute; font-size: 12px; font-family: Arial, sans-serif; opacity: 0.5; white-space: nowrap; margin-left: 3px; bottom: 1px; } /* annotations */ .rickshaw_annotation_timeline { height: 1px; border-top: 1px solid #e0e0e0; margin-top: 10px; position: relative; } .rickshaw_annotation_timeline .annotation { position: absolute; height: 6px; width: 6px; margin-left: -2px; top: -3px; border-radius: 5px; background-color: rgba(0, 0, 0, 0.25); } .rickshaw_graph .annotation_line { position: absolute; top: 0; bottom: -6px; width: 0px; border-left: 2px solid rgba(0, 0, 0, 0.3); display: none; } .rickshaw_graph .annotation_line.active { display: block; } .rickshaw_graph .annotation_range { background: rgba(0, 0, 0, 0.1); display: none; position: absolute; top: 0; bottom: -6px; z-index: -10; } .rickshaw_graph .annotation_range.active { display: block; } .rickshaw_graph .annotation_range.active.offscreen { display: none; } .rickshaw_annotation_timeline .annotation .content { background: white; color: black; opacity: 0.9; padding: 5px 5px; box-shadow: 0 0 2px rgba(0, 0, 0, 0.8); border-radius: 3px; position: relative; z-index: 20; font-size: 12px; padding: 6px 8px 8px; top: 18px; left: -11px; width: 160px; display: none; cursor: pointer; } .rickshaw_annotation_timeline .annotation .content:before { content: "\25b2"; position: absolute; top: -11px; color: white; text-shadow: 0 -1px 1px rgba(0, 0, 0, 0.8); } .rickshaw_annotation_timeline .annotation.active, .rickshaw_annotation_timeline .annotation:hover { background-color: rgba(0, 0, 0, 0.8); cursor: none; } .rickshaw_annotation_timeline .annotation .content:hover { 
z-index: 50; } .rickshaw_annotation_timeline .annotation.active .content { display: block; } .rickshaw_annotation_timeline .annotation:hover .content { display: block; z-index: 50; } .rickshaw_graph .y_axis { fill: none; } .rickshaw_graph .y_ticks .tick { stroke: rgba(0, 0, 0, 0.16); stroke-width: 2px; shape-rendering: crisp-edges; pointer-events: none; } .rickshaw_graph .y_grid .tick { z-index: -1; stroke: rgba(0, 0, 0, 0.20); stroke-width: 1px; stroke-dasharray: 1 1; } .rickshaw_graph .y_grid path { fill: none; stroke: none; } .rickshaw_graph .y_ticks path { fill: none; stroke: #808080; } .rickshaw_graph .y_ticks text { opacity: 0.5; font-size: 12px; pointer-events: none; } .rickshaw_graph .x_tick.glow .title, .rickshaw_graph .y_ticks.glow text { fill: black; color: black; text-shadow: -1px 1px 0 rgba(255, 255, 255, 0.1), 1px -1px 0 rgba(255, 255, 255, 0.1), 1px 1px 0 rgba(255, 255, 255, 0.1), 0px 1px 0 rgba(255, 255, 255, 0.1), 0px -1px 0 rgba(255, 255, 255, 0.1), 1px 0px 0 rgba(255, 255, 255, 0.1), -1px 0px 0 rgba(255, 255, 255, 0.1), -1px -1px 0 rgba(255, 255, 255, 0.1); } .rickshaw_graph .x_tick.inverse .title, .rickshaw_graph .y_ticks.inverse text { fill: white; color: white; text-shadow: -1px 1px 0 rgba(0, 0, 0, 0.8), 1px -1px 0 rgba(0, 0, 0, 0.8), 1px 1px 0 rgba(0, 0, 0, 0.8), 0px 1px 0 rgba(0, 0, 0, 0.8), 0px -1px 0 rgba(0, 0, 0, 0.8), 1px 0px 0 rgba(0, 0, 0, 0.8), -1px 0px 0 rgba(0, 0, 0, 0.8), -1px -1px 0 rgba(0, 0, 0, 0.8); } .rickshaw_legend { font-family: Arial; font-size: 12px; color: white; background: #404040; display: inline-block; padding: 12px 5px; border-radius: 2px; position: relative; } .rickshaw_legend:hover { z-index: 10; } .rickshaw_legend .swatch { width: 10px; height: 10px; border: 1px solid rgba(0, 0, 0, 0.2); } .rickshaw_legend .line { clear: both; line-height: 140%; padding-right: 15px; } .rickshaw_legend .line .swatch { display: inline-block; margin-right: 3px; border-radius: 2px; } .rickshaw_legend .label { white-space: nowrap; display: inline; } .rickshaw_legend .action:hover { opacity: 0.6; } .rickshaw_legend .action { margin-right: 0.2em; font-size: 10px; opacity: 0.2; cursor: pointer; font-size: 14px; } .rickshaw_legend .line.disabled { opacity: 0.4; } .rickshaw_legend ul { list-style-type: none; margin: 0; padding: 0; margin: 2px; cursor: pointer; } .rickshaw_legend li { padding: 0 0 0 2px; min-width: 80px; white-space: nowrap; } .rickshaw_legend li:hover { background: rgba(255, 255, 255, 0.08); border-radius: 3px; } .rickshaw_legend li:active { background: rgba(255, 255, 255, 0.2); border-radius: 3px; }
ceilometer-2014.1.5/ceilometer/api/v1/static/bootstrap/0000775000567000056700000000000012540643223023770 5ustar jenkinsjenkins00000000000000
ceilometer-2014.1.5/ceilometer/api/v1/static/bootstrap/img/0000775000567000056700000000000012540643223024544 5ustar jenkinsjenkins00000000000000
ceilometer-2014.1.5/ceilometer/api/v1/static/bootstrap/img/glyphicons-halflings.png0000664000567000056700000003077712540642615031410 0ustar jenkinsjenkins00000000000000[binary PNG data omitted: Bootstrap glyphicons-halflings icon sprite sheet]
ceilometer-2014.1.5/ceilometer/api/v1/static/bootstrap/img/glyphicons-halflings-white.png0000664000567000056700000002111112540642615032514 0ustar jenkinsjenkins00000000000000[binary PNG data omitted: white variant of the glyphicons-halflings icon sprite sheet]
ceilometer-2014.1.5/ceilometer/api/v1/static/bootstrap/css/0000775000567000056700000000000012540643223024560 5ustar jenkinsjenkins00000000000000
ceilometer-2014.1.5/ceilometer/api/v1/static/bootstrap/css/bootstrap.css0000664000567000056700000035547712540642615027330 0ustar jenkinsjenkins00000000000000/*! * Bootstrap v2.2.1 * * Copyright 2012 Twitter, Inc * Licensed under the Apache License v2.0 * http://www.apache.org/licenses/LICENSE-2.0 * * Designed and built with all the love in the world @twitter by @mdo and @fat.
*/ article, aside, details, figcaption, figure, footer, header, hgroup, nav, section { display: block; } audio, canvas, video { display: inline-block; *display: inline; *zoom: 1; } audio:not([controls]) { display: none; } html { font-size: 100%; -webkit-text-size-adjust: 100%; -ms-text-size-adjust: 100%; } a:focus { outline: thin dotted #333; outline: 5px auto -webkit-focus-ring-color; outline-offset: -2px; } a:hover, a:active { outline: 0; } sub, sup { position: relative; font-size: 75%; line-height: 0; vertical-align: baseline; } sup { top: -0.5em; } sub { bottom: -0.25em; } img { width: auto\9; height: auto; max-width: 100%; vertical-align: middle; border: 0; -ms-interpolation-mode: bicubic; } #map_canvas img, .google-maps img { max-width: none; } button, input, select, textarea { margin: 0; font-size: 100%; vertical-align: middle; } button, input { *overflow: visible; line-height: normal; } button::-moz-focus-inner, input::-moz-focus-inner { padding: 0; border: 0; } button, html input[type="button"], input[type="reset"], input[type="submit"] { cursor: pointer; -webkit-appearance: button; } input[type="search"] { -webkit-box-sizing: content-box; -moz-box-sizing: content-box; box-sizing: content-box; -webkit-appearance: textfield; } input[type="search"]::-webkit-search-decoration, input[type="search"]::-webkit-search-cancel-button { -webkit-appearance: none; } textarea { overflow: auto; vertical-align: top; } .clearfix { *zoom: 1; } .clearfix:before, .clearfix:after { display: table; line-height: 0; content: ""; } .clearfix:after { clear: both; } .hide-text { font: 0/0 a; color: transparent; text-shadow: none; background-color: transparent; border: 0; } .input-block-level { display: block; width: 100%; min-height: 30px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; } body { margin: 0; font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; font-size: 14px; line-height: 20px; color: #333333; background-color: #ffffff; } a { color: #0088cc; text-decoration: none; } a:hover { color: #005580; text-decoration: underline; } .img-rounded { -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; } .img-polaroid { padding: 4px; background-color: #fff; border: 1px solid #ccc; border: 1px solid rgba(0, 0, 0, 0.2); -webkit-box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1); -moz-box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1); box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1); } .img-circle { -webkit-border-radius: 500px; -moz-border-radius: 500px; border-radius: 500px; } .row { margin-left: -20px; *zoom: 1; } .row:before, .row:after { display: table; line-height: 0; content: ""; } .row:after { clear: both; } [class*="span"] { float: left; min-height: 1px; margin-left: 20px; } .container, .navbar-static-top .container, .navbar-fixed-top .container, .navbar-fixed-bottom .container { width: 940px; } .span12 { width: 940px; } .span11 { width: 860px; } .span10 { width: 780px; } .span9 { width: 700px; } .span8 { width: 620px; } .span7 { width: 540px; } .span6 { width: 460px; } .span5 { width: 380px; } .span4 { width: 300px; } .span3 { width: 220px; } .span2 { width: 140px; } .span1 { width: 60px; } .offset12 { margin-left: 980px; } .offset11 { margin-left: 900px; } .offset10 { margin-left: 820px; } .offset9 { margin-left: 740px; } .offset8 { margin-left: 660px; } .offset7 { margin-left: 580px; } .offset6 { margin-left: 500px; } .offset5 { margin-left: 420px; } .offset4 { margin-left: 340px; } .offset3 { margin-left: 260px; } .offset2 { margin-left: 180px; } .offset1 { 
margin-left: 100px; } .row-fluid { width: 100%; *zoom: 1; } .row-fluid:before, .row-fluid:after { display: table; line-height: 0; content: ""; } .row-fluid:after { clear: both; } .row-fluid [class*="span"] { display: block; float: left; width: 100%; min-height: 30px; margin-left: 2.127659574468085%; *margin-left: 2.074468085106383%; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; } .row-fluid [class*="span"]:first-child { margin-left: 0; } .row-fluid .controls-row [class*="span"] + [class*="span"] { margin-left: 2.127659574468085%; } .row-fluid .span12 { width: 100%; *width: 99.94680851063829%; } .row-fluid .span11 { width: 91.48936170212765%; *width: 91.43617021276594%; } .row-fluid .span10 { width: 82.97872340425532%; *width: 82.92553191489361%; } .row-fluid .span9 { width: 74.46808510638297%; *width: 74.41489361702126%; } .row-fluid .span8 { width: 65.95744680851064%; *width: 65.90425531914893%; } .row-fluid .span7 { width: 57.44680851063829%; *width: 57.39361702127659%; } .row-fluid .span6 { width: 48.93617021276595%; *width: 48.88297872340425%; } .row-fluid .span5 { width: 40.42553191489362%; *width: 40.37234042553192%; } .row-fluid .span4 { width: 31.914893617021278%; *width: 31.861702127659576%; } .row-fluid .span3 { width: 23.404255319148934%; *width: 23.351063829787233%; } .row-fluid .span2 { width: 14.893617021276595%; *width: 14.840425531914894%; } .row-fluid .span1 { width: 6.382978723404255%; *width: 6.329787234042553%; } .row-fluid .offset12 { margin-left: 104.25531914893617%; *margin-left: 104.14893617021275%; } .row-fluid .offset12:first-child { margin-left: 102.12765957446808%; *margin-left: 102.02127659574467%; } .row-fluid .offset11 { margin-left: 95.74468085106382%; *margin-left: 95.6382978723404%; } .row-fluid .offset11:first-child { margin-left: 93.61702127659574%; *margin-left: 93.51063829787232%; } .row-fluid .offset10 { margin-left: 87.23404255319149%; *margin-left: 87.12765957446807%; } .row-fluid .offset10:first-child { margin-left: 85.1063829787234%; *margin-left: 84.99999999999999%; } .row-fluid .offset9 { margin-left: 78.72340425531914%; *margin-left: 78.61702127659572%; } .row-fluid .offset9:first-child { margin-left: 76.59574468085106%; *margin-left: 76.48936170212764%; } .row-fluid .offset8 { margin-left: 70.2127659574468%; *margin-left: 70.10638297872339%; } .row-fluid .offset8:first-child { margin-left: 68.08510638297872%; *margin-left: 67.9787234042553%; } .row-fluid .offset7 { margin-left: 61.70212765957446%; *margin-left: 61.59574468085106%; } .row-fluid .offset7:first-child { margin-left: 59.574468085106375%; *margin-left: 59.46808510638297%; } .row-fluid .offset6 { margin-left: 53.191489361702125%; *margin-left: 53.085106382978715%; } .row-fluid .offset6:first-child { margin-left: 51.063829787234035%; *margin-left: 50.95744680851063%; } .row-fluid .offset5 { margin-left: 44.68085106382979%; *margin-left: 44.57446808510638%; } .row-fluid .offset5:first-child { margin-left: 42.5531914893617%; *margin-left: 42.4468085106383%; } .row-fluid .offset4 { margin-left: 36.170212765957444%; *margin-left: 36.06382978723405%; } .row-fluid .offset4:first-child { margin-left: 34.04255319148936%; *margin-left: 33.93617021276596%; } .row-fluid .offset3 { margin-left: 27.659574468085104%; *margin-left: 27.5531914893617%; } .row-fluid .offset3:first-child { margin-left: 25.53191489361702%; *margin-left: 25.425531914893618%; } .row-fluid .offset2 { margin-left: 19.148936170212764%; *margin-left: 19.04255319148936%; } .row-fluid 
.offset2:first-child { margin-left: 17.02127659574468%; *margin-left: 16.914893617021278%; } .row-fluid .offset1 { margin-left: 10.638297872340425%; *margin-left: 10.53191489361702%; } .row-fluid .offset1:first-child { margin-left: 8.51063829787234%; *margin-left: 8.404255319148938%; } [class*="span"].hide, .row-fluid [class*="span"].hide { display: none; } [class*="span"].pull-right, .row-fluid [class*="span"].pull-right { float: right; } .container { margin-right: auto; margin-left: auto; *zoom: 1; } .container:before, .container:after { display: table; line-height: 0; content: ""; } .container:after { clear: both; } .container-fluid { padding-right: 20px; padding-left: 20px; *zoom: 1; } .container-fluid:before, .container-fluid:after { display: table; line-height: 0; content: ""; } .container-fluid:after { clear: both; } p { margin: 0 0 10px; } .lead { margin-bottom: 20px; font-size: 21px; font-weight: 200; line-height: 30px; } small { font-size: 85%; } strong { font-weight: bold; } em { font-style: italic; } cite { font-style: normal; } .muted { color: #999999; } .text-warning { color: #c09853; } a.text-warning:hover { color: #a47e3c; } .text-error { color: #b94a48; } a.text-error:hover { color: #953b39; } .text-info { color: #3a87ad; } a.text-info:hover { color: #2d6987; } .text-success { color: #468847; } a.text-success:hover { color: #356635; } h1, h2, h3, h4, h5, h6 { margin: 10px 0; font-family: inherit; font-weight: bold; line-height: 20px; color: inherit; text-rendering: optimizelegibility; } h1 small, h2 small, h3 small, h4 small, h5 small, h6 small { font-weight: normal; line-height: 1; color: #999999; } h1, h2, h3 { line-height: 40px; } h1 { font-size: 38.5px; } h2 { font-size: 31.5px; } h3 { font-size: 24.5px; } h4 { font-size: 17.5px; } h5 { font-size: 14px; } h6 { font-size: 11.9px; } h1 small { font-size: 24.5px; } h2 small { font-size: 17.5px; } h3 small { font-size: 14px; } h4 small { font-size: 14px; } .page-header { padding-bottom: 9px; margin: 20px 0 30px; border-bottom: 1px solid #eeeeee; } ul, ol { padding: 0; margin: 0 0 10px 25px; } ul ul, ul ol, ol ol, ol ul { margin-bottom: 0; } li { line-height: 20px; } ul.unstyled, ol.unstyled { margin-left: 0; list-style: none; } dl { margin-bottom: 20px; } dt, dd { line-height: 20px; } dt { font-weight: bold; } dd { margin-left: 10px; } .dl-horizontal { *zoom: 1; } .dl-horizontal:before, .dl-horizontal:after { display: table; line-height: 0; content: ""; } .dl-horizontal:after { clear: both; } .dl-horizontal dt { float: left; width: 160px; overflow: hidden; clear: left; text-align: right; text-overflow: ellipsis; white-space: nowrap; } .dl-horizontal dd { margin-left: 180px; } hr { margin: 20px 0; border: 0; border-top: 1px solid #eeeeee; border-bottom: 1px solid #ffffff; } abbr[title], abbr[data-original-title] { cursor: help; border-bottom: 1px dotted #999999; } abbr.initialism { font-size: 90%; text-transform: uppercase; } blockquote { padding: 0 0 0 15px; margin: 0 0 20px; border-left: 5px solid #eeeeee; } blockquote p { margin-bottom: 0; font-size: 16px; font-weight: 300; line-height: 25px; } blockquote small { display: block; line-height: 20px; color: #999999; } blockquote small:before { content: '\2014 \00A0'; } blockquote.pull-right { float: right; padding-right: 15px; padding-left: 0; border-right: 5px solid #eeeeee; border-left: 0; } blockquote.pull-right p, blockquote.pull-right small { text-align: right; } blockquote.pull-right small:before { content: ''; } blockquote.pull-right small:after { content: '\00A0 
\2014'; } q:before, q:after, blockquote:before, blockquote:after { content: ""; } address { display: block; margin-bottom: 20px; font-style: normal; line-height: 20px; } code, pre { padding: 0 3px 2px; font-family: Monaco, Menlo, Consolas, "Courier New", monospace; font-size: 12px; color: #333333; -webkit-border-radius: 3px; -moz-border-radius: 3px; border-radius: 3px; } code { padding: 2px 4px; color: #d14; background-color: #f7f7f9; border: 1px solid #e1e1e8; } pre { display: block; padding: 9.5px; margin: 0 0 10px; font-size: 13px; line-height: 20px; word-break: break-all; word-wrap: break-word; white-space: pre; white-space: pre-wrap; background-color: #f5f5f5; border: 1px solid #ccc; border: 1px solid rgba(0, 0, 0, 0.15); -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; } pre.prettyprint { margin-bottom: 20px; } pre code { padding: 0; color: inherit; background-color: transparent; border: 0; } .pre-scrollable { max-height: 340px; overflow-y: scroll; } form { margin: 0 0 20px; } fieldset { padding: 0; margin: 0; border: 0; } legend { display: block; width: 100%; padding: 0; margin-bottom: 20px; font-size: 21px; line-height: 40px; color: #333333; border: 0; border-bottom: 1px solid #e5e5e5; } legend small { font-size: 15px; color: #999999; } label, input, button, select, textarea { font-size: 14px; font-weight: normal; line-height: 20px; } input, button, select, textarea { font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; } label { display: block; margin-bottom: 5px; } select, textarea, input[type="text"], input[type="password"], input[type="datetime"], input[type="datetime-local"], input[type="date"], input[type="month"], input[type="time"], input[type="week"], input[type="number"], input[type="email"], input[type="url"], input[type="search"], input[type="tel"], input[type="color"], .uneditable-input { display: inline-block; height: 20px; padding: 4px 6px; margin-bottom: 10px; font-size: 14px; line-height: 20px; color: #555555; vertical-align: middle; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; } input, textarea, .uneditable-input { width: 206px; } textarea { height: auto; } textarea, input[type="text"], input[type="password"], input[type="datetime"], input[type="datetime-local"], input[type="date"], input[type="month"], input[type="time"], input[type="week"], input[type="number"], input[type="email"], input[type="url"], input[type="search"], input[type="tel"], input[type="color"], .uneditable-input { background-color: #ffffff; border: 1px solid #cccccc; -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); -webkit-transition: border linear 0.2s, box-shadow linear 0.2s; -moz-transition: border linear 0.2s, box-shadow linear 0.2s; -o-transition: border linear 0.2s, box-shadow linear 0.2s; transition: border linear 0.2s, box-shadow linear 0.2s; } textarea:focus, input[type="text"]:focus, input[type="password"]:focus, input[type="datetime"]:focus, input[type="datetime-local"]:focus, input[type="date"]:focus, input[type="month"]:focus, input[type="time"]:focus, input[type="week"]:focus, input[type="number"]:focus, input[type="email"]:focus, input[type="url"]:focus, input[type="search"]:focus, input[type="tel"]:focus, input[type="color"]:focus, .uneditable-input:focus { border-color: rgba(82, 168, 236, 0.8); outline: 0; outline: thin dotted \9; /* IE6-9 */ -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px 
rgba(82, 168, 236, 0.6); -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px rgba(82, 168, 236, 0.6); box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px rgba(82, 168, 236, 0.6); } input[type="radio"], input[type="checkbox"] { margin: 4px 0 0; margin-top: 1px \9; *margin-top: 0; line-height: normal; cursor: pointer; } input[type="file"], input[type="image"], input[type="submit"], input[type="reset"], input[type="button"], input[type="radio"], input[type="checkbox"] { width: auto; } select, input[type="file"] { height: 30px; /* In IE7, the height of the select element cannot be changed by height, only font-size */ *margin-top: 4px; /* For IE7, add top margin to align select with labels */ line-height: 30px; } select { width: 220px; background-color: #ffffff; border: 1px solid #cccccc; } select[multiple], select[size] { height: auto; } select:focus, input[type="file"]:focus, input[type="radio"]:focus, input[type="checkbox"]:focus { outline: thin dotted #333; outline: 5px auto -webkit-focus-ring-color; outline-offset: -2px; } .uneditable-input, .uneditable-textarea { color: #999999; cursor: not-allowed; background-color: #fcfcfc; border-color: #cccccc; -webkit-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.025); -moz-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.025); box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.025); } .uneditable-input { overflow: hidden; white-space: nowrap; } .uneditable-textarea { width: auto; height: auto; } input:-moz-placeholder, textarea:-moz-placeholder { color: #999999; } input:-ms-input-placeholder, textarea:-ms-input-placeholder { color: #999999; } input::-webkit-input-placeholder, textarea::-webkit-input-placeholder { color: #999999; } .radio, .checkbox { min-height: 20px; padding-left: 20px; } .radio input[type="radio"], .checkbox input[type="checkbox"] { float: left; margin-left: -20px; } .controls > .radio:first-child, .controls > .checkbox:first-child { padding-top: 5px; } .radio.inline, .checkbox.inline { display: inline-block; padding-top: 5px; margin-bottom: 0; vertical-align: middle; } .radio.inline + .radio.inline, .checkbox.inline + .checkbox.inline { margin-left: 10px; } .input-mini { width: 60px; } .input-small { width: 90px; } .input-medium { width: 150px; } .input-large { width: 210px; } .input-xlarge { width: 270px; } .input-xxlarge { width: 530px; } input[class*="span"], select[class*="span"], textarea[class*="span"], .uneditable-input[class*="span"], .row-fluid input[class*="span"], .row-fluid select[class*="span"], .row-fluid textarea[class*="span"], .row-fluid .uneditable-input[class*="span"] { float: none; margin-left: 0; } .input-append input[class*="span"], .input-append .uneditable-input[class*="span"], .input-prepend input[class*="span"], .input-prepend .uneditable-input[class*="span"], .row-fluid input[class*="span"], .row-fluid select[class*="span"], .row-fluid textarea[class*="span"], .row-fluid .uneditable-input[class*="span"], .row-fluid .input-prepend [class*="span"], .row-fluid .input-append [class*="span"] { display: inline-block; } input, textarea, .uneditable-input { margin-left: 0; } .controls-row [class*="span"] + [class*="span"] { margin-left: 20px; } input.span12, textarea.span12, .uneditable-input.span12 { width: 926px; } input.span11, textarea.span11, .uneditable-input.span11 { width: 846px; } input.span10, textarea.span10, .uneditable-input.span10 { width: 766px; } input.span9, textarea.span9, .uneditable-input.span9 { width: 686px; } input.span8, textarea.span8, .uneditable-input.span8 { width: 606px; } 
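A side note on the numbers in the fixed-grid and input-sizing rules above: they all derive from a 940 px, 12-column grid with 60 px columns and 20 px gutters, with text inputs losing a further 14 px to horizontal padding and borders. The short JavaScript sketch below simply reproduces that arithmetic; it is illustrative only and not part of the Bootstrap stylesheet.

// Bootstrap 2.2.1 fixed-grid arithmetic, matching the rules above (illustrative only).
var COLUMN = 60;        // .span1 width
var GUTTER = 20;        // left margin between [class*="span"] columns
var INPUT_CHROME = 14;  // 2 x 6px padding + 2 x 1px border on text inputs

function spanWidth(n)      { return n * COLUMN + (n - 1) * GUTTER; }   // .spanN width
function offsetMargin(n)   { return n * (COLUMN + GUTTER) + GUTTER; }  // .offsetN margin-left
function inputSpanWidth(n) { return spanWidth(n) - INPUT_CHROME; }     // input.spanN width

console.log(spanWidth(12));      // 940 -> .span12 and .container
console.log(spanWidth(4));       // 300 -> .span4
console.log(offsetMargin(4));    // 340 -> .offset4
console.log(inputSpanWidth(3));  // 206 -> input.span3 (also the default input width)

The fluid grid earlier in the file expresses the same ratios as percentages of the row width.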
input.span7, textarea.span7, .uneditable-input.span7 { width: 526px; } input.span6, textarea.span6, .uneditable-input.span6 { width: 446px; } input.span5, textarea.span5, .uneditable-input.span5 { width: 366px; } input.span4, textarea.span4, .uneditable-input.span4 { width: 286px; } input.span3, textarea.span3, .uneditable-input.span3 { width: 206px; } input.span2, textarea.span2, .uneditable-input.span2 { width: 126px; } input.span1, textarea.span1, .uneditable-input.span1 { width: 46px; } .controls-row { *zoom: 1; } .controls-row:before, .controls-row:after { display: table; line-height: 0; content: ""; } .controls-row:after { clear: both; } .controls-row [class*="span"], .row-fluid .controls-row [class*="span"] { float: left; } .controls-row .checkbox[class*="span"], .controls-row .radio[class*="span"] { padding-top: 5px; } input[disabled], select[disabled], textarea[disabled], input[readonly], select[readonly], textarea[readonly] { cursor: not-allowed; background-color: #eeeeee; } input[type="radio"][disabled], input[type="checkbox"][disabled], input[type="radio"][readonly], input[type="checkbox"][readonly] { background-color: transparent; } .control-group.warning > label, .control-group.warning .help-block, .control-group.warning .help-inline { color: #c09853; } .control-group.warning .checkbox, .control-group.warning .radio, .control-group.warning input, .control-group.warning select, .control-group.warning textarea { color: #c09853; } .control-group.warning input, .control-group.warning select, .control-group.warning textarea { border-color: #c09853; -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); } .control-group.warning input:focus, .control-group.warning select:focus, .control-group.warning textarea:focus { border-color: #a47e3c; -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #dbc59e; -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #dbc59e; box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #dbc59e; } .control-group.warning .input-prepend .add-on, .control-group.warning .input-append .add-on { color: #c09853; background-color: #fcf8e3; border-color: #c09853; } .control-group.error > label, .control-group.error .help-block, .control-group.error .help-inline { color: #b94a48; } .control-group.error .checkbox, .control-group.error .radio, .control-group.error input, .control-group.error select, .control-group.error textarea { color: #b94a48; } .control-group.error input, .control-group.error select, .control-group.error textarea { border-color: #b94a48; -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); } .control-group.error input:focus, .control-group.error select:focus, .control-group.error textarea:focus { border-color: #953b39; -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #d59392; -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #d59392; box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #d59392; } .control-group.error .input-prepend .add-on, .control-group.error .input-append .add-on { color: #b94a48; background-color: #f2dede; border-color: #b94a48; } .control-group.success > label, .control-group.success .help-block, .control-group.success .help-inline { color: #468847; } .control-group.success .checkbox, .control-group.success .radio, .control-group.success input, 
.control-group.success select, .control-group.success textarea { color: #468847; } .control-group.success input, .control-group.success select, .control-group.success textarea { border-color: #468847; -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); } .control-group.success input:focus, .control-group.success select:focus, .control-group.success textarea:focus { border-color: #356635; -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7aba7b; -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7aba7b; box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7aba7b; } .control-group.success .input-prepend .add-on, .control-group.success .input-append .add-on { color: #468847; background-color: #dff0d8; border-color: #468847; } .control-group.info > label, .control-group.info .help-block, .control-group.info .help-inline { color: #3a87ad; } .control-group.info .checkbox, .control-group.info .radio, .control-group.info input, .control-group.info select, .control-group.info textarea { color: #3a87ad; } .control-group.info input, .control-group.info select, .control-group.info textarea { border-color: #3a87ad; -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); } .control-group.info input:focus, .control-group.info select:focus, .control-group.info textarea:focus { border-color: #2d6987; -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7ab5d3; -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7ab5d3; box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7ab5d3; } .control-group.info .input-prepend .add-on, .control-group.info .input-append .add-on { color: #3a87ad; background-color: #d9edf7; border-color: #3a87ad; } input:focus:required:invalid, textarea:focus:required:invalid, select:focus:required:invalid { color: #b94a48; border-color: #ee5f5b; } input:focus:required:invalid:focus, textarea:focus:required:invalid:focus, select:focus:required:invalid:focus { border-color: #e9322d; -webkit-box-shadow: 0 0 6px #f8b9b7; -moz-box-shadow: 0 0 6px #f8b9b7; box-shadow: 0 0 6px #f8b9b7; } .form-actions { padding: 19px 20px 20px; margin-top: 20px; margin-bottom: 20px; background-color: #f5f5f5; border-top: 1px solid #e5e5e5; *zoom: 1; } .form-actions:before, .form-actions:after { display: table; line-height: 0; content: ""; } .form-actions:after { clear: both; } .help-block, .help-inline { color: #595959; } .help-block { display: block; margin-bottom: 10px; } .help-inline { display: inline-block; *display: inline; padding-left: 5px; vertical-align: middle; *zoom: 1; } .input-append, .input-prepend { margin-bottom: 5px; font-size: 0; white-space: nowrap; } .input-append input, .input-prepend input, .input-append select, .input-prepend select, .input-append .uneditable-input, .input-prepend .uneditable-input, .input-append .dropdown-menu, .input-prepend .dropdown-menu { font-size: 14px; } .input-append input, .input-prepend input, .input-append select, .input-prepend select, .input-append .uneditable-input, .input-prepend .uneditable-input { position: relative; margin-bottom: 0; *margin-left: 0; vertical-align: top; -webkit-border-radius: 0 4px 4px 0; -moz-border-radius: 0 4px 4px 0; border-radius: 0 4px 4px 0; } .input-append input:focus, .input-prepend input:focus, .input-append select:focus, .input-prepend 
select:focus, .input-append .uneditable-input:focus, .input-prepend .uneditable-input:focus { z-index: 2; } .input-append .add-on, .input-prepend .add-on { display: inline-block; width: auto; height: 20px; min-width: 16px; padding: 4px 5px; font-size: 14px; font-weight: normal; line-height: 20px; text-align: center; text-shadow: 0 1px 0 #ffffff; background-color: #eeeeee; border: 1px solid #ccc; } .input-append .add-on, .input-prepend .add-on, .input-append .btn, .input-prepend .btn { vertical-align: top; -webkit-border-radius: 0; -moz-border-radius: 0; border-radius: 0; } .input-append .active, .input-prepend .active { background-color: #a9dba9; border-color: #46a546; } .input-prepend .add-on, .input-prepend .btn { margin-right: -1px; } .input-prepend .add-on:first-child, .input-prepend .btn:first-child { -webkit-border-radius: 4px 0 0 4px; -moz-border-radius: 4px 0 0 4px; border-radius: 4px 0 0 4px; } .input-append input, .input-append select, .input-append .uneditable-input { -webkit-border-radius: 4px 0 0 4px; -moz-border-radius: 4px 0 0 4px; border-radius: 4px 0 0 4px; } .input-append input + .btn-group .btn, .input-append select + .btn-group .btn, .input-append .uneditable-input + .btn-group .btn { -webkit-border-radius: 0 4px 4px 0; -moz-border-radius: 0 4px 4px 0; border-radius: 0 4px 4px 0; } .input-append .add-on, .input-append .btn, .input-append .btn-group { margin-left: -1px; } .input-append .add-on:last-child, .input-append .btn:last-child { -webkit-border-radius: 0 4px 4px 0; -moz-border-radius: 0 4px 4px 0; border-radius: 0 4px 4px 0; } .input-prepend.input-append input, .input-prepend.input-append select, .input-prepend.input-append .uneditable-input { -webkit-border-radius: 0; -moz-border-radius: 0; border-radius: 0; } .input-prepend.input-append input + .btn-group .btn, .input-prepend.input-append select + .btn-group .btn, .input-prepend.input-append .uneditable-input + .btn-group .btn { -webkit-border-radius: 0 4px 4px 0; -moz-border-radius: 0 4px 4px 0; border-radius: 0 4px 4px 0; } .input-prepend.input-append .add-on:first-child, .input-prepend.input-append .btn:first-child { margin-right: -1px; -webkit-border-radius: 4px 0 0 4px; -moz-border-radius: 4px 0 0 4px; border-radius: 4px 0 0 4px; } .input-prepend.input-append .add-on:last-child, .input-prepend.input-append .btn:last-child { margin-left: -1px; -webkit-border-radius: 0 4px 4px 0; -moz-border-radius: 0 4px 4px 0; border-radius: 0 4px 4px 0; } .input-prepend.input-append .btn-group:first-child { margin-left: 0; } input.search-query { padding-right: 14px; padding-right: 4px \9; padding-left: 14px; padding-left: 4px \9; /* IE7-8 doesn't have border-radius, so don't indent the padding */ margin-bottom: 0; -webkit-border-radius: 15px; -moz-border-radius: 15px; border-radius: 15px; } /* Allow for input prepend/append in search forms */ .form-search .input-append .search-query, .form-search .input-prepend .search-query { -webkit-border-radius: 0; -moz-border-radius: 0; border-radius: 0; } .form-search .input-append .search-query { -webkit-border-radius: 14px 0 0 14px; -moz-border-radius: 14px 0 0 14px; border-radius: 14px 0 0 14px; } .form-search .input-append .btn { -webkit-border-radius: 0 14px 14px 0; -moz-border-radius: 0 14px 14px 0; border-radius: 0 14px 14px 0; } .form-search .input-prepend .search-query { -webkit-border-radius: 0 14px 14px 0; -moz-border-radius: 0 14px 14px 0; border-radius: 0 14px 14px 0; } .form-search .input-prepend .btn { -webkit-border-radius: 14px 0 0 14px; -moz-border-radius: 14px 0 0 
14px; border-radius: 14px 0 0 14px; } .form-search input, .form-inline input, .form-horizontal input, .form-search textarea, .form-inline textarea, .form-horizontal textarea, .form-search select, .form-inline select, .form-horizontal select, .form-search .help-inline, .form-inline .help-inline, .form-horizontal .help-inline, .form-search .uneditable-input, .form-inline .uneditable-input, .form-horizontal .uneditable-input, .form-search .input-prepend, .form-inline .input-prepend, .form-horizontal .input-prepend, .form-search .input-append, .form-inline .input-append, .form-horizontal .input-append { display: inline-block; *display: inline; margin-bottom: 0; vertical-align: middle; *zoom: 1; } .form-search .hide, .form-inline .hide, .form-horizontal .hide { display: none; } .form-search label, .form-inline label, .form-search .btn-group, .form-inline .btn-group { display: inline-block; } .form-search .input-append, .form-inline .input-append, .form-search .input-prepend, .form-inline .input-prepend { margin-bottom: 0; } .form-search .radio, .form-search .checkbox, .form-inline .radio, .form-inline .checkbox { padding-left: 0; margin-bottom: 0; vertical-align: middle; } .form-search .radio input[type="radio"], .form-search .checkbox input[type="checkbox"], .form-inline .radio input[type="radio"], .form-inline .checkbox input[type="checkbox"] { float: left; margin-right: 3px; margin-left: 0; } .control-group { margin-bottom: 10px; } legend + .control-group { margin-top: 20px; -webkit-margin-top-collapse: separate; } .form-horizontal .control-group { margin-bottom: 20px; *zoom: 1; } .form-horizontal .control-group:before, .form-horizontal .control-group:after { display: table; line-height: 0; content: ""; } .form-horizontal .control-group:after { clear: both; } .form-horizontal .control-label { float: left; width: 160px; padding-top: 5px; text-align: right; } .form-horizontal .controls { *display: inline-block; *padding-left: 20px; margin-left: 180px; *margin-left: 0; } .form-horizontal .controls:first-child { *padding-left: 180px; } .form-horizontal .help-block { margin-bottom: 0; } .form-horizontal input + .help-block, .form-horizontal select + .help-block, .form-horizontal textarea + .help-block { margin-top: 10px; } .form-horizontal .form-actions { padding-left: 180px; } table { max-width: 100%; background-color: transparent; border-collapse: collapse; border-spacing: 0; } .table { width: 100%; margin-bottom: 20px; } .table th, .table td { padding: 8px; line-height: 20px; text-align: left; vertical-align: top; border-top: 1px solid #dddddd; } .table th { font-weight: bold; } .table thead th { vertical-align: bottom; } .table caption + thead tr:first-child th, .table caption + thead tr:first-child td, .table colgroup + thead tr:first-child th, .table colgroup + thead tr:first-child td, .table thead:first-child tr:first-child th, .table thead:first-child tr:first-child td { border-top: 0; } .table tbody + tbody { border-top: 2px solid #dddddd; } .table-condensed th, .table-condensed td { padding: 4px 5px; } .table-bordered { border: 1px solid #dddddd; border-collapse: separate; *border-collapse: collapse; border-left: 0; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; } .table-bordered th, .table-bordered td { border-left: 1px solid #dddddd; } .table-bordered caption + thead tr:first-child th, .table-bordered caption + tbody tr:first-child th, .table-bordered caption + tbody tr:first-child td, .table-bordered colgroup + thead tr:first-child th, .table-bordered 
colgroup + tbody tr:first-child th, .table-bordered colgroup + tbody tr:first-child td, .table-bordered thead:first-child tr:first-child th, .table-bordered tbody:first-child tr:first-child th, .table-bordered tbody:first-child tr:first-child td { border-top: 0; } .table-bordered thead:first-child tr:first-child th:first-child, .table-bordered tbody:first-child tr:first-child td:first-child { -webkit-border-top-left-radius: 4px; border-top-left-radius: 4px; -moz-border-radius-topleft: 4px; } .table-bordered thead:first-child tr:first-child th:last-child, .table-bordered tbody:first-child tr:first-child td:last-child { -webkit-border-top-right-radius: 4px; border-top-right-radius: 4px; -moz-border-radius-topright: 4px; } .table-bordered thead:last-child tr:last-child th:first-child, .table-bordered tbody:last-child tr:last-child td:first-child, .table-bordered tfoot:last-child tr:last-child td:first-child { -webkit-border-radius: 0 0 0 4px; -moz-border-radius: 0 0 0 4px; border-radius: 0 0 0 4px; -webkit-border-bottom-left-radius: 4px; border-bottom-left-radius: 4px; -moz-border-radius-bottomleft: 4px; } .table-bordered thead:last-child tr:last-child th:last-child, .table-bordered tbody:last-child tr:last-child td:last-child, .table-bordered tfoot:last-child tr:last-child td:last-child { -webkit-border-bottom-right-radius: 4px; border-bottom-right-radius: 4px; -moz-border-radius-bottomright: 4px; } .table-bordered caption + thead tr:first-child th:first-child, .table-bordered caption + tbody tr:first-child td:first-child, .table-bordered colgroup + thead tr:first-child th:first-child, .table-bordered colgroup + tbody tr:first-child td:first-child { -webkit-border-top-left-radius: 4px; border-top-left-radius: 4px; -moz-border-radius-topleft: 4px; } .table-bordered caption + thead tr:first-child th:last-child, .table-bordered caption + tbody tr:first-child td:last-child, .table-bordered colgroup + thead tr:first-child th:last-child, .table-bordered colgroup + tbody tr:first-child td:last-child { -webkit-border-top-right-radius: 4px; border-top-right-radius: 4px; -moz-border-radius-topright: 4px; } .table-striped tbody tr:nth-child(odd) td, .table-striped tbody tr:nth-child(odd) th { background-color: #f9f9f9; } .table-hover tbody tr:hover td, .table-hover tbody tr:hover th { background-color: #f5f5f5; } table td[class*="span"], table th[class*="span"], .row-fluid table td[class*="span"], .row-fluid table th[class*="span"] { display: table-cell; float: none; margin-left: 0; } .table td.span1, .table th.span1 { float: none; width: 44px; margin-left: 0; } .table td.span2, .table th.span2 { float: none; width: 124px; margin-left: 0; } .table td.span3, .table th.span3 { float: none; width: 204px; margin-left: 0; } .table td.span4, .table th.span4 { float: none; width: 284px; margin-left: 0; } .table td.span5, .table th.span5 { float: none; width: 364px; margin-left: 0; } .table td.span6, .table th.span6 { float: none; width: 444px; margin-left: 0; } .table td.span7, .table th.span7 { float: none; width: 524px; margin-left: 0; } .table td.span8, .table th.span8 { float: none; width: 604px; margin-left: 0; } .table td.span9, .table th.span9 { float: none; width: 684px; margin-left: 0; } .table td.span10, .table th.span10 { float: none; width: 764px; margin-left: 0; } .table td.span11, .table th.span11 { float: none; width: 844px; margin-left: 0; } .table td.span12, .table th.span12 { float: none; width: 924px; margin-left: 0; } .table tbody tr.success td { background-color: #dff0d8; } .table tbody 
tr.error td { background-color: #f2dede; } .table tbody tr.warning td { background-color: #fcf8e3; } .table tbody tr.info td { background-color: #d9edf7; } .table-hover tbody tr.success:hover td { background-color: #d0e9c6; } .table-hover tbody tr.error:hover td { background-color: #ebcccc; } .table-hover tbody tr.warning:hover td { background-color: #faf2cc; } .table-hover tbody tr.info:hover td { background-color: #c4e3f3; } [class^="icon-"], [class*=" icon-"] { display: inline-block; width: 14px; height: 14px; margin-top: 1px; *margin-right: .3em; line-height: 14px; vertical-align: text-top; background-image: url("../img/glyphicons-halflings.png"); background-position: 14px 14px; background-repeat: no-repeat; } /* White icons with optional class, or on hover/active states of certain elements */ .icon-white, .nav-pills > .active > a > [class^="icon-"], .nav-pills > .active > a > [class*=" icon-"], .nav-list > .active > a > [class^="icon-"], .nav-list > .active > a > [class*=" icon-"], .navbar-inverse .nav > .active > a > [class^="icon-"], .navbar-inverse .nav > .active > a > [class*=" icon-"], .dropdown-menu > li > a:hover > [class^="icon-"], .dropdown-menu > li > a:hover > [class*=" icon-"], .dropdown-menu > .active > a > [class^="icon-"], .dropdown-menu > .active > a > [class*=" icon-"], .dropdown-submenu:hover > a > [class^="icon-"], .dropdown-submenu:hover > a > [class*=" icon-"] { background-image: url("../img/glyphicons-halflings-white.png"); } .icon-glass { background-position: 0 0; } .icon-music { background-position: -24px 0; } .icon-search { background-position: -48px 0; } .icon-envelope { background-position: -72px 0; } .icon-heart { background-position: -96px 0; } .icon-star { background-position: -120px 0; } .icon-star-empty { background-position: -144px 0; } .icon-user { background-position: -168px 0; } .icon-film { background-position: -192px 0; } .icon-th-large { background-position: -216px 0; } .icon-th { background-position: -240px 0; } .icon-th-list { background-position: -264px 0; } .icon-ok { background-position: -288px 0; } .icon-remove { background-position: -312px 0; } .icon-zoom-in { background-position: -336px 0; } .icon-zoom-out { background-position: -360px 0; } .icon-off { background-position: -384px 0; } .icon-signal { background-position: -408px 0; } .icon-cog { background-position: -432px 0; } .icon-trash { background-position: -456px 0; } .icon-home { background-position: 0 -24px; } .icon-file { background-position: -24px -24px; } .icon-time { background-position: -48px -24px; } .icon-road { background-position: -72px -24px; } .icon-download-alt { background-position: -96px -24px; } .icon-download { background-position: -120px -24px; } .icon-upload { background-position: -144px -24px; } .icon-inbox { background-position: -168px -24px; } .icon-play-circle { background-position: -192px -24px; } .icon-repeat { background-position: -216px -24px; } .icon-refresh { background-position: -240px -24px; } .icon-list-alt { background-position: -264px -24px; } .icon-lock { background-position: -287px -24px; } .icon-flag { background-position: -312px -24px; } .icon-headphones { background-position: -336px -24px; } .icon-volume-off { background-position: -360px -24px; } .icon-volume-down { background-position: -384px -24px; } .icon-volume-up { background-position: -408px -24px; } .icon-qrcode { background-position: -432px -24px; } .icon-barcode { background-position: -456px -24px; } .icon-tag { background-position: 0 -48px; } .icon-tags { background-position: -25px 
-48px; } .icon-book { background-position: -48px -48px; } .icon-bookmark { background-position: -72px -48px; } .icon-print { background-position: -96px -48px; } .icon-camera { background-position: -120px -48px; } .icon-font { background-position: -144px -48px; } .icon-bold { background-position: -167px -48px; } .icon-italic { background-position: -192px -48px; } .icon-text-height { background-position: -216px -48px; } .icon-text-width { background-position: -240px -48px; } .icon-align-left { background-position: -264px -48px; } .icon-align-center { background-position: -288px -48px; } .icon-align-right { background-position: -312px -48px; } .icon-align-justify { background-position: -336px -48px; } .icon-list { background-position: -360px -48px; } .icon-indent-left { background-position: -384px -48px; } .icon-indent-right { background-position: -408px -48px; } .icon-facetime-video { background-position: -432px -48px; } .icon-picture { background-position: -456px -48px; } .icon-pencil { background-position: 0 -72px; } .icon-map-marker { background-position: -24px -72px; } .icon-adjust { background-position: -48px -72px; } .icon-tint { background-position: -72px -72px; } .icon-edit { background-position: -96px -72px; } .icon-share { background-position: -120px -72px; } .icon-check { background-position: -144px -72px; } .icon-move { background-position: -168px -72px; } .icon-step-backward { background-position: -192px -72px; } .icon-fast-backward { background-position: -216px -72px; } .icon-backward { background-position: -240px -72px; } .icon-play { background-position: -264px -72px; } .icon-pause { background-position: -288px -72px; } .icon-stop { background-position: -312px -72px; } .icon-forward { background-position: -336px -72px; } .icon-fast-forward { background-position: -360px -72px; } .icon-step-forward { background-position: -384px -72px; } .icon-eject { background-position: -408px -72px; } .icon-chevron-left { background-position: -432px -72px; } .icon-chevron-right { background-position: -456px -72px; } .icon-plus-sign { background-position: 0 -96px; } .icon-minus-sign { background-position: -24px -96px; } .icon-remove-sign { background-position: -48px -96px; } .icon-ok-sign { background-position: -72px -96px; } .icon-question-sign { background-position: -96px -96px; } .icon-info-sign { background-position: -120px -96px; } .icon-screenshot { background-position: -144px -96px; } .icon-remove-circle { background-position: -168px -96px; } .icon-ok-circle { background-position: -192px -96px; } .icon-ban-circle { background-position: -216px -96px; } .icon-arrow-left { background-position: -240px -96px; } .icon-arrow-right { background-position: -264px -96px; } .icon-arrow-up { background-position: -289px -96px; } .icon-arrow-down { background-position: -312px -96px; } .icon-share-alt { background-position: -336px -96px; } .icon-resize-full { background-position: -360px -96px; } .icon-resize-small { background-position: -384px -96px; } .icon-plus { background-position: -408px -96px; } .icon-minus { background-position: -433px -96px; } .icon-asterisk { background-position: -456px -96px; } .icon-exclamation-sign { background-position: 0 -120px; } .icon-gift { background-position: -24px -120px; } .icon-leaf { background-position: -48px -120px; } .icon-fire { background-position: -72px -120px; } .icon-eye-open { background-position: -96px -120px; } .icon-eye-close { background-position: -120px -120px; } .icon-warning-sign { background-position: -144px -120px; } .icon-plane { 
background-position: -168px -120px; } .icon-calendar { background-position: -192px -120px; } .icon-random { width: 16px; background-position: -216px -120px; } .icon-comment { background-position: -240px -120px; } .icon-magnet { background-position: -264px -120px; } .icon-chevron-up { background-position: -288px -120px; } .icon-chevron-down { background-position: -313px -119px; } .icon-retweet { background-position: -336px -120px; } .icon-shopping-cart { background-position: -360px -120px; } .icon-folder-close { background-position: -384px -120px; } .icon-folder-open { width: 16px; background-position: -408px -120px; } .icon-resize-vertical { background-position: -432px -119px; } .icon-resize-horizontal { background-position: -456px -118px; } .icon-hdd { background-position: 0 -144px; } .icon-bullhorn { background-position: -24px -144px; } .icon-bell { background-position: -48px -144px; } .icon-certificate { background-position: -72px -144px; } .icon-thumbs-up { background-position: -96px -144px; } .icon-thumbs-down { background-position: -120px -144px; } .icon-hand-right { background-position: -144px -144px; } .icon-hand-left { background-position: -168px -144px; } .icon-hand-up { background-position: -192px -144px; } .icon-hand-down { background-position: -216px -144px; } .icon-circle-arrow-right { background-position: -240px -144px; } .icon-circle-arrow-left { background-position: -264px -144px; } .icon-circle-arrow-up { background-position: -288px -144px; } .icon-circle-arrow-down { background-position: -312px -144px; } .icon-globe { background-position: -336px -144px; } .icon-wrench { background-position: -360px -144px; } .icon-tasks { background-position: -384px -144px; } .icon-filter { background-position: -408px -144px; } .icon-briefcase { background-position: -432px -144px; } .icon-fullscreen { background-position: -456px -144px; } .dropup, .dropdown { position: relative; } .dropdown-toggle { *margin-bottom: -3px; } .dropdown-toggle:active, .open .dropdown-toggle { outline: 0; } .caret { display: inline-block; width: 0; height: 0; vertical-align: top; border-top: 4px solid #000000; border-right: 4px solid transparent; border-left: 4px solid transparent; content: ""; } .dropdown .caret { margin-top: 8px; margin-left: 2px; } .dropdown-menu { position: absolute; top: 100%; left: 0; z-index: 1000; display: none; float: left; min-width: 160px; padding: 5px 0; margin: 2px 0 0; list-style: none; background-color: #ffffff; border: 1px solid #ccc; border: 1px solid rgba(0, 0, 0, 0.2); *border-right-width: 2px; *border-bottom-width: 2px; -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; -webkit-box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2); -moz-box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2); box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2); -webkit-background-clip: padding-box; -moz-background-clip: padding; background-clip: padding-box; } .dropdown-menu.pull-right { right: 0; left: auto; } .dropdown-menu .divider { *width: 100%; height: 1px; margin: 9px 1px; *margin: -5px 0 5px; overflow: hidden; background-color: #e5e5e5; border-bottom: 1px solid #ffffff; } .dropdown-menu li > a { display: block; padding: 3px 20px; clear: both; font-weight: normal; line-height: 20px; color: #333333; white-space: nowrap; } .dropdown-menu li > a:hover, .dropdown-menu li > a:focus, .dropdown-submenu:hover > a { color: #ffffff; text-decoration: none; background-color: #0081c2; background-image: -moz-linear-gradient(top, #0088cc, #0077b3); background-image: -webkit-gradient(linear, 0 0, 0 100%, 
from(#0088cc), to(#0077b3)); background-image: -webkit-linear-gradient(top, #0088cc, #0077b3); background-image: -o-linear-gradient(top, #0088cc, #0077b3); background-image: linear-gradient(to bottom, #0088cc, #0077b3); background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc', endColorstr='#ff0077b3', GradientType=0); } .dropdown-menu .active > a, .dropdown-menu .active > a:hover { color: #333333; text-decoration: none; background-color: #0081c2; background-image: -moz-linear-gradient(top, #0088cc, #0077b3); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#0088cc), to(#0077b3)); background-image: -webkit-linear-gradient(top, #0088cc, #0077b3); background-image: -o-linear-gradient(top, #0088cc, #0077b3); background-image: linear-gradient(to bottom, #0088cc, #0077b3); background-repeat: repeat-x; outline: 0; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc', endColorstr='#ff0077b3', GradientType=0); } .dropdown-menu .disabled > a, .dropdown-menu .disabled > a:hover { color: #999999; } .dropdown-menu .disabled > a:hover { text-decoration: none; cursor: default; background-color: transparent; background-image: none; } .open { *z-index: 1000; } .open > .dropdown-menu { display: block; } .pull-right > .dropdown-menu { right: 0; left: auto; } .dropup .caret, .navbar-fixed-bottom .dropdown .caret { border-top: 0; border-bottom: 4px solid #000000; content: ""; } .dropup .dropdown-menu, .navbar-fixed-bottom .dropdown .dropdown-menu { top: auto; bottom: 100%; margin-bottom: 1px; } .dropdown-submenu { position: relative; } .dropdown-submenu > .dropdown-menu { top: 0; left: 100%; margin-top: -6px; margin-left: -1px; -webkit-border-radius: 0 6px 6px 6px; -moz-border-radius: 0 6px 6px 6px; border-radius: 0 6px 6px 6px; } .dropdown-submenu:hover > .dropdown-menu { display: block; } .dropup .dropdown-submenu > .dropdown-menu { top: auto; bottom: 0; margin-top: 0; margin-bottom: -2px; -webkit-border-radius: 5px 5px 5px 0; -moz-border-radius: 5px 5px 5px 0; border-radius: 5px 5px 5px 0; } .dropdown-submenu > a:after { display: block; float: right; width: 0; height: 0; margin-top: 5px; margin-right: -10px; border-color: transparent; border-left-color: #cccccc; border-style: solid; border-width: 5px 0 5px 5px; content: " "; } .dropdown-submenu:hover > a:after { border-left-color: #ffffff; } .dropdown-submenu.pull-left { float: none; } .dropdown-submenu.pull-left > .dropdown-menu { left: -100%; margin-left: 10px; -webkit-border-radius: 6px 0 6px 6px; -moz-border-radius: 6px 0 6px 6px; border-radius: 6px 0 6px 6px; } .dropdown .dropdown-menu .nav-header { padding-right: 20px; padding-left: 20px; } .typeahead { margin-top: 2px; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; } .well { min-height: 20px; padding: 19px; margin-bottom: 20px; background-color: #f5f5f5; border: 1px solid #e3e3e3; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.05); -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.05); box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.05); } .well blockquote { border-color: #ddd; border-color: rgba(0, 0, 0, 0.15); } .well-large { padding: 24px; -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; } .well-small { padding: 9px; -webkit-border-radius: 3px; -moz-border-radius: 3px; border-radius: 3px; } .fade { opacity: 0; -webkit-transition: opacity 0.15s linear; -moz-transition: opacity 0.15s linear; 
-o-transition: opacity 0.15s linear; transition: opacity 0.15s linear; } .fade.in { opacity: 1; } .collapse { position: relative; height: 0; overflow: hidden; -webkit-transition: height 0.35s ease; -moz-transition: height 0.35s ease; -o-transition: height 0.35s ease; transition: height 0.35s ease; } .collapse.in { height: auto; } .close { float: right; font-size: 20px; font-weight: bold; line-height: 20px; color: #000000; text-shadow: 0 1px 0 #ffffff; opacity: 0.2; filter: alpha(opacity=20); } .close:hover { color: #000000; text-decoration: none; cursor: pointer; opacity: 0.4; filter: alpha(opacity=40); } button.close { padding: 0; cursor: pointer; background: transparent; border: 0; -webkit-appearance: none; } .btn { display: inline-block; *display: inline; padding: 4px 12px; margin-bottom: 0; *margin-left: .3em; font-size: 14px; line-height: 20px; *line-height: 20px; color: #333333; text-align: center; text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75); vertical-align: middle; cursor: pointer; background-color: #f5f5f5; *background-color: #e6e6e6; background-image: -moz-linear-gradient(top, #ffffff, #e6e6e6); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#ffffff), to(#e6e6e6)); background-image: -webkit-linear-gradient(top, #ffffff, #e6e6e6); background-image: -o-linear-gradient(top, #ffffff, #e6e6e6); background-image: linear-gradient(to bottom, #ffffff, #e6e6e6); background-repeat: repeat-x; border: 1px solid #bbbbbb; *border: 0; border-color: #e6e6e6 #e6e6e6 #bfbfbf; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); border-bottom-color: #a2a2a2; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#ffe6e6e6', GradientType=0); filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); *zoom: 1; -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); } .btn:hover, .btn:active, .btn.active, .btn.disabled, .btn[disabled] { color: #333333; background-color: #e6e6e6; *background-color: #d9d9d9; } .btn:active, .btn.active { background-color: #cccccc \9; } .btn:first-child { *margin-left: 0; } .btn:hover { color: #333333; text-decoration: none; background-color: #e6e6e6; *background-color: #d9d9d9; /* Buttons in IE7 don't get borders, so darken on hover */ background-position: 0 -15px; -webkit-transition: background-position 0.1s linear; -moz-transition: background-position 0.1s linear; -o-transition: background-position 0.1s linear; transition: background-position 0.1s linear; } .btn:focus { outline: thin dotted #333; outline: 5px auto -webkit-focus-ring-color; outline-offset: -2px; } .btn.active, .btn:active { background-color: #e6e6e6; background-color: #d9d9d9 \9; background-image: none; outline: 0; -webkit-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); -moz-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); } .btn.disabled, .btn[disabled] { cursor: default; background-color: #e6e6e6; background-image: none; opacity: 0.65; filter: alpha(opacity=65); -webkit-box-shadow: none; -moz-box-shadow: none; box-shadow: none; } .btn-large { padding: 11px 19px; font-size: 17.5px; -webkit-border-radius: 6px; 
-moz-border-radius: 6px; border-radius: 6px; } .btn-large [class^="icon-"], .btn-large [class*=" icon-"] { margin-top: 2px; } .btn-small { padding: 2px 10px; font-size: 11.9px; -webkit-border-radius: 3px; -moz-border-radius: 3px; border-radius: 3px; } .btn-small [class^="icon-"], .btn-small [class*=" icon-"] { margin-top: 0; } .btn-mini { padding: 1px 6px; font-size: 10.5px; -webkit-border-radius: 3px; -moz-border-radius: 3px; border-radius: 3px; } .btn-block { display: block; width: 100%; padding-right: 0; padding-left: 0; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; } .btn-block + .btn-block { margin-top: 5px; } input[type="submit"].btn-block, input[type="reset"].btn-block, input[type="button"].btn-block { width: 100%; } .btn-primary.active, .btn-warning.active, .btn-danger.active, .btn-success.active, .btn-info.active, .btn-inverse.active { color: rgba(255, 255, 255, 0.75); } .btn { border-color: #c5c5c5; border-color: rgba(0, 0, 0, 0.15) rgba(0, 0, 0, 0.15) rgba(0, 0, 0, 0.25); } .btn-primary { color: #ffffff; text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); background-color: #006dcc; *background-color: #0044cc; background-image: -moz-linear-gradient(top, #0088cc, #0044cc); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#0088cc), to(#0044cc)); background-image: -webkit-linear-gradient(top, #0088cc, #0044cc); background-image: -o-linear-gradient(top, #0088cc, #0044cc); background-image: linear-gradient(to bottom, #0088cc, #0044cc); background-repeat: repeat-x; border-color: #0044cc #0044cc #002a80; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc', endColorstr='#ff0044cc', GradientType=0); filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); } .btn-primary:hover, .btn-primary:active, .btn-primary.active, .btn-primary.disabled, .btn-primary[disabled] { color: #ffffff; background-color: #0044cc; *background-color: #003bb3; } .btn-primary:active, .btn-primary.active { background-color: #003399 \9; } .btn-warning { color: #ffffff; text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); background-color: #faa732; *background-color: #f89406; background-image: -moz-linear-gradient(top, #fbb450, #f89406); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#fbb450), to(#f89406)); background-image: -webkit-linear-gradient(top, #fbb450, #f89406); background-image: -o-linear-gradient(top, #fbb450, #f89406); background-image: linear-gradient(to bottom, #fbb450, #f89406); background-repeat: repeat-x; border-color: #f89406 #f89406 #ad6704; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffbb450', endColorstr='#fff89406', GradientType=0); filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); } .btn-warning:hover, .btn-warning:active, .btn-warning.active, .btn-warning.disabled, .btn-warning[disabled] { color: #ffffff; background-color: #f89406; *background-color: #df8505; } .btn-warning:active, .btn-warning.active { background-color: #c67605 \9; } .btn-danger { color: #ffffff; text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); background-color: #da4f49; *background-color: #bd362f; background-image: -moz-linear-gradient(top, #ee5f5b, #bd362f); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#ee5f5b), to(#bd362f)); background-image: -webkit-linear-gradient(top, #ee5f5b, #bd362f); background-image: -o-linear-gradient(top, #ee5f5b, #bd362f); 
background-image: linear-gradient(to bottom, #ee5f5b, #bd362f); background-repeat: repeat-x; border-color: #bd362f #bd362f #802420; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffee5f5b', endColorstr='#ffbd362f', GradientType=0); filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); } .btn-danger:hover, .btn-danger:active, .btn-danger.active, .btn-danger.disabled, .btn-danger[disabled] { color: #ffffff; background-color: #bd362f; *background-color: #a9302a; } .btn-danger:active, .btn-danger.active { background-color: #942a25 \9; } .btn-success { color: #ffffff; text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); background-color: #5bb75b; *background-color: #51a351; background-image: -moz-linear-gradient(top, #62c462, #51a351); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#62c462), to(#51a351)); background-image: -webkit-linear-gradient(top, #62c462, #51a351); background-image: -o-linear-gradient(top, #62c462, #51a351); background-image: linear-gradient(to bottom, #62c462, #51a351); background-repeat: repeat-x; border-color: #51a351 #51a351 #387038; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff62c462', endColorstr='#ff51a351', GradientType=0); filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); } .btn-success:hover, .btn-success:active, .btn-success.active, .btn-success.disabled, .btn-success[disabled] { color: #ffffff; background-color: #51a351; *background-color: #499249; } .btn-success:active, .btn-success.active { background-color: #408140 \9; } .btn-info { color: #ffffff; text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); background-color: #49afcd; *background-color: #2f96b4; background-image: -moz-linear-gradient(top, #5bc0de, #2f96b4); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#5bc0de), to(#2f96b4)); background-image: -webkit-linear-gradient(top, #5bc0de, #2f96b4); background-image: -o-linear-gradient(top, #5bc0de, #2f96b4); background-image: linear-gradient(to bottom, #5bc0de, #2f96b4); background-repeat: repeat-x; border-color: #2f96b4 #2f96b4 #1f6377; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff2f96b4', GradientType=0); filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); } .btn-info:hover, .btn-info:active, .btn-info.active, .btn-info.disabled, .btn-info[disabled] { color: #ffffff; background-color: #2f96b4; *background-color: #2a85a0; } .btn-info:active, .btn-info.active { background-color: #24748c \9; } .btn-inverse { color: #ffffff; text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); background-color: #363636; *background-color: #222222; background-image: -moz-linear-gradient(top, #444444, #222222); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#444444), to(#222222)); background-image: -webkit-linear-gradient(top, #444444, #222222); background-image: -o-linear-gradient(top, #444444, #222222); background-image: linear-gradient(to bottom, #444444, #222222); background-repeat: repeat-x; border-color: #222222 #222222 #000000; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff444444', endColorstr='#ff222222', GradientType=0); filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); } .btn-inverse:hover, 
.btn-inverse:active, .btn-inverse.active, .btn-inverse.disabled, .btn-inverse[disabled] { color: #ffffff; background-color: #222222; *background-color: #151515; } .btn-inverse:active, .btn-inverse.active { background-color: #080808 \9; } button.btn, input[type="submit"].btn { *padding-top: 3px; *padding-bottom: 3px; } button.btn::-moz-focus-inner, input[type="submit"].btn::-moz-focus-inner { padding: 0; border: 0; } button.btn.btn-large, input[type="submit"].btn.btn-large { *padding-top: 7px; *padding-bottom: 7px; } button.btn.btn-small, input[type="submit"].btn.btn-small { *padding-top: 3px; *padding-bottom: 3px; } button.btn.btn-mini, input[type="submit"].btn.btn-mini { *padding-top: 1px; *padding-bottom: 1px; } .btn-link, .btn-link:active, .btn-link[disabled] { background-color: transparent; background-image: none; -webkit-box-shadow: none; -moz-box-shadow: none; box-shadow: none; } .btn-link { color: #0088cc; cursor: pointer; border-color: transparent; -webkit-border-radius: 0; -moz-border-radius: 0; border-radius: 0; } .btn-link:hover { color: #005580; text-decoration: underline; background-color: transparent; } .btn-link[disabled]:hover { color: #333333; text-decoration: none; } .btn-group { position: relative; display: inline-block; *display: inline; *margin-left: .3em; font-size: 0; white-space: nowrap; vertical-align: middle; *zoom: 1; } .btn-group:first-child { *margin-left: 0; } .btn-group + .btn-group { margin-left: 5px; } .btn-toolbar { margin-top: 10px; margin-bottom: 10px; font-size: 0; } .btn-toolbar .btn + .btn, .btn-toolbar .btn-group + .btn, .btn-toolbar .btn + .btn-group { margin-left: 5px; } .btn-group > .btn { position: relative; -webkit-border-radius: 0; -moz-border-radius: 0; border-radius: 0; } .btn-group > .btn + .btn { margin-left: -1px; } .btn-group > .btn, .btn-group > .dropdown-menu { font-size: 14px; } .btn-group > .btn-mini { font-size: 11px; } .btn-group > .btn-small { font-size: 12px; } .btn-group > .btn-large { font-size: 16px; } .btn-group > .btn:first-child { margin-left: 0; -webkit-border-bottom-left-radius: 4px; border-bottom-left-radius: 4px; -webkit-border-top-left-radius: 4px; border-top-left-radius: 4px; -moz-border-radius-bottomleft: 4px; -moz-border-radius-topleft: 4px; } .btn-group > .btn:last-child, .btn-group > .dropdown-toggle { -webkit-border-top-right-radius: 4px; border-top-right-radius: 4px; -webkit-border-bottom-right-radius: 4px; border-bottom-right-radius: 4px; -moz-border-radius-topright: 4px; -moz-border-radius-bottomright: 4px; } .btn-group > .btn.large:first-child { margin-left: 0; -webkit-border-bottom-left-radius: 6px; border-bottom-left-radius: 6px; -webkit-border-top-left-radius: 6px; border-top-left-radius: 6px; -moz-border-radius-bottomleft: 6px; -moz-border-radius-topleft: 6px; } .btn-group > .btn.large:last-child, .btn-group > .large.dropdown-toggle { -webkit-border-top-right-radius: 6px; border-top-right-radius: 6px; -webkit-border-bottom-right-radius: 6px; border-bottom-right-radius: 6px; -moz-border-radius-topright: 6px; -moz-border-radius-bottomright: 6px; } .btn-group > .btn:hover, .btn-group > .btn:focus, .btn-group > .btn:active, .btn-group > .btn.active { z-index: 2; } .btn-group .dropdown-toggle:active, .btn-group.open .dropdown-toggle { outline: 0; } .btn-group > .btn + .dropdown-toggle { *padding-top: 5px; padding-right: 8px; *padding-bottom: 5px; padding-left: 8px; -webkit-box-shadow: inset 1px 0 0 rgba(255, 255, 255, 0.125), inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); 
-moz-box-shadow: inset 1px 0 0 rgba(255, 255, 255, 0.125), inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); box-shadow: inset 1px 0 0 rgba(255, 255, 255, 0.125), inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); } .btn-group > .btn-mini + .dropdown-toggle { *padding-top: 2px; padding-right: 5px; *padding-bottom: 2px; padding-left: 5px; } .btn-group > .btn-small + .dropdown-toggle { *padding-top: 5px; *padding-bottom: 4px; } .btn-group > .btn-large + .dropdown-toggle { *padding-top: 7px; padding-right: 12px; *padding-bottom: 7px; padding-left: 12px; } .btn-group.open .dropdown-toggle { background-image: none; -webkit-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); -moz-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); } .btn-group.open .btn.dropdown-toggle { background-color: #e6e6e6; } .btn-group.open .btn-primary.dropdown-toggle { background-color: #0044cc; } .btn-group.open .btn-warning.dropdown-toggle { background-color: #f89406; } .btn-group.open .btn-danger.dropdown-toggle { background-color: #bd362f; } .btn-group.open .btn-success.dropdown-toggle { background-color: #51a351; } .btn-group.open .btn-info.dropdown-toggle { background-color: #2f96b4; } .btn-group.open .btn-inverse.dropdown-toggle { background-color: #222222; } .btn .caret { margin-top: 8px; margin-left: 0; } .btn-mini .caret, .btn-small .caret, .btn-large .caret { margin-top: 6px; } .btn-large .caret { border-top-width: 5px; border-right-width: 5px; border-left-width: 5px; } .dropup .btn-large .caret { border-bottom-width: 5px; } .btn-primary .caret, .btn-warning .caret, .btn-danger .caret, .btn-info .caret, .btn-success .caret, .btn-inverse .caret { border-top-color: #ffffff; border-bottom-color: #ffffff; } .btn-group-vertical { display: inline-block; *display: inline; /* IE7 inline-block hack */ *zoom: 1; } .btn-group-vertical .btn { display: block; float: none; width: 100%; -webkit-border-radius: 0; -moz-border-radius: 0; border-radius: 0; } .btn-group-vertical .btn + .btn { margin-top: -1px; margin-left: 0; } .btn-group-vertical .btn:first-child { -webkit-border-radius: 4px 4px 0 0; -moz-border-radius: 4px 4px 0 0; border-radius: 4px 4px 0 0; } .btn-group-vertical .btn:last-child { -webkit-border-radius: 0 0 4px 4px; -moz-border-radius: 0 0 4px 4px; border-radius: 0 0 4px 4px; } .btn-group-vertical .btn-large:first-child { -webkit-border-radius: 6px 6px 0 0; -moz-border-radius: 6px 6px 0 0; border-radius: 6px 6px 0 0; } .btn-group-vertical .btn-large:last-child { -webkit-border-radius: 0 0 6px 6px; -moz-border-radius: 0 0 6px 6px; border-radius: 0 0 6px 6px; } .alert { padding: 8px 35px 8px 14px; margin-bottom: 20px; color: #c09853; text-shadow: 0 1px 0 rgba(255, 255, 255, 0.5); background-color: #fcf8e3; border: 1px solid #fbeed5; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; } .alert h4 { margin: 0; } .alert .close { position: relative; top: -2px; right: -21px; line-height: 20px; } .alert-success { color: #468847; background-color: #dff0d8; border-color: #d6e9c6; } .alert-danger, .alert-error { color: #b94a48; background-color: #f2dede; border-color: #eed3d7; } .alert-info { color: #3a87ad; background-color: #d9edf7; border-color: #bce8f1; } .alert-block { padding-top: 14px; padding-bottom: 14px; } .alert-block > p, .alert-block > ul { margin-bottom: 0; } .alert-block p + p { margin-top: 5px; } .nav { 
margin-bottom: 20px; margin-left: 0; list-style: none; } .nav > li > a { display: block; } .nav > li > a:hover { text-decoration: none; background-color: #eeeeee; } .nav > .pull-right { float: right; } .nav-header { display: block; padding: 3px 15px; font-size: 11px; font-weight: bold; line-height: 20px; color: #999999; text-shadow: 0 1px 0 rgba(255, 255, 255, 0.5); text-transform: uppercase; } .nav li + .nav-header { margin-top: 9px; } .nav-list { padding-right: 15px; padding-left: 15px; margin-bottom: 0; } .nav-list > li > a, .nav-list .nav-header { margin-right: -15px; margin-left: -15px; text-shadow: 0 1px 0 rgba(255, 255, 255, 0.5); } .nav-list > li > a { padding: 3px 15px; } .nav-list > .active > a, .nav-list > .active > a:hover { color: #ffffff; text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.2); background-color: #0088cc; } .nav-list [class^="icon-"], .nav-list [class*=" icon-"] { margin-right: 2px; } .nav-list .divider { *width: 100%; height: 1px; margin: 9px 1px; *margin: -5px 0 5px; overflow: hidden; background-color: #e5e5e5; border-bottom: 1px solid #ffffff; } .nav-tabs, .nav-pills { *zoom: 1; } .nav-tabs:before, .nav-pills:before, .nav-tabs:after, .nav-pills:after { display: table; line-height: 0; content: ""; } .nav-tabs:after, .nav-pills:after { clear: both; } .nav-tabs > li, .nav-pills > li { float: left; } .nav-tabs > li > a, .nav-pills > li > a { padding-right: 12px; padding-left: 12px; margin-right: 2px; line-height: 14px; } .nav-tabs { border-bottom: 1px solid #ddd; } .nav-tabs > li { margin-bottom: -1px; } .nav-tabs > li > a { padding-top: 8px; padding-bottom: 8px; line-height: 20px; border: 1px solid transparent; -webkit-border-radius: 4px 4px 0 0; -moz-border-radius: 4px 4px 0 0; border-radius: 4px 4px 0 0; } .nav-tabs > li > a:hover { border-color: #eeeeee #eeeeee #dddddd; } .nav-tabs > .active > a, .nav-tabs > .active > a:hover { color: #555555; cursor: default; background-color: #ffffff; border: 1px solid #ddd; border-bottom-color: transparent; } .nav-pills > li > a { padding-top: 8px; padding-bottom: 8px; margin-top: 2px; margin-bottom: 2px; -webkit-border-radius: 5px; -moz-border-radius: 5px; border-radius: 5px; } .nav-pills > .active > a, .nav-pills > .active > a:hover { color: #ffffff; background-color: #0088cc; } .nav-stacked > li { float: none; } .nav-stacked > li > a { margin-right: 0; } .nav-tabs.nav-stacked { border-bottom: 0; } .nav-tabs.nav-stacked > li > a { border: 1px solid #ddd; -webkit-border-radius: 0; -moz-border-radius: 0; border-radius: 0; } .nav-tabs.nav-stacked > li:first-child > a { -webkit-border-top-right-radius: 4px; border-top-right-radius: 4px; -webkit-border-top-left-radius: 4px; border-top-left-radius: 4px; -moz-border-radius-topright: 4px; -moz-border-radius-topleft: 4px; } .nav-tabs.nav-stacked > li:last-child > a { -webkit-border-bottom-right-radius: 4px; border-bottom-right-radius: 4px; -webkit-border-bottom-left-radius: 4px; border-bottom-left-radius: 4px; -moz-border-radius-bottomright: 4px; -moz-border-radius-bottomleft: 4px; } .nav-tabs.nav-stacked > li > a:hover { z-index: 2; border-color: #ddd; } .nav-pills.nav-stacked > li > a { margin-bottom: 3px; } .nav-pills.nav-stacked > li:last-child > a { margin-bottom: 1px; } .nav-tabs .dropdown-menu { -webkit-border-radius: 0 0 6px 6px; -moz-border-radius: 0 0 6px 6px; border-radius: 0 0 6px 6px; } .nav-pills .dropdown-menu { -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; } .nav .dropdown-toggle .caret { margin-top: 6px; border-top-color: #0088cc; 
border-bottom-color: #0088cc; } .nav .dropdown-toggle:hover .caret { border-top-color: #005580; border-bottom-color: #005580; } /* move down carets for tabs */ .nav-tabs .dropdown-toggle .caret { margin-top: 8px; } .nav .active .dropdown-toggle .caret { border-top-color: #fff; border-bottom-color: #fff; } .nav-tabs .active .dropdown-toggle .caret { border-top-color: #555555; border-bottom-color: #555555; } .nav > .dropdown.active > a:hover { cursor: pointer; } .nav-tabs .open .dropdown-toggle, .nav-pills .open .dropdown-toggle, .nav > li.dropdown.open.active > a:hover { color: #ffffff; background-color: #999999; border-color: #999999; } .nav li.dropdown.open .caret, .nav li.dropdown.open.active .caret, .nav li.dropdown.open a:hover .caret { border-top-color: #ffffff; border-bottom-color: #ffffff; opacity: 1; filter: alpha(opacity=100); } .tabs-stacked .open > a:hover { border-color: #999999; } .tabbable { *zoom: 1; } .tabbable:before, .tabbable:after { display: table; line-height: 0; content: ""; } .tabbable:after { clear: both; } .tab-content { overflow: auto; } .tabs-below > .nav-tabs, .tabs-right > .nav-tabs, .tabs-left > .nav-tabs { border-bottom: 0; } .tab-content > .tab-pane, .pill-content > .pill-pane { display: none; } .tab-content > .active, .pill-content > .active { display: block; } .tabs-below > .nav-tabs { border-top: 1px solid #ddd; } .tabs-below > .nav-tabs > li { margin-top: -1px; margin-bottom: 0; } .tabs-below > .nav-tabs > li > a { -webkit-border-radius: 0 0 4px 4px; -moz-border-radius: 0 0 4px 4px; border-radius: 0 0 4px 4px; } .tabs-below > .nav-tabs > li > a:hover { border-top-color: #ddd; border-bottom-color: transparent; } .tabs-below > .nav-tabs > .active > a, .tabs-below > .nav-tabs > .active > a:hover { border-color: transparent #ddd #ddd #ddd; } .tabs-left > .nav-tabs > li, .tabs-right > .nav-tabs > li { float: none; } .tabs-left > .nav-tabs > li > a, .tabs-right > .nav-tabs > li > a { min-width: 74px; margin-right: 0; margin-bottom: 3px; } .tabs-left > .nav-tabs { float: left; margin-right: 19px; border-right: 1px solid #ddd; } .tabs-left > .nav-tabs > li > a { margin-right: -1px; -webkit-border-radius: 4px 0 0 4px; -moz-border-radius: 4px 0 0 4px; border-radius: 4px 0 0 4px; } .tabs-left > .nav-tabs > li > a:hover { border-color: #eeeeee #dddddd #eeeeee #eeeeee; } .tabs-left > .nav-tabs .active > a, .tabs-left > .nav-tabs .active > a:hover { border-color: #ddd transparent #ddd #ddd; *border-right-color: #ffffff; } .tabs-right > .nav-tabs { float: right; margin-left: 19px; border-left: 1px solid #ddd; } .tabs-right > .nav-tabs > li > a { margin-left: -1px; -webkit-border-radius: 0 4px 4px 0; -moz-border-radius: 0 4px 4px 0; border-radius: 0 4px 4px 0; } .tabs-right > .nav-tabs > li > a:hover { border-color: #eeeeee #eeeeee #eeeeee #dddddd; } .tabs-right > .nav-tabs .active > a, .tabs-right > .nav-tabs .active > a:hover { border-color: #ddd #ddd #ddd transparent; *border-left-color: #ffffff; } .nav > .disabled > a { color: #999999; } .nav > .disabled > a:hover { text-decoration: none; cursor: default; background-color: transparent; } .navbar { *position: relative; *z-index: 2; margin-bottom: 20px; overflow: visible; color: #777777; } .navbar-inner { min-height: 40px; padding-right: 20px; padding-left: 20px; background-color: #fafafa; background-image: -moz-linear-gradient(top, #ffffff, #f2f2f2); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#ffffff), to(#f2f2f2)); background-image: -webkit-linear-gradient(top, #ffffff, #f2f2f2); background-image: 
-o-linear-gradient(top, #ffffff, #f2f2f2); background-image: linear-gradient(to bottom, #ffffff, #f2f2f2); background-repeat: repeat-x; border: 1px solid #d4d4d4; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#fff2f2f2', GradientType=0); *zoom: 1; -webkit-box-shadow: 0 1px 4px rgba(0, 0, 0, 0.065); -moz-box-shadow: 0 1px 4px rgba(0, 0, 0, 0.065); box-shadow: 0 1px 4px rgba(0, 0, 0, 0.065); } .navbar-inner:before, .navbar-inner:after { display: table; line-height: 0; content: ""; } .navbar-inner:after { clear: both; } .navbar .container { width: auto; } .nav-collapse.collapse { height: auto; overflow: visible; } .navbar .brand { display: block; float: left; padding: 10px 20px 10px; margin-left: -20px; font-size: 20px; font-weight: 200; color: #777777; text-shadow: 0 1px 0 #ffffff; } .navbar .brand:hover { text-decoration: none; } .navbar-text { margin-bottom: 0; line-height: 40px; } .navbar-link { color: #777777; } .navbar-link:hover { color: #333333; } .navbar .divider-vertical { height: 40px; margin: 0 9px; border-right: 1px solid #ffffff; border-left: 1px solid #f2f2f2; } .navbar .btn, .navbar .btn-group { margin-top: 5px; } .navbar .btn-group .btn, .navbar .input-prepend .btn, .navbar .input-append .btn { margin-top: 0; } .navbar-form { margin-bottom: 0; *zoom: 1; } .navbar-form:before, .navbar-form:after { display: table; line-height: 0; content: ""; } .navbar-form:after { clear: both; } .navbar-form input, .navbar-form select, .navbar-form .radio, .navbar-form .checkbox { margin-top: 5px; } .navbar-form input, .navbar-form select, .navbar-form .btn { display: inline-block; margin-bottom: 0; } .navbar-form input[type="image"], .navbar-form input[type="checkbox"], .navbar-form input[type="radio"] { margin-top: 3px; } .navbar-form .input-append, .navbar-form .input-prepend { margin-top: 6px; white-space: nowrap; } .navbar-form .input-append input, .navbar-form .input-prepend input { margin-top: 0; } .navbar-search { position: relative; float: left; margin-top: 5px; margin-bottom: 0; } .navbar-search .search-query { padding: 4px 14px; margin-bottom: 0; font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; font-size: 13px; font-weight: normal; line-height: 1; -webkit-border-radius: 15px; -moz-border-radius: 15px; border-radius: 15px; } .navbar-static-top { position: static; margin-bottom: 0; } .navbar-static-top .navbar-inner { -webkit-border-radius: 0; -moz-border-radius: 0; border-radius: 0; } .navbar-fixed-top, .navbar-fixed-bottom { position: fixed; right: 0; left: 0; z-index: 1030; margin-bottom: 0; } .navbar-fixed-top .navbar-inner, .navbar-static-top .navbar-inner { border-width: 0 0 1px; } .navbar-fixed-bottom .navbar-inner { border-width: 1px 0 0; } .navbar-fixed-top .navbar-inner, .navbar-fixed-bottom .navbar-inner { padding-right: 0; padding-left: 0; -webkit-border-radius: 0; -moz-border-radius: 0; border-radius: 0; } .navbar-static-top .container, .navbar-fixed-top .container, .navbar-fixed-bottom .container { width: 940px; } .navbar-fixed-top { top: 0; } .navbar-fixed-top .navbar-inner, .navbar-static-top .navbar-inner { -webkit-box-shadow: 0 1px 10px rgba(0, 0, 0, 0.1); -moz-box-shadow: 0 1px 10px rgba(0, 0, 0, 0.1); box-shadow: 0 1px 10px rgba(0, 0, 0, 0.1); } .navbar-fixed-bottom { bottom: 0; } .navbar-fixed-bottom .navbar-inner { -webkit-box-shadow: 0 -1px 10px rgba(0, 0, 0, 0.1); -moz-box-shadow: 0 -1px 10px rgba(0, 0, 0, 0.1); box-shadow: 0 -1px 
10px rgba(0, 0, 0, 0.1); } .navbar .nav { position: relative; left: 0; display: block; float: left; margin: 0 10px 0 0; } .navbar .nav.pull-right { float: right; margin-right: 0; } .navbar .nav > li { float: left; } .navbar .nav > li > a { float: none; padding: 10px 15px 10px; color: #777777; text-decoration: none; text-shadow: 0 1px 0 #ffffff; } .navbar .nav .dropdown-toggle .caret { margin-top: 8px; } .navbar .nav > li > a:focus, .navbar .nav > li > a:hover { color: #333333; text-decoration: none; background-color: transparent; } .navbar .nav > .active > a, .navbar .nav > .active > a:hover, .navbar .nav > .active > a:focus { color: #555555; text-decoration: none; background-color: #e5e5e5; -webkit-box-shadow: inset 0 3px 8px rgba(0, 0, 0, 0.125); -moz-box-shadow: inset 0 3px 8px rgba(0, 0, 0, 0.125); box-shadow: inset 0 3px 8px rgba(0, 0, 0, 0.125); } .navbar .btn-navbar { display: none; float: right; padding: 7px 10px; margin-right: 5px; margin-left: 5px; color: #ffffff; text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); background-color: #ededed; *background-color: #e5e5e5; background-image: -moz-linear-gradient(top, #f2f2f2, #e5e5e5); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#f2f2f2), to(#e5e5e5)); background-image: -webkit-linear-gradient(top, #f2f2f2, #e5e5e5); background-image: -o-linear-gradient(top, #f2f2f2, #e5e5e5); background-image: linear-gradient(to bottom, #f2f2f2, #e5e5e5); background-repeat: repeat-x; border-color: #e5e5e5 #e5e5e5 #bfbfbf; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2f2f2', endColorstr='#ffe5e5e5', GradientType=0); filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.075); -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.075); box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.075); } .navbar .btn-navbar:hover, .navbar .btn-navbar:active, .navbar .btn-navbar.active, .navbar .btn-navbar.disabled, .navbar .btn-navbar[disabled] { color: #ffffff; background-color: #e5e5e5; *background-color: #d9d9d9; } .navbar .btn-navbar:active, .navbar .btn-navbar.active { background-color: #cccccc \9; } .navbar .btn-navbar .icon-bar { display: block; width: 18px; height: 2px; background-color: #f5f5f5; -webkit-border-radius: 1px; -moz-border-radius: 1px; border-radius: 1px; -webkit-box-shadow: 0 1px 0 rgba(0, 0, 0, 0.25); -moz-box-shadow: 0 1px 0 rgba(0, 0, 0, 0.25); box-shadow: 0 1px 0 rgba(0, 0, 0, 0.25); } .btn-navbar .icon-bar + .icon-bar { margin-top: 3px; } .navbar .nav > li > .dropdown-menu:before { position: absolute; top: -7px; left: 9px; display: inline-block; border-right: 7px solid transparent; border-bottom: 7px solid #ccc; border-left: 7px solid transparent; border-bottom-color: rgba(0, 0, 0, 0.2); content: ''; } .navbar .nav > li > .dropdown-menu:after { position: absolute; top: -6px; left: 10px; display: inline-block; border-right: 6px solid transparent; border-bottom: 6px solid #ffffff; border-left: 6px solid transparent; content: ''; } .navbar-fixed-bottom .nav > li > .dropdown-menu:before { top: auto; bottom: -7px; border-top: 7px solid #ccc; border-bottom: 0; border-top-color: rgba(0, 0, 0, 0.2); } .navbar-fixed-bottom .nav > li > .dropdown-menu:after { top: auto; bottom: -6px; border-top: 6px solid #ffffff; border-bottom: 0; } .navbar .nav li.dropdown.open > 
.dropdown-toggle, .navbar .nav li.dropdown.active > .dropdown-toggle, .navbar .nav li.dropdown.open.active > .dropdown-toggle { color: #555555; background-color: #e5e5e5; } .navbar .nav li.dropdown > .dropdown-toggle .caret { border-top-color: #777777; border-bottom-color: #777777; } .navbar .nav li.dropdown.open > .dropdown-toggle .caret, .navbar .nav li.dropdown.active > .dropdown-toggle .caret, .navbar .nav li.dropdown.open.active > .dropdown-toggle .caret { border-top-color: #555555; border-bottom-color: #555555; } .navbar .pull-right > li > .dropdown-menu, .navbar .nav > li > .dropdown-menu.pull-right { right: 0; left: auto; } .navbar .pull-right > li > .dropdown-menu:before, .navbar .nav > li > .dropdown-menu.pull-right:before { right: 12px; left: auto; } .navbar .pull-right > li > .dropdown-menu:after, .navbar .nav > li > .dropdown-menu.pull-right:after { right: 13px; left: auto; } .navbar .pull-right > li > .dropdown-menu .dropdown-menu, .navbar .nav > li > .dropdown-menu.pull-right .dropdown-menu { right: 100%; left: auto; margin-right: -1px; margin-left: 0; -webkit-border-radius: 6px 0 6px 6px; -moz-border-radius: 6px 0 6px 6px; border-radius: 6px 0 6px 6px; } .navbar-inverse { color: #999999; } .navbar-inverse .navbar-inner { background-color: #1b1b1b; background-image: -moz-linear-gradient(top, #222222, #111111); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#222222), to(#111111)); background-image: -webkit-linear-gradient(top, #222222, #111111); background-image: -o-linear-gradient(top, #222222, #111111); background-image: linear-gradient(to bottom, #222222, #111111); background-repeat: repeat-x; border-color: #252525; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff222222', endColorstr='#ff111111', GradientType=0); } .navbar-inverse .brand, .navbar-inverse .nav > li > a { color: #999999; text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); } .navbar-inverse .brand:hover, .navbar-inverse .nav > li > a:hover { color: #ffffff; } .navbar-inverse .nav > li > a:focus, .navbar-inverse .nav > li > a:hover { color: #ffffff; background-color: transparent; } .navbar-inverse .nav .active > a, .navbar-inverse .nav .active > a:hover, .navbar-inverse .nav .active > a:focus { color: #ffffff; background-color: #111111; } .navbar-inverse .navbar-link { color: #999999; } .navbar-inverse .navbar-link:hover { color: #ffffff; } .navbar-inverse .divider-vertical { border-right-color: #222222; border-left-color: #111111; } .navbar-inverse .nav li.dropdown.open > .dropdown-toggle, .navbar-inverse .nav li.dropdown.active > .dropdown-toggle, .navbar-inverse .nav li.dropdown.open.active > .dropdown-toggle { color: #ffffff; background-color: #111111; } .navbar-inverse .nav li.dropdown > .dropdown-toggle .caret { border-top-color: #999999; border-bottom-color: #999999; } .navbar-inverse .nav li.dropdown.open > .dropdown-toggle .caret, .navbar-inverse .nav li.dropdown.active > .dropdown-toggle .caret, .navbar-inverse .nav li.dropdown.open.active > .dropdown-toggle .caret { border-top-color: #ffffff; border-bottom-color: #ffffff; } .navbar-inverse .navbar-search .search-query { color: #ffffff; background-color: #515151; border-color: #111111; -webkit-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0 rgba(255, 255, 255, 0.15); -moz-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0 rgba(255, 255, 255, 0.15); box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0 rgba(255, 255, 255, 0.15); -webkit-transition: none; -moz-transition: none; -o-transition: none; transition: 
none; } .navbar-inverse .navbar-search .search-query:-moz-placeholder { color: #cccccc; } .navbar-inverse .navbar-search .search-query:-ms-input-placeholder { color: #cccccc; } .navbar-inverse .navbar-search .search-query::-webkit-input-placeholder { color: #cccccc; } .navbar-inverse .navbar-search .search-query:focus, .navbar-inverse .navbar-search .search-query.focused { padding: 5px 15px; color: #333333; text-shadow: 0 1px 0 #ffffff; background-color: #ffffff; border: 0; outline: 0; -webkit-box-shadow: 0 0 3px rgba(0, 0, 0, 0.15); -moz-box-shadow: 0 0 3px rgba(0, 0, 0, 0.15); box-shadow: 0 0 3px rgba(0, 0, 0, 0.15); } .navbar-inverse .btn-navbar { color: #ffffff; text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); background-color: #0e0e0e; *background-color: #040404; background-image: -moz-linear-gradient(top, #151515, #040404); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#151515), to(#040404)); background-image: -webkit-linear-gradient(top, #151515, #040404); background-image: -o-linear-gradient(top, #151515, #040404); background-image: linear-gradient(to bottom, #151515, #040404); background-repeat: repeat-x; border-color: #040404 #040404 #000000; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff151515', endColorstr='#ff040404', GradientType=0); filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); } .navbar-inverse .btn-navbar:hover, .navbar-inverse .btn-navbar:active, .navbar-inverse .btn-navbar.active, .navbar-inverse .btn-navbar.disabled, .navbar-inverse .btn-navbar[disabled] { color: #ffffff; background-color: #040404; *background-color: #000000; } .navbar-inverse .btn-navbar:active, .navbar-inverse .btn-navbar.active { background-color: #000000 \9; } .breadcrumb { padding: 8px 15px; margin: 0 0 20px; list-style: none; background-color: #f5f5f5; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; } .breadcrumb li { display: inline-block; *display: inline; text-shadow: 0 1px 0 #ffffff; *zoom: 1; } .breadcrumb .divider { padding: 0 5px; color: #ccc; } .breadcrumb .active { color: #999999; } .pagination { margin: 20px 0; } .pagination ul { display: inline-block; *display: inline; margin-bottom: 0; margin-left: 0; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; *zoom: 1; -webkit-box-shadow: 0 1px 2px rgba(0, 0, 0, 0.05); -moz-box-shadow: 0 1px 2px rgba(0, 0, 0, 0.05); box-shadow: 0 1px 2px rgba(0, 0, 0, 0.05); } .pagination ul > li { display: inline; } .pagination ul > li > a, .pagination ul > li > span { float: left; padding: 4px 12px; line-height: 20px; text-decoration: none; background-color: #ffffff; border: 1px solid #dddddd; border-left-width: 0; } .pagination ul > li > a:hover, .pagination ul > .active > a, .pagination ul > .active > span { background-color: #f5f5f5; } .pagination ul > .active > a, .pagination ul > .active > span { color: #999999; cursor: default; } .pagination ul > .disabled > span, .pagination ul > .disabled > a, .pagination ul > .disabled > a:hover { color: #999999; cursor: default; background-color: transparent; } .pagination ul > li:first-child > a, .pagination ul > li:first-child > span { border-left-width: 1px; -webkit-border-bottom-left-radius: 4px; border-bottom-left-radius: 4px; -webkit-border-top-left-radius: 4px; border-top-left-radius: 4px; -moz-border-radius-bottomleft: 4px; -moz-border-radius-topleft: 4px; } .pagination ul > li:last-child > a, .pagination ul > li:last-child > span { 
-webkit-border-top-right-radius: 4px; border-top-right-radius: 4px; -webkit-border-bottom-right-radius: 4px; border-bottom-right-radius: 4px; -moz-border-radius-topright: 4px; -moz-border-radius-bottomright: 4px; } .pagination-centered { text-align: center; } .pagination-right { text-align: right; } .pagination-large ul > li > a, .pagination-large ul > li > span { padding: 11px 19px; font-size: 17.5px; } .pagination-large ul > li:first-child > a, .pagination-large ul > li:first-child > span { -webkit-border-bottom-left-radius: 6px; border-bottom-left-radius: 6px; -webkit-border-top-left-radius: 6px; border-top-left-radius: 6px; -moz-border-radius-bottomleft: 6px; -moz-border-radius-topleft: 6px; } .pagination-large ul > li:last-child > a, .pagination-large ul > li:last-child > span { -webkit-border-top-right-radius: 6px; border-top-right-radius: 6px; -webkit-border-bottom-right-radius: 6px; border-bottom-right-radius: 6px; -moz-border-radius-topright: 6px; -moz-border-radius-bottomright: 6px; } .pagination-mini ul > li:first-child > a, .pagination-small ul > li:first-child > a, .pagination-mini ul > li:first-child > span, .pagination-small ul > li:first-child > span { -webkit-border-bottom-left-radius: 3px; border-bottom-left-radius: 3px; -webkit-border-top-left-radius: 3px; border-top-left-radius: 3px; -moz-border-radius-bottomleft: 3px; -moz-border-radius-topleft: 3px; } .pagination-mini ul > li:last-child > a, .pagination-small ul > li:last-child > a, .pagination-mini ul > li:last-child > span, .pagination-small ul > li:last-child > span { -webkit-border-top-right-radius: 3px; border-top-right-radius: 3px; -webkit-border-bottom-right-radius: 3px; border-bottom-right-radius: 3px; -moz-border-radius-topright: 3px; -moz-border-radius-bottomright: 3px; } .pagination-small ul > li > a, .pagination-small ul > li > span { padding: 2px 10px; font-size: 11.9px; } .pagination-mini ul > li > a, .pagination-mini ul > li > span { padding: 1px 6px; font-size: 10.5px; } .pager { margin: 20px 0; text-align: center; list-style: none; *zoom: 1; } .pager:before, .pager:after { display: table; line-height: 0; content: ""; } .pager:after { clear: both; } .pager li { display: inline; } .pager li > a, .pager li > span { display: inline-block; padding: 5px 14px; background-color: #fff; border: 1px solid #ddd; -webkit-border-radius: 15px; -moz-border-radius: 15px; border-radius: 15px; } .pager li > a:hover { text-decoration: none; background-color: #f5f5f5; } .pager .next > a, .pager .next > span { float: right; } .pager .previous > a, .pager .previous > span { float: left; } .pager .disabled > a, .pager .disabled > a:hover, .pager .disabled > span { color: #999999; cursor: default; background-color: #fff; } .modal-backdrop { position: fixed; top: 0; right: 0; bottom: 0; left: 0; z-index: 1040; background-color: #000000; } .modal-backdrop.fade { opacity: 0; } .modal-backdrop, .modal-backdrop.fade.in { opacity: 0.8; filter: alpha(opacity=80); } .modal { position: fixed; top: 50%; left: 50%; z-index: 1050; width: 560px; margin: -250px 0 0 -280px; background-color: #ffffff; border: 1px solid #999; border: 1px solid rgba(0, 0, 0, 0.3); *border: 1px solid #999; -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; outline: none; -webkit-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); -moz-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); -webkit-background-clip: padding-box; -moz-background-clip: padding-box; background-clip: padding-box; } .modal.fade { top: -25%; 
-webkit-transition: opacity 0.3s linear, top 0.3s ease-out; -moz-transition: opacity 0.3s linear, top 0.3s ease-out; -o-transition: opacity 0.3s linear, top 0.3s ease-out; transition: opacity 0.3s linear, top 0.3s ease-out; } .modal.fade.in { top: 50%; } .modal-header { padding: 9px 15px; border-bottom: 1px solid #eee; } .modal-header .close { margin-top: 2px; } .modal-header h3 { margin: 0; line-height: 30px; } .modal-body { max-height: 400px; padding: 15px; overflow-y: auto; } .modal-form { margin-bottom: 0; } .modal-footer { padding: 14px 15px 15px; margin-bottom: 0; text-align: right; background-color: #f5f5f5; border-top: 1px solid #ddd; -webkit-border-radius: 0 0 6px 6px; -moz-border-radius: 0 0 6px 6px; border-radius: 0 0 6px 6px; *zoom: 1; -webkit-box-shadow: inset 0 1px 0 #ffffff; -moz-box-shadow: inset 0 1px 0 #ffffff; box-shadow: inset 0 1px 0 #ffffff; } .modal-footer:before, .modal-footer:after { display: table; line-height: 0; content: ""; } .modal-footer:after { clear: both; } .modal-footer .btn + .btn { margin-bottom: 0; margin-left: 5px; } .modal-footer .btn-group .btn + .btn { margin-left: -1px; } .modal-footer .btn-block + .btn-block { margin-left: 0; } .tooltip { position: absolute; z-index: 1030; display: block; padding: 5px; font-size: 11px; opacity: 0; filter: alpha(opacity=0); visibility: visible; } .tooltip.in { opacity: 0.8; filter: alpha(opacity=80); } .tooltip.top { margin-top: -3px; } .tooltip.right { margin-left: 3px; } .tooltip.bottom { margin-top: 3px; } .tooltip.left { margin-left: -3px; } .tooltip-inner { max-width: 200px; padding: 3px 8px; color: #ffffff; text-align: center; text-decoration: none; background-color: #000000; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; } .tooltip-arrow { position: absolute; width: 0; height: 0; border-color: transparent; border-style: solid; } .tooltip.top .tooltip-arrow { bottom: 0; left: 50%; margin-left: -5px; border-top-color: #000000; border-width: 5px 5px 0; } .tooltip.right .tooltip-arrow { top: 50%; left: 0; margin-top: -5px; border-right-color: #000000; border-width: 5px 5px 5px 0; } .tooltip.left .tooltip-arrow { top: 50%; right: 0; margin-top: -5px; border-left-color: #000000; border-width: 5px 0 5px 5px; } .tooltip.bottom .tooltip-arrow { top: 0; left: 50%; margin-left: -5px; border-bottom-color: #000000; border-width: 0 5px 5px; } .popover { position: absolute; top: 0; left: 0; z-index: 1010; display: none; width: 236px; padding: 1px; background-color: #ffffff; border: 1px solid #ccc; border: 1px solid rgba(0, 0, 0, 0.2); -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; -webkit-box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2); -moz-box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2); box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2); -webkit-background-clip: padding-box; -moz-background-clip: padding; background-clip: padding-box; } .popover.top { margin-top: -10px; } .popover.right { margin-left: 10px; } .popover.bottom { margin-top: 10px; } .popover.left { margin-left: -10px; } .popover-title { padding: 8px 14px; margin: 0; font-size: 14px; font-weight: normal; line-height: 18px; background-color: #f7f7f7; border-bottom: 1px solid #ebebeb; -webkit-border-radius: 5px 5px 0 0; -moz-border-radius: 5px 5px 0 0; border-radius: 5px 5px 0 0; } .popover-content { padding: 9px 14px; } .popover-content p, .popover-content ul, .popover-content ol { margin-bottom: 0; } .popover .arrow, .popover .arrow:after { position: absolute; display: inline-block; width: 0; height: 0; border-color: 
transparent; border-style: solid; } .popover .arrow:after { z-index: -1; content: ""; } .popover.top .arrow { bottom: -10px; left: 50%; margin-left: -10px; border-top-color: #ffffff; border-width: 10px 10px 0; } .popover.top .arrow:after { bottom: -1px; left: -11px; border-top-color: rgba(0, 0, 0, 0.25); border-width: 11px 11px 0; } .popover.right .arrow { top: 50%; left: -10px; margin-top: -10px; border-right-color: #ffffff; border-width: 10px 10px 10px 0; } .popover.right .arrow:after { bottom: -11px; left: -1px; border-right-color: rgba(0, 0, 0, 0.25); border-width: 11px 11px 11px 0; } .popover.bottom .arrow { top: -10px; left: 50%; margin-left: -10px; border-bottom-color: #ffffff; border-width: 0 10px 10px; } .popover.bottom .arrow:after { top: -1px; left: -11px; border-bottom-color: rgba(0, 0, 0, 0.25); border-width: 0 11px 11px; } .popover.left .arrow { top: 50%; right: -10px; margin-top: -10px; border-left-color: #ffffff; border-width: 10px 0 10px 10px; } .popover.left .arrow:after { right: -1px; bottom: -11px; border-left-color: rgba(0, 0, 0, 0.25); border-width: 11px 0 11px 11px; } .thumbnails { margin-left: -20px; list-style: none; *zoom: 1; } .thumbnails:before, .thumbnails:after { display: table; line-height: 0; content: ""; } .thumbnails:after { clear: both; } .row-fluid .thumbnails { margin-left: 0; } .thumbnails > li { float: left; margin-bottom: 20px; margin-left: 20px; } .thumbnail { display: block; padding: 4px; line-height: 20px; border: 1px solid #ddd; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; -webkit-box-shadow: 0 1px 3px rgba(0, 0, 0, 0.055); -moz-box-shadow: 0 1px 3px rgba(0, 0, 0, 0.055); box-shadow: 0 1px 3px rgba(0, 0, 0, 0.055); -webkit-transition: all 0.2s ease-in-out; -moz-transition: all 0.2s ease-in-out; -o-transition: all 0.2s ease-in-out; transition: all 0.2s ease-in-out; } a.thumbnail:hover { border-color: #0088cc; -webkit-box-shadow: 0 1px 4px rgba(0, 105, 214, 0.25); -moz-box-shadow: 0 1px 4px rgba(0, 105, 214, 0.25); box-shadow: 0 1px 4px rgba(0, 105, 214, 0.25); } .thumbnail > img { display: block; max-width: 100%; margin-right: auto; margin-left: auto; } .thumbnail .caption { padding: 9px; color: #555555; } .media, .media-body { overflow: hidden; *overflow: visible; zoom: 1; } .media, .media .media { margin-top: 15px; } .media:first-child { margin-top: 0; } .media-object { display: block; } .media-heading { margin: 0 0 5px; } .media .pull-left { margin-right: 10px; } .media .pull-right { margin-left: 10px; } .media-list { margin-left: 0; list-style: none; } .label, .badge { display: inline-block; padding: 2px 4px; font-size: 11.844px; font-weight: bold; line-height: 14px; color: #ffffff; text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); white-space: nowrap; vertical-align: baseline; background-color: #999999; } .label { -webkit-border-radius: 3px; -moz-border-radius: 3px; border-radius: 3px; } .badge { padding-right: 9px; padding-left: 9px; -webkit-border-radius: 9px; -moz-border-radius: 9px; border-radius: 9px; } a.label:hover, a.badge:hover { color: #ffffff; text-decoration: none; cursor: pointer; } .label-important, .badge-important { background-color: #b94a48; } .label-important[href], .badge-important[href] { background-color: #953b39; } .label-warning, .badge-warning { background-color: #f89406; } .label-warning[href], .badge-warning[href] { background-color: #c67605; } .label-success, .badge-success { background-color: #468847; } .label-success[href], .badge-success[href] { background-color: #356635; } .label-info, 
.badge-info { background-color: #3a87ad; } .label-info[href], .badge-info[href] { background-color: #2d6987; } .label-inverse, .badge-inverse { background-color: #333333; } .label-inverse[href], .badge-inverse[href] { background-color: #1a1a1a; } .btn .label, .btn .badge { position: relative; top: -1px; } .btn-mini .label, .btn-mini .badge { top: 0; } @-webkit-keyframes progress-bar-stripes { from { background-position: 40px 0; } to { background-position: 0 0; } } @-moz-keyframes progress-bar-stripes { from { background-position: 40px 0; } to { background-position: 0 0; } } @-ms-keyframes progress-bar-stripes { from { background-position: 40px 0; } to { background-position: 0 0; } } @-o-keyframes progress-bar-stripes { from { background-position: 0 0; } to { background-position: 40px 0; } } @keyframes progress-bar-stripes { from { background-position: 40px 0; } to { background-position: 0 0; } } .progress { height: 20px; margin-bottom: 20px; overflow: hidden; background-color: #f7f7f7; background-image: -moz-linear-gradient(top, #f5f5f5, #f9f9f9); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#f5f5f5), to(#f9f9f9)); background-image: -webkit-linear-gradient(top, #f5f5f5, #f9f9f9); background-image: -o-linear-gradient(top, #f5f5f5, #f9f9f9); background-image: linear-gradient(to bottom, #f5f5f5, #f9f9f9); background-repeat: repeat-x; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#fff9f9f9', GradientType=0); -webkit-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1); -moz-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1); box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1); } .progress .bar { float: left; width: 0; height: 100%; font-size: 12px; color: #ffffff; text-align: center; text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); background-color: #0e90d2; background-image: -moz-linear-gradient(top, #149bdf, #0480be); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#149bdf), to(#0480be)); background-image: -webkit-linear-gradient(top, #149bdf, #0480be); background-image: -o-linear-gradient(top, #149bdf, #0480be); background-image: linear-gradient(to bottom, #149bdf, #0480be); background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff149bdf', endColorstr='#ff0480be', GradientType=0); -webkit-box-shadow: inset 0 -1px 0 rgba(0, 0, 0, 0.15); -moz-box-shadow: inset 0 -1px 0 rgba(0, 0, 0, 0.15); box-shadow: inset 0 -1px 0 rgba(0, 0, 0, 0.15); -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; -webkit-transition: width 0.6s ease; -moz-transition: width 0.6s ease; -o-transition: width 0.6s ease; transition: width 0.6s ease; } .progress .bar + .bar { -webkit-box-shadow: inset 1px 0 0 rgba(0, 0, 0, 0.15), inset 0 -1px 0 rgba(0, 0, 0, 0.15); -moz-box-shadow: inset 1px 0 0 rgba(0, 0, 0, 0.15), inset 0 -1px 0 rgba(0, 0, 0, 0.15); box-shadow: inset 1px 0 0 rgba(0, 0, 0, 0.15), inset 0 -1px 0 rgba(0, 0, 0, 0.15); } .progress-striped .bar { background-color: #149bdf; background-image: -webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent)); background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, 
rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: -moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); -webkit-background-size: 40px 40px; -moz-background-size: 40px 40px; -o-background-size: 40px 40px; background-size: 40px 40px; } .progress.active .bar { -webkit-animation: progress-bar-stripes 2s linear infinite; -moz-animation: progress-bar-stripes 2s linear infinite; -ms-animation: progress-bar-stripes 2s linear infinite; -o-animation: progress-bar-stripes 2s linear infinite; animation: progress-bar-stripes 2s linear infinite; } .progress-danger .bar, .progress .bar-danger { background-color: #dd514c; background-image: -moz-linear-gradient(top, #ee5f5b, #c43c35); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#ee5f5b), to(#c43c35)); background-image: -webkit-linear-gradient(top, #ee5f5b, #c43c35); background-image: -o-linear-gradient(top, #ee5f5b, #c43c35); background-image: linear-gradient(to bottom, #ee5f5b, #c43c35); background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffee5f5b', endColorstr='#ffc43c35', GradientType=0); } .progress-danger.progress-striped .bar, .progress-striped .bar-danger { background-color: #ee5f5b; background-image: -webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent)); background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: -moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); } .progress-success .bar, .progress .bar-success { background-color: #5eb95e; background-image: -moz-linear-gradient(top, #62c462, #57a957); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#62c462), to(#57a957)); background-image: -webkit-linear-gradient(top, #62c462, #57a957); background-image: -o-linear-gradient(top, #62c462, #57a957); background-image: linear-gradient(to bottom, #62c462, #57a957); background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff62c462', endColorstr='#ff57a957', GradientType=0); } .progress-success.progress-striped .bar, .progress-striped .bar-success { background-color: #62c462; 
background-image: -webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent)); background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: -moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); } .progress-info .bar, .progress .bar-info { background-color: #4bb1cf; background-image: -moz-linear-gradient(top, #5bc0de, #339bb9); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#5bc0de), to(#339bb9)); background-image: -webkit-linear-gradient(top, #5bc0de, #339bb9); background-image: -o-linear-gradient(top, #5bc0de, #339bb9); background-image: linear-gradient(to bottom, #5bc0de, #339bb9); background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff339bb9', GradientType=0); } .progress-info.progress-striped .bar, .progress-striped .bar-info { background-color: #5bc0de; background-image: -webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent)); background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: -moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); } .progress-warning .bar, .progress .bar-warning { background-color: #faa732; background-image: -moz-linear-gradient(top, #fbb450, #f89406); background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#fbb450), to(#f89406)); background-image: -webkit-linear-gradient(top, #fbb450, #f89406); background-image: -o-linear-gradient(top, #fbb450, #f89406); background-image: linear-gradient(to bottom, #fbb450, #f89406); background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffbb450', endColorstr='#fff89406', GradientType=0); } .progress-warning.progress-striped .bar, .progress-striped .bar-warning { background-color: 
#fbb450; background-image: -webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent)); background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: -moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); } .accordion { margin-bottom: 20px; } .accordion-group { margin-bottom: 2px; border: 1px solid #e5e5e5; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; } .accordion-heading { border-bottom: 0; } .accordion-heading .accordion-toggle { display: block; padding: 8px 15px; } .accordion-toggle { cursor: pointer; } .accordion-inner { padding: 9px 15px; border-top: 1px solid #e5e5e5; } .carousel { position: relative; margin-bottom: 20px; line-height: 1; } .carousel-inner { position: relative; width: 100%; overflow: hidden; } .carousel .item { position: relative; display: none; -webkit-transition: 0.6s ease-in-out left; -moz-transition: 0.6s ease-in-out left; -o-transition: 0.6s ease-in-out left; transition: 0.6s ease-in-out left; } .carousel .item > img { display: block; line-height: 1; } .carousel .active, .carousel .next, .carousel .prev { display: block; } .carousel .active { left: 0; } .carousel .next, .carousel .prev { position: absolute; top: 0; width: 100%; } .carousel .next { left: 100%; } .carousel .prev { left: -100%; } .carousel .next.left, .carousel .prev.right { left: 0; } .carousel .active.left { left: -100%; } .carousel .active.right { left: 100%; } .carousel-control { position: absolute; top: 40%; left: 15px; width: 40px; height: 40px; margin-top: -20px; font-size: 60px; font-weight: 100; line-height: 30px; color: #ffffff; text-align: center; background: #222222; border: 3px solid #ffffff; -webkit-border-radius: 23px; -moz-border-radius: 23px; border-radius: 23px; opacity: 0.5; filter: alpha(opacity=50); } .carousel-control.right { right: 15px; left: auto; } .carousel-control:hover { color: #ffffff; text-decoration: none; opacity: 0.9; filter: alpha(opacity=90); } .carousel-caption { position: absolute; right: 0; bottom: 0; left: 0; padding: 15px; background: #333333; background: rgba(0, 0, 0, 0.75); } .carousel-caption h4, .carousel-caption p { line-height: 20px; color: #ffffff; } .carousel-caption h4 { margin: 0 0 5px; } .carousel-caption p { margin-bottom: 0; } .hero-unit { padding: 60px; margin-bottom: 30px; font-size: 18px; font-weight: 200; line-height: 30px; color: inherit; background-color: #eeeeee; -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; } .hero-unit h1 { margin-bottom: 0; font-size: 60px; line-height: 1; letter-spacing: -1px; color: inherit; } .hero-unit li { line-height: 30px; } .pull-right { float: right; } 
.pull-left { float: left; } .hide { display: none; } .show { display: block; } .invisible { visibility: hidden; } .affix { position: fixed; } ceilometer-2014.1.5/ceilometer/api/v1/static/bootstrap/css/bootstrap-responsive.css0000664000567000056700000005231512540642615031514 0ustar jenkinsjenkins00000000000000/*! * Bootstrap Responsive v2.2.1 * * Copyright 2012 Twitter, Inc * Licensed under the Apache License v2.0 * http://www.apache.org/licenses/LICENSE-2.0 * * Designed and built with all the love in the world @twitter by @mdo and @fat. */ .clearfix { *zoom: 1; } .clearfix:before, .clearfix:after { display: table; line-height: 0; content: ""; } .clearfix:after { clear: both; } .hide-text { font: 0/0 a; color: transparent; text-shadow: none; background-color: transparent; border: 0; } .input-block-level { display: block; width: 100%; min-height: 30px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; } .hidden { display: none; visibility: hidden; } .visible-phone { display: none !important; } .visible-tablet { display: none !important; } .hidden-desktop { display: none !important; } .visible-desktop { display: inherit !important; } @media (min-width: 768px) and (max-width: 979px) { .hidden-desktop { display: inherit !important; } .visible-desktop { display: none !important ; } .visible-tablet { display: inherit !important; } .hidden-tablet { display: none !important; } } @media (max-width: 767px) { .hidden-desktop { display: inherit !important; } .visible-desktop { display: none !important; } .visible-phone { display: inherit !important; } .hidden-phone { display: none !important; } } @media (min-width: 1200px) { .row { margin-left: -30px; *zoom: 1; } .row:before, .row:after { display: table; line-height: 0; content: ""; } .row:after { clear: both; } [class*="span"] { float: left; min-height: 1px; margin-left: 30px; } .container, .navbar-static-top .container, .navbar-fixed-top .container, .navbar-fixed-bottom .container { width: 1170px; } .span12 { width: 1170px; } .span11 { width: 1070px; } .span10 { width: 970px; } .span9 { width: 870px; } .span8 { width: 770px; } .span7 { width: 670px; } .span6 { width: 570px; } .span5 { width: 470px; } .span4 { width: 370px; } .span3 { width: 270px; } .span2 { width: 170px; } .span1 { width: 70px; } .offset12 { margin-left: 1230px; } .offset11 { margin-left: 1130px; } .offset10 { margin-left: 1030px; } .offset9 { margin-left: 930px; } .offset8 { margin-left: 830px; } .offset7 { margin-left: 730px; } .offset6 { margin-left: 630px; } .offset5 { margin-left: 530px; } .offset4 { margin-left: 430px; } .offset3 { margin-left: 330px; } .offset2 { margin-left: 230px; } .offset1 { margin-left: 130px; } .row-fluid { width: 100%; *zoom: 1; } .row-fluid:before, .row-fluid:after { display: table; line-height: 0; content: ""; } .row-fluid:after { clear: both; } .row-fluid [class*="span"] { display: block; float: left; width: 100%; min-height: 30px; margin-left: 2.564102564102564%; *margin-left: 2.5109110747408616%; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; } .row-fluid [class*="span"]:first-child { margin-left: 0; } .row-fluid .controls-row [class*="span"] + [class*="span"] { margin-left: 2.564102564102564%; } .row-fluid .span12 { width: 100%; *width: 99.94680851063829%; } .row-fluid .span11 { width: 91.45299145299145%; *width: 91.39979996362975%; } .row-fluid .span10 { width: 82.90598290598291%; *width: 82.8527914166212%; } .row-fluid .span9 { width: 74.35897435897436%; *width: 
74.30578286961266%; } .row-fluid .span8 { width: 65.81196581196582%; *width: 65.75877432260411%; } .row-fluid .span7 { width: 57.26495726495726%; *width: 57.21176577559556%; } .row-fluid .span6 { width: 48.717948717948715%; *width: 48.664757228587014%; } .row-fluid .span5 { width: 40.17094017094017%; *width: 40.11774868157847%; } .row-fluid .span4 { width: 31.623931623931625%; *width: 31.570740134569924%; } .row-fluid .span3 { width: 23.076923076923077%; *width: 23.023731587561375%; } .row-fluid .span2 { width: 14.52991452991453%; *width: 14.476723040552828%; } .row-fluid .span1 { width: 5.982905982905983%; *width: 5.929714493544281%; } .row-fluid .offset12 { margin-left: 105.12820512820512%; *margin-left: 105.02182214948171%; } .row-fluid .offset12:first-child { margin-left: 102.56410256410257%; *margin-left: 102.45771958537915%; } .row-fluid .offset11 { margin-left: 96.58119658119658%; *margin-left: 96.47481360247316%; } .row-fluid .offset11:first-child { margin-left: 94.01709401709402%; *margin-left: 93.91071103837061%; } .row-fluid .offset10 { margin-left: 88.03418803418803%; *margin-left: 87.92780505546462%; } .row-fluid .offset10:first-child { margin-left: 85.47008547008548%; *margin-left: 85.36370249136206%; } .row-fluid .offset9 { margin-left: 79.48717948717949%; *margin-left: 79.38079650845607%; } .row-fluid .offset9:first-child { margin-left: 76.92307692307693%; *margin-left: 76.81669394435352%; } .row-fluid .offset8 { margin-left: 70.94017094017094%; *margin-left: 70.83378796144753%; } .row-fluid .offset8:first-child { margin-left: 68.37606837606839%; *margin-left: 68.26968539734497%; } .row-fluid .offset7 { margin-left: 62.393162393162385%; *margin-left: 62.28677941443899%; } .row-fluid .offset7:first-child { margin-left: 59.82905982905982%; *margin-left: 59.72267685033642%; } .row-fluid .offset6 { margin-left: 53.84615384615384%; *margin-left: 53.739770867430444%; } .row-fluid .offset6:first-child { margin-left: 51.28205128205128%; *margin-left: 51.175668303327875%; } .row-fluid .offset5 { margin-left: 45.299145299145295%; *margin-left: 45.1927623204219%; } .row-fluid .offset5:first-child { margin-left: 42.73504273504273%; *margin-left: 42.62865975631933%; } .row-fluid .offset4 { margin-left: 36.75213675213675%; *margin-left: 36.645753773413354%; } .row-fluid .offset4:first-child { margin-left: 34.18803418803419%; *margin-left: 34.081651209310785%; } .row-fluid .offset3 { margin-left: 28.205128205128204%; *margin-left: 28.0987452264048%; } .row-fluid .offset3:first-child { margin-left: 25.641025641025642%; *margin-left: 25.53464266230224%; } .row-fluid .offset2 { margin-left: 19.65811965811966%; *margin-left: 19.551736679396257%; } .row-fluid .offset2:first-child { margin-left: 17.094017094017094%; *margin-left: 16.98763411529369%; } .row-fluid .offset1 { margin-left: 11.11111111111111%; *margin-left: 11.004728132387708%; } .row-fluid .offset1:first-child { margin-left: 8.547008547008547%; *margin-left: 8.440625568285142%; } input, textarea, .uneditable-input { margin-left: 0; } .controls-row [class*="span"] + [class*="span"] { margin-left: 30px; } input.span12, textarea.span12, .uneditable-input.span12 { width: 1156px; } input.span11, textarea.span11, .uneditable-input.span11 { width: 1056px; } input.span10, textarea.span10, .uneditable-input.span10 { width: 956px; } input.span9, textarea.span9, .uneditable-input.span9 { width: 856px; } input.span8, textarea.span8, .uneditable-input.span8 { width: 756px; } input.span7, textarea.span7, .uneditable-input.span7 { width: 656px; } 
input.span6, textarea.span6, .uneditable-input.span6 { width: 556px; } input.span5, textarea.span5, .uneditable-input.span5 { width: 456px; } input.span4, textarea.span4, .uneditable-input.span4 { width: 356px; } input.span3, textarea.span3, .uneditable-input.span3 { width: 256px; } input.span2, textarea.span2, .uneditable-input.span2 { width: 156px; } input.span1, textarea.span1, .uneditable-input.span1 { width: 56px; } .thumbnails { margin-left: -30px; } .thumbnails > li { margin-left: 30px; } .row-fluid .thumbnails { margin-left: 0; } } @media (min-width: 768px) and (max-width: 979px) { .row { margin-left: -20px; *zoom: 1; } .row:before, .row:after { display: table; line-height: 0; content: ""; } .row:after { clear: both; } [class*="span"] { float: left; min-height: 1px; margin-left: 20px; } .container, .navbar-static-top .container, .navbar-fixed-top .container, .navbar-fixed-bottom .container { width: 724px; } .span12 { width: 724px; } .span11 { width: 662px; } .span10 { width: 600px; } .span9 { width: 538px; } .span8 { width: 476px; } .span7 { width: 414px; } .span6 { width: 352px; } .span5 { width: 290px; } .span4 { width: 228px; } .span3 { width: 166px; } .span2 { width: 104px; } .span1 { width: 42px; } .offset12 { margin-left: 764px; } .offset11 { margin-left: 702px; } .offset10 { margin-left: 640px; } .offset9 { margin-left: 578px; } .offset8 { margin-left: 516px; } .offset7 { margin-left: 454px; } .offset6 { margin-left: 392px; } .offset5 { margin-left: 330px; } .offset4 { margin-left: 268px; } .offset3 { margin-left: 206px; } .offset2 { margin-left: 144px; } .offset1 { margin-left: 82px; } .row-fluid { width: 100%; *zoom: 1; } .row-fluid:before, .row-fluid:after { display: table; line-height: 0; content: ""; } .row-fluid:after { clear: both; } .row-fluid [class*="span"] { display: block; float: left; width: 100%; min-height: 30px; margin-left: 2.7624309392265194%; *margin-left: 2.709239449864817%; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; } .row-fluid [class*="span"]:first-child { margin-left: 0; } .row-fluid .controls-row [class*="span"] + [class*="span"] { margin-left: 2.7624309392265194%; } .row-fluid .span12 { width: 100%; *width: 99.94680851063829%; } .row-fluid .span11 { width: 91.43646408839778%; *width: 91.38327259903608%; } .row-fluid .span10 { width: 82.87292817679558%; *width: 82.81973668743387%; } .row-fluid .span9 { width: 74.30939226519337%; *width: 74.25620077583166%; } .row-fluid .span8 { width: 65.74585635359117%; *width: 65.69266486422946%; } .row-fluid .span7 { width: 57.18232044198895%; *width: 57.12912895262725%; } .row-fluid .span6 { width: 48.61878453038674%; *width: 48.56559304102504%; } .row-fluid .span5 { width: 40.05524861878453%; *width: 40.00205712942283%; } .row-fluid .span4 { width: 31.491712707182323%; *width: 31.43852121782062%; } .row-fluid .span3 { width: 22.92817679558011%; *width: 22.87498530621841%; } .row-fluid .span2 { width: 14.3646408839779%; *width: 14.311449394616199%; } .row-fluid .span1 { width: 5.801104972375691%; *width: 5.747913483013988%; } .row-fluid .offset12 { margin-left: 105.52486187845304%; *margin-left: 105.41847889972962%; } .row-fluid .offset12:first-child { margin-left: 102.76243093922652%; *margin-left: 102.6560479605031%; } .row-fluid .offset11 { margin-left: 96.96132596685082%; *margin-left: 96.8549429881274%; } .row-fluid .offset11:first-child { margin-left: 94.1988950276243%; *margin-left: 94.09251204890089%; } .row-fluid .offset10 { margin-left: 88.39779005524862%; 
*margin-left: 88.2914070765252%; } .row-fluid .offset10:first-child { margin-left: 85.6353591160221%; *margin-left: 85.52897613729868%; } .row-fluid .offset9 { margin-left: 79.8342541436464%; *margin-left: 79.72787116492299%; } .row-fluid .offset9:first-child { margin-left: 77.07182320441989%; *margin-left: 76.96544022569647%; } .row-fluid .offset8 { margin-left: 71.2707182320442%; *margin-left: 71.16433525332079%; } .row-fluid .offset8:first-child { margin-left: 68.50828729281768%; *margin-left: 68.40190431409427%; } .row-fluid .offset7 { margin-left: 62.70718232044199%; *margin-left: 62.600799341718584%; } .row-fluid .offset7:first-child { margin-left: 59.94475138121547%; *margin-left: 59.838368402492065%; } .row-fluid .offset6 { margin-left: 54.14364640883978%; *margin-left: 54.037263430116376%; } .row-fluid .offset6:first-child { margin-left: 51.38121546961326%; *margin-left: 51.27483249088986%; } .row-fluid .offset5 { margin-left: 45.58011049723757%; *margin-left: 45.47372751851417%; } .row-fluid .offset5:first-child { margin-left: 42.81767955801105%; *margin-left: 42.71129657928765%; } .row-fluid .offset4 { margin-left: 37.01657458563536%; *margin-left: 36.91019160691196%; } .row-fluid .offset4:first-child { margin-left: 34.25414364640884%; *margin-left: 34.14776066768544%; } .row-fluid .offset3 { margin-left: 28.45303867403315%; *margin-left: 28.346655695309746%; } .row-fluid .offset3:first-child { margin-left: 25.69060773480663%; *margin-left: 25.584224756083227%; } .row-fluid .offset2 { margin-left: 19.88950276243094%; *margin-left: 19.783119783707537%; } .row-fluid .offset2:first-child { margin-left: 17.12707182320442%; *margin-left: 17.02068884448102%; } .row-fluid .offset1 { margin-left: 11.32596685082873%; *margin-left: 11.219583872105325%; } .row-fluid .offset1:first-child { margin-left: 8.56353591160221%; *margin-left: 8.457152932878806%; } input, textarea, .uneditable-input { margin-left: 0; } .controls-row [class*="span"] + [class*="span"] { margin-left: 20px; } input.span12, textarea.span12, .uneditable-input.span12 { width: 710px; } input.span11, textarea.span11, .uneditable-input.span11 { width: 648px; } input.span10, textarea.span10, .uneditable-input.span10 { width: 586px; } input.span9, textarea.span9, .uneditable-input.span9 { width: 524px; } input.span8, textarea.span8, .uneditable-input.span8 { width: 462px; } input.span7, textarea.span7, .uneditable-input.span7 { width: 400px; } input.span6, textarea.span6, .uneditable-input.span6 { width: 338px; } input.span5, textarea.span5, .uneditable-input.span5 { width: 276px; } input.span4, textarea.span4, .uneditable-input.span4 { width: 214px; } input.span3, textarea.span3, .uneditable-input.span3 { width: 152px; } input.span2, textarea.span2, .uneditable-input.span2 { width: 90px; } input.span1, textarea.span1, .uneditable-input.span1 { width: 28px; } } @media (max-width: 767px) { body { padding-right: 20px; padding-left: 20px; } .navbar-fixed-top, .navbar-fixed-bottom, .navbar-static-top { margin-right: -20px; margin-left: -20px; } .container-fluid { padding: 0; } .dl-horizontal dt { float: none; width: auto; clear: none; text-align: left; } .dl-horizontal dd { margin-left: 0; } .container { width: auto; } .row-fluid { width: 100%; } .row, .thumbnails { margin-left: 0; } .thumbnails > li { float: none; margin-left: 0; } [class*="span"], .uneditable-input[class*="span"], .row-fluid [class*="span"] { display: block; float: none; width: 100%; margin-left: 0; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; 
box-sizing: border-box; } .span12, .row-fluid .span12 { width: 100%; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; } .row-fluid [class*="offset"]:first-child { margin-left: 0; } .input-large, .input-xlarge, .input-xxlarge, input[class*="span"], select[class*="span"], textarea[class*="span"], .uneditable-input { display: block; width: 100%; min-height: 30px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; } .input-prepend input, .input-append input, .input-prepend input[class*="span"], .input-append input[class*="span"] { display: inline-block; width: auto; } .controls-row [class*="span"] + [class*="span"] { margin-left: 0; } .modal { position: fixed; top: 20px; right: 20px; left: 20px; width: auto; margin: 0; } .modal.fade { top: -100px; } .modal.fade.in { top: 20px; } } @media (max-width: 480px) { .nav-collapse { -webkit-transform: translate3d(0, 0, 0); } .page-header h1 small { display: block; line-height: 20px; } input[type="checkbox"], input[type="radio"] { border: 1px solid #ccc; } .form-horizontal .control-label { float: none; width: auto; padding-top: 0; text-align: left; } .form-horizontal .controls { margin-left: 0; } .form-horizontal .control-list { padding-top: 0; } .form-horizontal .form-actions { padding-right: 10px; padding-left: 10px; } .media .pull-left, .media .pull-right { display: block; float: none; margin-bottom: 10px; } .media-object { margin-right: 0; margin-left: 0; } .modal { top: 10px; right: 10px; left: 10px; } .modal-header .close { padding: 10px; margin: -10px; } .carousel-caption { position: static; } } @media (max-width: 979px) { body { padding-top: 0; } .navbar-fixed-top, .navbar-fixed-bottom { position: static; } .navbar-fixed-top { margin-bottom: 20px; } .navbar-fixed-bottom { margin-top: 20px; } .navbar-fixed-top .navbar-inner, .navbar-fixed-bottom .navbar-inner { padding: 5px; } .navbar .container { width: auto; padding: 0; } .navbar .brand { padding-right: 10px; padding-left: 10px; margin: 0 0 0 -5px; } .nav-collapse { clear: both; } .nav-collapse .nav { float: none; margin: 0 0 10px; } .nav-collapse .nav > li { float: none; } .nav-collapse .nav > li > a { margin-bottom: 2px; } .nav-collapse .nav > .divider-vertical { display: none; } .nav-collapse .nav .nav-header { color: #777777; text-shadow: none; } .nav-collapse .nav > li > a, .nav-collapse .dropdown-menu a { padding: 9px 15px; font-weight: bold; color: #777777; -webkit-border-radius: 3px; -moz-border-radius: 3px; border-radius: 3px; } .nav-collapse .btn { padding: 4px 10px 4px; font-weight: normal; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; } .nav-collapse .dropdown-menu li + li a { margin-bottom: 2px; } .nav-collapse .nav > li > a:hover, .nav-collapse .dropdown-menu a:hover { background-color: #f2f2f2; } .navbar-inverse .nav-collapse .nav > li > a, .navbar-inverse .nav-collapse .dropdown-menu a { color: #999999; } .navbar-inverse .nav-collapse .nav > li > a:hover, .navbar-inverse .nav-collapse .dropdown-menu a:hover { background-color: #111111; } .nav-collapse.in .btn-group { padding: 0; margin-top: 5px; } .nav-collapse .dropdown-menu { position: static; top: auto; left: auto; display: none; float: none; max-width: none; padding: 0; margin: 0 15px; background-color: transparent; border: none; -webkit-border-radius: 0; -moz-border-radius: 0; border-radius: 0; -webkit-box-shadow: none; -moz-box-shadow: none; box-shadow: none; } .nav-collapse .open > .dropdown-menu { display: block; } 
.nav-collapse .dropdown-menu:before, .nav-collapse .dropdown-menu:after { display: none; } .nav-collapse .dropdown-menu .divider { display: none; } .nav-collapse .nav > li > .dropdown-menu:before, .nav-collapse .nav > li > .dropdown-menu:after { display: none; } .nav-collapse .navbar-form, .nav-collapse .navbar-search { float: none; padding: 10px 15px; margin: 10px 0; border-top: 1px solid #f2f2f2; border-bottom: 1px solid #f2f2f2; -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.1); -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.1); box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.1); } .navbar-inverse .nav-collapse .navbar-form, .navbar-inverse .nav-collapse .navbar-search { border-top-color: #111111; border-bottom-color: #111111; } .navbar .nav-collapse .nav.pull-right { float: none; margin-left: 0; } .nav-collapse, .nav-collapse.collapse { height: 0; overflow: hidden; } .navbar .btn-navbar { display: block; } .navbar-static .navbar-inner { padding-right: 10px; padding-left: 10px; } } @media (min-width: 980px) { .nav-collapse.collapse { height: auto !important; overflow: visible !important; } } ceilometer-2014.1.5/ceilometer/api/v1/static/bootstrap/js/0000775000567000056700000000000012540643223024404 5ustar jenkinsjenkins00000000000000ceilometer-2014.1.5/ceilometer/api/v1/static/bootstrap/js/bootstrap.js0000664000567000056700000015623612540642615027000 0ustar jenkinsjenkins00000000000000/* =================================================== * bootstrap-transition.js v2.2.1 * http://twitter.github.com/bootstrap/javascript.html#transitions * =================================================== * Copyright 2012 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ========================================================== */ !function ($) { "use strict"; // jshint ;_; /* CSS TRANSITION SUPPORT (http://www.modernizr.com/) * ======================================================= */ $(function () { $.support.transition = (function () { var transitionEnd = (function () { var el = document.createElement('bootstrap') , transEndEventNames = { 'WebkitTransition' : 'webkitTransitionEnd' , 'MozTransition' : 'transitionend' , 'OTransition' : 'oTransitionEnd otransitionend' , 'transition' : 'transitionend' } , name for (name in transEndEventNames){ if (el.style[name] !== undefined) { return transEndEventNames[name] } } }()) return transitionEnd && { end: transitionEnd } })() }) }(window.jQuery);/* ========================================================== * bootstrap-alert.js v2.2.1 * http://twitter.github.com/bootstrap/javascript.html#alerts * ========================================================== * Copyright 2012 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ========================================================== */


!function ($) {

  "use strict"; // jshint ;_;


 /* ALERT CLASS DEFINITION
  * ====================== */

  var dismiss = '[data-dismiss="alert"]'
    , Alert = function (el) {
        $(el).on('click', dismiss, this.close)
      }

  Alert.prototype.close = function (e) {
    var $this = $(this)
      , selector = $this.attr('data-target')
      , $parent

    if (!selector) {
      selector = $this.attr('href')
      selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') //strip for ie7
    }

    $parent = $(selector)

    e && e.preventDefault()

    $parent.length || ($parent = $this.hasClass('alert') ? $this : $this.parent())

    $parent.trigger(e = $.Event('close'))

    if (e.isDefaultPrevented()) return

    $parent.removeClass('in')

    function removeElement() {
      $parent
        .trigger('closed')
        .remove()
    }

    $.support.transition && $parent.hasClass('fade') ?
      $parent.on($.support.transition.end, removeElement) :
      removeElement()
  }


 /* ALERT PLUGIN DEFINITION
  * ======================= */

  $.fn.alert = function (option) {
    return this.each(function () {
      var $this = $(this)
        , data = $this.data('alert')
      if (!data) $this.data('alert', (data = new Alert(this)))
      if (typeof option == 'string') data[option].call($this)
    })
  }

  $.fn.alert.Constructor = Alert


 /* ALERT DATA-API
  * ============== */

  $(document).on('click.alert.data-api', dismiss, Alert.prototype.close)

}(window.jQuery);/* ============================================================
 * bootstrap-button.js v2.2.1
 * http://twitter.github.com/bootstrap/javascript.html#buttons
 * ============================================================
 * Copyright 2012 Twitter, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============================================================ */


!function ($) {

  "use strict"; // jshint ;_;


 /* BUTTON PUBLIC CLASS DEFINITION
  * ============================== */

  var Button = function (element, options) {
    this.$element = $(element)
    this.options = $.extend({}, $.fn.button.defaults, options)
  }

  Button.prototype.setState = function (state) {
    var d = 'disabled'
      , $el = this.$element
      , data = $el.data()
      , val = $el.is('input') ? 'val' : 'html'

    state = state + 'Text'
    data.resetText || $el.data('resetText', $el[val]())

    $el[val](data[state] || this.options[state])

    // push to event loop to allow forms to submit
    setTimeout(function () {
      state == 'loadingText' ?
        $el.addClass(d).attr(d, d) :
        $el.removeClass(d).removeAttr(d)
    }, 0)
  }

  Button.prototype.toggle = function () {
    var $parent = this.$element.closest('[data-toggle="buttons-radio"]')

    $parent && $parent
      .find('.active')
      .removeClass('active')

    this.$element.toggleClass('active')
  }


 /* BUTTON PLUGIN DEFINITION
  * ======================== */

  $.fn.button = function (option) {
    return this.each(function () {
      var $this = $(this)
        , data = $this.data('button')
        , options = typeof option == 'object' && option
      if (!data) $this.data('button', (data = new Button(this, options)))
      if (option == 'toggle') data.toggle()
      else if (option) data.setState(option)
    })
  }

  $.fn.button.defaults = {
    loadingText: 'loading...'
  }

  $.fn.button.Constructor = Button


 /* BUTTON DATA-API
  * =============== */

  $(document).on('click.button.data-api', '[data-toggle^=button]', function (e) {
    var $btn = $(e.target)
    if (!$btn.hasClass('btn')) $btn = $btn.closest('.btn')
    $btn.button('toggle')
  })

}(window.jQuery);/* ==========================================================
 * bootstrap-carousel.js v2.2.1
 * http://twitter.github.com/bootstrap/javascript.html#carousel
 * ==========================================================
 * Copyright 2012 Twitter, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ========================================================== */


!function ($) {

  "use strict"; // jshint ;_;


 /* CAROUSEL CLASS DEFINITION
  * ========================= */

  var Carousel = function (element, options) {
    this.$element = $(element)
    this.options = options
    this.options.slide && this.slide(this.options.slide)
    this.options.pause == 'hover' && this.$element
      .on('mouseenter', $.proxy(this.pause, this))
      .on('mouseleave', $.proxy(this.cycle, this))
  }

  Carousel.prototype = {

    cycle: function (e) {
      if (!e) this.paused = false
      this.options.interval
        && !this.paused
        && (this.interval = setInterval($.proxy(this.next, this), this.options.interval))
      return this
    }

  , to: function (pos) {
      var $active = this.$element.find('.item.active')
        , children = $active.parent().children()
        , activePos = children.index($active)
        , that = this

      if (pos > (children.length - 1) || pos < 0) return

      if (this.sliding) {
        return this.$element.one('slid', function () {
          that.to(pos)
        })
      }

      if (activePos == pos) {
        return this.pause().cycle()
      }

      return this.slide(pos > activePos ? 'next' : 'prev', $(children[pos]))
    }

  , pause: function (e) {
      if (!e) this.paused = true
      if (this.$element.find('.next, .prev').length && $.support.transition.end) {
        this.$element.trigger($.support.transition.end)
        this.cycle()
      }
      clearInterval(this.interval)
      this.interval = null
      return this
    }

  , next: function () {
      if (this.sliding) return
      return this.slide('next')
    }

  , prev: function () {
      if (this.sliding) return
      return this.slide('prev')
    }

  , slide: function (type, next) {
      var $active = this.$element.find('.item.active')
        , $next = next || $active[type]()
        , isCycling = this.interval
        , direction = type == 'next' ? 'left' : 'right'
        , fallback = type == 'next' ? 'first' : 'last'
        , that = this
        , e

      this.sliding = true

      isCycling && this.pause()

      $next = $next.length ? $next : this.$element.find('.item')[fallback]()

      e = $.Event('slide', {
        relatedTarget: $next[0]
      })

      if ($next.hasClass('active')) return

      if ($.support.transition && this.$element.hasClass('slide')) {
        this.$element.trigger(e)
        if (e.isDefaultPrevented()) return
        $next.addClass(type)
        $next[0].offsetWidth // force reflow
        $active.addClass(direction)
        $next.addClass(direction)
        this.$element.one($.support.transition.end, function () {
          $next.removeClass([type, direction].join(' ')).addClass('active')
          $active.removeClass(['active', direction].join(' '))
          that.sliding = false
          setTimeout(function () { that.$element.trigger('slid') }, 0)
        })
      } else {
        this.$element.trigger(e)
        if (e.isDefaultPrevented()) return
        $active.removeClass('active')
        $next.addClass('active')
        this.sliding = false
        this.$element.trigger('slid')
      }

      isCycling && this.cycle()

      return this
    }

  }


 /* CAROUSEL PLUGIN DEFINITION
  * ========================== */

  $.fn.carousel = function (option) {
    return this.each(function () {
      var $this = $(this)
        , data = $this.data('carousel')
        , options = $.extend({}, $.fn.carousel.defaults, typeof option == 'object' && option)
        , action = typeof option == 'string' ? option : options.slide
      if (!data) $this.data('carousel', (data = new Carousel(this, options)))
      if (typeof option == 'number') data.to(option)
      else if (action) data[action]()
      else if (options.interval) data.cycle()
    })
  }

  $.fn.carousel.defaults = {
    interval: 5000
  , pause: 'hover'
  }

  $.fn.carousel.Constructor = Carousel


 /* CAROUSEL DATA-API
  * ================= */

  $(document).on('click.carousel.data-api', '[data-slide]', function (e) {
    var $this = $(this), href
      , $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) //strip for ie7
      , options = $.extend({}, $target.data(), $this.data())
    $target.carousel(options)
    e.preventDefault()
  })

}(window.jQuery);/* =============================================================
 * bootstrap-collapse.js v2.2.1
 * http://twitter.github.com/bootstrap/javascript.html#collapse
 * =============================================================
 * Copyright 2012 Twitter, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============================================================ */


!function ($) {

  "use strict"; // jshint ;_;


 /* COLLAPSE PUBLIC CLASS DEFINITION
  * ================================ */

  var Collapse = function (element, options) {
    this.$element = $(element)
    this.options = $.extend({}, $.fn.collapse.defaults, options)

    if (this.options.parent) {
      this.$parent = $(this.options.parent)
    }

    this.options.toggle && this.toggle()
  }

  Collapse.prototype = {

    constructor: Collapse

  , dimension: function () {
      var hasWidth = this.$element.hasClass('width')
      return hasWidth ? 'width' : 'height'
    }

  , show: function () {
      var dimension
        , scroll
        , actives
        , hasData

      if (this.transitioning) return

      dimension = this.dimension()
      scroll = $.camelCase(['scroll', dimension].join('-'))
      actives = this.$parent && this.$parent.find('> .accordion-group > .in')

      if (actives && actives.length) {
        hasData = actives.data('collapse')
        if (hasData && hasData.transitioning) return
        actives.collapse('hide')
        hasData || actives.data('collapse', null)
      }

      this.$element[dimension](0)
      this.transition('addClass', $.Event('show'), 'shown')
      $.support.transition && this.$element[dimension](this.$element[0][scroll])
    }

  , hide: function () {
      var dimension

      if (this.transitioning) return

      dimension = this.dimension()
      this.reset(this.$element[dimension]())
      this.transition('removeClass', $.Event('hide'), 'hidden')
      this.$element[dimension](0)
    }

  , reset: function (size) {
      var dimension = this.dimension()

      this.$element
        .removeClass('collapse')
        [dimension](size || 'auto')
        [0].offsetWidth

      this.$element[size !== null ? 'addClass' : 'removeClass']('collapse')

      return this
    }

  , transition: function (method, startEvent, completeEvent) {
      var that = this
        , complete = function () {
            if (startEvent.type == 'show') that.reset()
            that.transitioning = 0
            that.$element.trigger(completeEvent)
          }

      this.$element.trigger(startEvent)

      if (startEvent.isDefaultPrevented()) return

      this.transitioning = 1

      this.$element[method]('in')

      $.support.transition && this.$element.hasClass('collapse') ?
        this.$element.one($.support.transition.end, complete) :
        complete()
    }

  , toggle: function () {
      this[this.$element.hasClass('in') ? 'hide' : 'show']()
    }

  }


 /* COLLAPSIBLE PLUGIN DEFINITION
  * ============================== */

  $.fn.collapse = function (option) {
    return this.each(function () {
      var $this = $(this)
        , data = $this.data('collapse')
        , options = typeof option == 'object' && option
      if (!data) $this.data('collapse', (data = new Collapse(this, options)))
      if (typeof option == 'string') data[option]()
    })
  }

  $.fn.collapse.defaults = {
    toggle: true
  }

  $.fn.collapse.Constructor = Collapse


 /* COLLAPSIBLE DATA-API
  * ==================== */

  $(document).on('click.collapse.data-api', '[data-toggle=collapse]', function (e) {
    var $this = $(this), href
      , target = $this.attr('data-target')
        || e.preventDefault()
        || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') //strip for ie7
      , option = $(target).data('collapse') ? 'toggle' : $this.data()
    $this[$(target).hasClass('in') ? 'addClass' : 'removeClass']('collapsed')
    $(target).collapse(option)
  })

}(window.jQuery);/* ============================================================
 * bootstrap-dropdown.js v2.2.1
 * http://twitter.github.com/bootstrap/javascript.html#dropdowns
 * ============================================================
 * Copyright 2012 Twitter, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============================================================ */


!function ($) {

  "use strict"; // jshint ;_;


 /* DROPDOWN CLASS DEFINITION
  * ========================= */

  var toggle = '[data-toggle=dropdown]'
    , Dropdown = function (element) {
        var $el = $(element).on('click.dropdown.data-api', this.toggle)
        $('html').on('click.dropdown.data-api', function () {
          $el.parent().removeClass('open')
        })
      }

  Dropdown.prototype = {

    constructor: Dropdown

  , toggle: function (e) {
      var $this = $(this)
        , $parent
        , isActive

      if ($this.is('.disabled, :disabled')) return

      $parent = getParent($this)

      isActive = $parent.hasClass('open')

      clearMenus()

      if (!isActive) {
        $parent.toggleClass('open')
        $this.focus()
      }

      return false
    }

  , keydown: function (e) {
      var $this
        , $items
        , $active
        , $parent
        , isActive
        , index

      if (!/(38|40|27)/.test(e.keyCode)) return

      $this = $(this)

      e.preventDefault()
      e.stopPropagation()

      if ($this.is('.disabled, :disabled')) return

      $parent = getParent($this)

      isActive = $parent.hasClass('open')

      if (!isActive || (isActive && e.keyCode == 27)) return $this.click()

      $items = $('[role=menu] li:not(.divider) a', $parent)

      if (!$items.length) return

      index = $items.index($items.filter(':focus'))

      if (e.keyCode == 38 && index > 0) index--                // up
      if (e.keyCode == 40 && index < $items.length - 1) index++ // down
      if (!~index) index = 0

      $items
        .eq(index)
        .focus()
    }

  }

  function clearMenus() {
    $(toggle).each(function () {
      getParent($(this)).removeClass('open')
    })
  }

  function getParent($this) {
    var selector = $this.attr('data-target')
      , $parent

    if (!selector) {
      selector = $this.attr('href')
      selector = selector && /#/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') //strip for ie7
    }

    $parent = $(selector)
    $parent.length || ($parent = $this.parent())

    return $parent
  }


 /* DROPDOWN PLUGIN DEFINITION
  * ========================== */

  $.fn.dropdown = function (option) {
    return this.each(function () {
      var $this = $(this)
        , data = $this.data('dropdown')
      if (!data) $this.data('dropdown', (data = new Dropdown(this)))
      if (typeof option == 'string') data[option].call($this)
    })
  }

  $.fn.dropdown.Constructor = Dropdown


 /* APPLY TO STANDARD DROPDOWN ELEMENTS
  * =================================== */

  $(document)
    .on('click.dropdown.data-api touchstart.dropdown.data-api', clearMenus)
    .on('click.dropdown touchstart.dropdown.data-api', '.dropdown form', function (e) { e.stopPropagation() })
    .on('click.dropdown.data-api touchstart.dropdown.data-api' , toggle, Dropdown.prototype.toggle)
    .on('keydown.dropdown.data-api touchstart.dropdown.data-api', toggle + ', [role=menu]' , Dropdown.prototype.keydown)

}(window.jQuery);/* =========================================================
 * bootstrap-modal.js v2.2.1
 * http://twitter.github.com/bootstrap/javascript.html#modals
 * =========================================================
 * Copyright 2012 Twitter, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ========================================================= */


!function ($) {

  "use strict"; // jshint ;_;


 /* MODAL CLASS DEFINITION
  * ====================== */

  var Modal = function (element, options) {
    this.options = options
    this.$element = $(element)
      .delegate('[data-dismiss="modal"]', 'click.dismiss.modal', $.proxy(this.hide, this))
    this.options.remote && this.$element.find('.modal-body').load(this.options.remote)
  }

  Modal.prototype = {

      constructor: Modal

    , toggle: function () {
        return this[!this.isShown ? 'show' : 'hide']()
      }

    , show: function () {
        var that = this
          , e = $.Event('show')

        this.$element.trigger(e)

        if (this.isShown || e.isDefaultPrevented()) return

        this.isShown = true

        this.escape()

        this.backdrop(function () {
          var transition = $.support.transition && that.$element.hasClass('fade')

          if (!that.$element.parent().length) {
            that.$element.appendTo(document.body) //don't move modals dom position
          }

          that.$element
            .show()

          if (transition) {
            that.$element[0].offsetWidth // force reflow
          }

          that.$element
            .addClass('in')
            .attr('aria-hidden', false)

          that.enforceFocus()

          transition ?
            that.$element.one($.support.transition.end, function () { that.$element.focus().trigger('shown') }) :
            that.$element.focus().trigger('shown')

        })
      }

    , hide: function (e) {
        e && e.preventDefault()

        var that = this

        e = $.Event('hide')

        this.$element.trigger(e)

        if (!this.isShown || e.isDefaultPrevented()) return

        this.isShown = false

        this.escape()

        $(document).off('focusin.modal')

        this.$element
          .removeClass('in')
          .attr('aria-hidden', true)

        $.support.transition && this.$element.hasClass('fade') ?
          this.hideWithTransition() :
          this.hideModal()
      }

    , enforceFocus: function () {
        var that = this
        $(document).on('focusin.modal', function (e) {
          if (that.$element[0] !== e.target && !that.$element.has(e.target).length) {
            that.$element.focus()
          }
        })
      }

    , escape: function () {
        var that = this
        if (this.isShown && this.options.keyboard) {
          this.$element.on('keyup.dismiss.modal', function ( e ) {
            e.which == 27 && that.hide()
          })
        } else if (!this.isShown) {
          this.$element.off('keyup.dismiss.modal')
        }
      }

    , hideWithTransition: function () {
        var that = this
          , timeout = setTimeout(function () {
              that.$element.off($.support.transition.end)
              that.hideModal()
            }, 500)

        this.$element.one($.support.transition.end, function () {
          clearTimeout(timeout)
          that.hideModal()
        })
      }

    , hideModal: function (that) {
        this.$element
          .hide()
          .trigger('hidden')

        this.backdrop()
      }

    , removeBackdrop: function () {
        this.$backdrop.remove()
        this.$backdrop = null
      }

    , backdrop: function (callback) {
        var that = this
          , animate = this.$element.hasClass('fade') ? 'fade' : ''

        if (this.isShown && this.options.backdrop) {
          var doAnimate = $.support.transition && animate

          this.$backdrop = $('