from absl import flags
from perfkitbenchmarker import errors

flags.DEFINE_enum('tcmalloc_version', 'off',
                  ['off', 'gperftools', 'experimental'],
                  'the tcmalloc version to be preloaded')
flags.DEFINE_string(
    'tcmalloc_experimental_url', '',
    'the GCS URL for downloading the tcmalloc experimental lib')
flags.DEFINE_string(
    'tcmalloc_settings',
    '',
    'tcmalloc settings modifying runtime behavior as environment variables '
    'such as "ARG1=foo,ARG2=bar", see more: '
    'https://gperftools.github.io/gperftools/tcmalloc.html',
)

FLAGS = flags.FLAGS

TEMP_BASHRC = '/tmp/bash.bashrc'
BASHRC = '/etc/bash.bashrc'


def AptInstall(vm):
    """Installs the tcmalloc shared library on a Debian VM."""
    if FLAGS.tcmalloc_version == 'off':
        return

    # Write tcmalloc settings as environment variables
    settings = FLAGS.tcmalloc_settings.split(',')
    for setting in settings:
        if setting:
            vm.RemoteCommand('echo "export {setting}" | sudo tee -a {tmp}'.format(
                setting=setting,  # e.g. 'TCMALLOC_RELEASE_RATE=0.5'
                tmp=TEMP_BASHRC,
            ))

    if FLAGS.tcmalloc_version == 'gperftools':
        vm.InstallPackages('libgoogle-perftools-dev')
        libtcmalloc_paths = [
            '/usr/lib/libtcmalloc.so.4',  # before v2.7
            '/usr/lib/x86_64-linux-gnu/libtcmalloc.so.4',  # since v2.7
        ]
        vm.RemoteCommand(
            'test -f {path1} '
            '&& echo "export LD_PRELOAD={path1}" | sudo tee -a {tmp} '
            '|| echo "export LD_PRELOAD={path2}" | sudo tee -a {tmp} '.format(
                path1=libtcmalloc_paths[0],
                path2=libtcmalloc_paths[1],
                tmp=TEMP_BASHRC,
            ))

    if FLAGS.tcmalloc_version == 'experimental':
        vm.Install('google_cloud_sdk')
        local_path = '/tmp/libtcmalloc.so'
        vm.RemoteCommand(
            'gsutil cp {url} {path} '
            '&& echo "export LD_PRELOAD={path}" | sudo tee -a {tmp}'.format(
                url=FLAGS.tcmalloc_experimental_url,
                path=local_path,
                tmp=TEMP_BASHRC))

    # The environment variables must be exported before a potential termination
    # of bashrc when the shell is not interactive
    vm.RemoteCommand('sudo cat {tmp} {bashrc} | sudo tee {bashrc}'.format(
        tmp=TEMP_BASHRC,
        bashrc=BASHRC,
    ))

    # Verify that libtcmalloc is preloaded in new process
    stdout, unused_stderr = vm.RemoteCommand('echo $LD_PRELOAD')
    if 'libtcmalloc.so' not in stdout:
        raise errors.Setup.InvalidSetupError(
            'Fail to install tcmalloc. LD_PRELOAD="{}"'.format(stdout))
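
# A standalone sketch (not part of PerfKitBenchmarker) of the idea behind the
# verification above: LD_PRELOAD only affects newly started processes, so the
# probe must spawn a fresh shell and inspect LD_PRELOAD there. 'bash -ic'
# forces an interactive shell, which is what reads /etc/bash.bashrc on Debian.

import subprocess


def tcmalloc_preloaded() -> bool:
    """Return True if a fresh interactive shell has libtcmalloc preloaded."""
    out = subprocess.run(['bash', '-ic', 'echo $LD_PRELOAD'],
                         capture_output=True, text=True).stdout
    return 'libtcmalloc.so' in out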
from tabulate import tabulate
from flask_script import Manager

from lemur.reporting.service import fqdns, expiring_certificates

manager = Manager(usage="Reporting related tasks.")


@manager.option(
    "-v",
    "--validity",
    dest="validity",
    choices=["all", "expired", "valid"],
    default="all",
    help="Filter certificates by validity.",
)
@manager.option(
    "-d",
    "--deployment",
    dest="deployment",
    choices=["all", "deployed", "ready"],
    default="all",
    help="Filter by deployment status.",
)
def fqdn(deployment, validity):
    """
    Generates a report in order to determine the number of FQDNs covered by Lemur issued certificates.
    """
    headers = [
        "FQDN",
        "Root Domain",
        "Issuer",
        "Owner",
        "Validity End",
        "Total Length (days)",
        "Time Until Expiration (days)",
    ]
    rows = []

    for cert in fqdns(validity=validity, deployment=deployment).all():
        for domain in cert.domains:
            rows.append(
                [
                    domain.name,
                    ".".join(domain.name.split(".")[1:]),
                    cert.issuer,
                    cert.owner,
                    cert.not_after,
                    cert.validity_range.days,
                    cert.validity_remaining.days,
                ]
            )

    print(tabulate(rows, headers=headers))


@manager.option("-ttl", "--ttl", dest="ttl", default=30, help="Days til expiration.")
@manager.option(
    "-d",
    "--deployment",
    dest="deployment",
    choices=["all", "deployed", "ready"],
    default="all",
    help="Filter by deployment status.",
)
def expiring(ttl, deployment):
    """
    Returns certificates expiring in the next n days.
    """
    headers = ["Common Name", "Owner", "Issuer", "Validity End", "Endpoint"]
    rows = []

    for cert in expiring_certificates(ttl=ttl, deployment=deployment).all():
        for endpoint in cert.endpoints:
            rows.append(
                [cert.cn, cert.owner, cert.issuer, cert.not_after, endpoint.dnsname]
            )

    print(tabulate(rows, headers=headers))


from datetime import timedelta
import logging
from typing import List

from greeclimate.device import (
    FanSpeed,
    HorizontalSwing,
    Mode,
    TemperatureUnits,
    VerticalSwing,
)
from greeclimate.exceptions import DeviceTimeoutError

from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
    FAN_AUTO,
    FAN_HIGH,
    FAN_LOW,
    FAN_MEDIUM,
    HVAC_MODE_AUTO,
    HVAC_MODE_COOL,
    HVAC_MODE_DRY,
    HVAC_MODE_FAN_ONLY,
    HVAC_MODE_HEAT,
    HVAC_MODE_OFF,
    PRESET_AWAY,
    PRESET_BOOST,
    PRESET_ECO,
    PRESET_NONE,
    PRESET_SLEEP,
    SUPPORT_FAN_MODE,
    SUPPORT_PRESET_MODE,
    SUPPORT_SWING_MODE,
    SUPPORT_TARGET_TEMPERATURE,
    SWING_BOTH,
    SWING_HORIZONTAL,
    SWING_OFF,
    SWING_VERTICAL,
)
from homeassistant.const import (
    ATTR_TEMPERATURE,
    PRECISION_WHOLE,
    TEMP_CELSIUS,
    TEMP_FAHRENHEIT,
)
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC

from .const import (
    DOMAIN,
    FAN_MEDIUM_HIGH,
    FAN_MEDIUM_LOW,
    MAX_ERRORS,
    MAX_TEMP,
    MIN_TEMP,
    TARGET_TEMPERATURE_STEP,
)

_LOGGER = logging.getLogger(__name__)

SCAN_INTERVAL = timedelta(seconds=60)
PARALLEL_UPDATES = 0

HVAC_MODES = {
    Mode.Auto: HVAC_MODE_AUTO,
    Mode.Cool: HVAC_MODE_COOL,
    Mode.Dry: HVAC_MODE_DRY,
    Mode.Fan: HVAC_MODE_FAN_ONLY,
    Mode.Heat: HVAC_MODE_HEAT,
}
HVAC_MODES_REVERSE = {v: k for k, v in HVAC_MODES.items()}

PRESET_MODES = [
    PRESET_ECO,  # Power saving mode
    PRESET_AWAY,  # Steady heat, or 8C mode on gree units
    PRESET_BOOST,  # Turbo mode
    PRESET_NONE,  # Default operating mode
    PRESET_SLEEP,  # Sleep mode
]

FAN_MODES = {
    FanSpeed.Auto: FAN_AUTO,
    FanSpeed.Low: FAN_LOW,
    FanSpeed.MediumLow: FAN_MEDIUM_LOW,
    FanSpeed.Medium: FAN_MEDIUM,
    FanSpeed.MediumHigh: FAN_MEDIUM_HIGH,
    FanSpeed.High: FAN_HIGH,
}
FAN_MODES_REVERSE = {v: k for k, v in FAN_MODES.items()}

SWING_MODES = [SWING_OFF, SWING_VERTICAL, SWING_HORIZONTAL, SWING_BOTH]

SUPPORTED_FEATURES = (
    SUPPORT_TARGET_TEMPERATURE
    | SUPPORT_FAN_MODE
    | SUPPORT_PRESET_MODE
    | SUPPORT_SWING_MODE
)


async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Gree HVAC device from a config entry."""
    async_add_entities(
        GreeClimateEntity(device) for device in hass.data[DOMAIN].pop("pending")
    )


class GreeClimateEntity(ClimateEntity):
    """Representation of a Gree HVAC device."""

    def __init__(self, device):
        """Initialize the Gree device."""
        self._device = device
        self._name = device.device_info.name
        self._mac = device.device_info.mac
        self._available = False
        self._error_count = 0

    async def async_update(self):
        """Update the state of the device."""
        try:
            await self._device.update_state()

            if not self._available and self._error_count:
                _LOGGER.warning(
                    "Device is available: %s (%s)",
                    self._name,
                    str(self._device.device_info),
                )

            self._available = True
            self._error_count = 0
        except DeviceTimeoutError:
            self._error_count += 1

            # Under normal conditions GREE units timeout every once in a while
            if self._available and self._error_count >= MAX_ERRORS:
                self._available = False
                _LOGGER.warning(
                    "Device is unavailable: %s (%s)",
                    self._name,
                    self._device.device_info,
                )
        except Exception:  # pylint: disable=broad-except
            # Under normal conditions GREE units timeout every once in a while
            if self._available:
                self._available = False
                _LOGGER.exception(
                    "Unknown exception caught during update by gree device: %s (%s)",
                    self._name,
                    self._device.device_info,
                )

    async def _push_state_update(self):
        """Send state updates to the physical device."""
        try:
            return await self._device.push_state_update()
        except DeviceTimeoutError:
            self._error_count += 1

            # Under normal conditions GREE units timeout every once in a while
            if self._available and self._error_count >= MAX_ERRORS:
                self._available = False
                _LOGGER.warning(
                    "Device timed out while sending state update: %s (%s)",
                    self._name,
                    self._device.device_info,
                )
        except Exception:  # pylint: disable=broad-except
            # Under normal conditions GREE units timeout every once in a while
            if self._available:
                self._available = False
                _LOGGER.exception(
                    "Unknown exception caught while sending state update to: %s (%s)",
                    self._name,
                    self._device.device_info,
                )

    @property
    def available(self) -> bool:
        """Return if the device is available."""
        return self._available

    @property
    def name(self) -> str:
        """Return the name of the device."""
        return self._name

    @property
    def unique_id(self) -> str:
        """Return a unique id for the device."""
        return self._mac

    @property
    def device_info(self):
        """Return device specific attributes."""
        return {
            "name": self._name,
            "identifiers": {(DOMAIN, self._mac)},
            "manufacturer": "Gree",
            "connections": {(CONNECTION_NETWORK_MAC, self._mac)},
        }

    @property
    def temperature_unit(self) -> str:
        """Return the temperature units for the device."""
        units = self._device.temperature_units
        return TEMP_CELSIUS if units == TemperatureUnits.C else TEMP_FAHRENHEIT

    @property
    def precision(self) -> float:
        """Return the precision of temperature for the device."""
        return PRECISION_WHOLE

    @property
    def current_temperature(self) -> float:
        """Return the target temperature, gree devices don't provide internal temp."""
        return self.target_temperature

    @property
    def target_temperature(self) -> float:
        """Return the target temperature for the device."""
        return self._device.target_temperature

    async def async_set_temperature(self, **kwargs):
        """Set new target temperature."""
        if ATTR_TEMPERATURE not in kwargs:
            raise ValueError(f"Missing parameter {ATTR_TEMPERATURE}")

        temperature = kwargs[ATTR_TEMPERATURE]
        _LOGGER.debug(
            "Setting temperature to %d for %s",
            temperature,
            self._name,
        )

        self._device.target_temperature = round(temperature)
        await self._push_state_update()

    @property
    def min_temp(self) -> float:
        """Return the minimum temperature supported by the device."""
        return MIN_TEMP

    @property
    def max_temp(self) -> float:
        """Return the maximum temperature supported by the device."""
        return MAX_TEMP

    @property
    def target_temperature_step(self) -> float:
        """Return the target temperature step supported by the device."""
        return TARGET_TEMPERATURE_STEP

    @property
    def hvac_mode(self) -> str:
        """Return the current HVAC mode for the device."""
        if not self._device.power:
            return HVAC_MODE_OFF

        return HVAC_MODES.get(self._device.mode)

    async def async_set_hvac_mode(self, hvac_mode):
        """Set new target hvac mode."""
        if hvac_mode not in self.hvac_modes:
            raise ValueError(f"Invalid hvac_mode: {hvac_mode}")

        _LOGGER.debug(
            "Setting HVAC mode to %s for device %s",
            hvac_mode,
            self._name,
        )

        if hvac_mode == HVAC_MODE_OFF:
            self._device.power = False
            await self._push_state_update()
            return

        if not self._device.power:
            self._device.power = True

        self._device.mode = HVAC_MODES_REVERSE.get(hvac_mode)
        await self._push_state_update()

    @property
    def hvac_modes(self) -> List[str]:
        """Return the HVAC modes supported by the device."""
        modes = [*HVAC_MODES_REVERSE]
        modes.append(HVAC_MODE_OFF)
        return modes

    @property
    def preset_mode(self) -> str:
        """Return the current preset mode for the device."""
        if self._device.steady_heat:
            return PRESET_AWAY
        if self._device.power_save:
            return PRESET_ECO
        if self._device.sleep:
            return PRESET_SLEEP
        if self._device.turbo:
            return PRESET_BOOST
        return PRESET_NONE

    async def async_set_preset_mode(self, preset_mode):
        """Set new preset mode."""
        if preset_mode not in PRESET_MODES:
            raise ValueError(f"Invalid preset mode: {preset_mode}")

        _LOGGER.debug(
            "Setting preset mode to %s for device %s",
            preset_mode,
            self._name,
        )

        self._device.steady_heat = False
        self._device.power_save = False
        self._device.turbo = False
        self._device.sleep = False

        if preset_mode == PRESET_AWAY:
            self._device.steady_heat = True
        elif preset_mode == PRESET_ECO:
            self._device.power_save = True
        elif preset_mode == PRESET_BOOST:
            self._device.turbo = True
        elif preset_mode == PRESET_SLEEP:
            self._device.sleep = True

        await self._push_state_update()
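
    # Illustrative note (derived from async_set_preset_mode above): each preset
    # sets exactly one device flag, so preset_mode() can recover the active
    # preset unambiguously, e.g.:
    #
    #   await entity.async_set_preset_mode(PRESET_BOOST)
    #   assert entity.preset_mode == PRESET_BOOST  # only device.turbo is set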
\"\"\"Return the preset modes support by the device.\"\"\"\n return PRESET_MODES\n\n @property\n def fan_mode(self) -> str:\n \"\"\"Return the current fan mode for the device.\"\"\"\n speed = self._device.fan_speed\n return FAN_MODES.get(speed)\n\n async def async_set_fan_mode(self, fan_mode):\n \"\"\"Set new target fan mode.\"\"\"\n if fan_mode not in FAN_MODES_REVERSE:\n raise ValueError(f\"Invalid fan mode: {fan_mode}\")\n\n self._device.fan_speed = FAN_MODES_REVERSE.get(fan_mode)\n await self._push_state_update()\n\n @property\n def fan_modes(self) -> List[str]:\n \"\"\"Return the fan modes support by the device.\"\"\"\n return [*FAN_MODES_REVERSE]\n\n @property\n def swing_mode(self) -> str:\n \"\"\"Return the current swing mode for the device.\"\"\"\n h_swing = self._device.horizontal_swing == HorizontalSwing.FullSwing\n v_swing = self._device.vertical_swing == VerticalSwing.FullSwing\n\n if h_swing and v_swing:\n return SWING_BOTH\n if h_swing:\n return SWING_HORIZONTAL\n if v_swing:\n return SWING_VERTICAL\n return SWING_OFF\n\n async def async_set_swing_mode(self, swing_mode):\n \"\"\"Set new target swing operation.\"\"\"\n if swing_mode not in SWING_MODES:\n raise ValueError(f\"Invalid swing mode: {swing_mode}\")\n\n _LOGGER.debug(\n \"Setting swing mode to %s for device %s\",\n swing_mode,\n self._name,\n )\n\n self._device.horizontal_swing = HorizontalSwing.Center\n self._device.vertical_swing = VerticalSwing.FixedMiddle\n if swing_mode in (SWING_BOTH, SWING_HORIZONTAL):\n self._device.horizontal_swing = HorizontalSwing.FullSwing\n if swing_mode in (SWING_BOTH, SWING_VERTICAL):\n self._device.vertical_swing = VerticalSwing.FullSwing\n\n await self._push_state_update()\n\n @property\n def swing_modes(self) -> List[str]:\n \"\"\"Return the swing modes currently supported for this device.\"\"\"\n return SWING_MODES\n\n @property\n def supported_features(self) -> int:\n \"\"\"Return the supported features for this device integration.\"\"\"\n return SUPPORTED_FEATURES\n\n"}}},{"rowIdx":426,"cells":{"text":{"kind":"string","value":"\nimport argparse\nimport pathlib\nimport sys\nimport os\nimport os.path\nimport shutil\nimport venv\nimport subprocess\nfrom typing import List, Optional, Tuple\n\nsys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir))\nfrom scripts import utils, link_pyqt\n\n\nREPO_ROOT = pathlib.Path(__file__).parent.parent\n\n\ndef parse_args() -> argparse.Namespace:\n \"\"\"Parse commandline arguments.\"\"\"\n parser = argparse.ArgumentParser(description=__doc__)\n parser.add_argument('--keep',\n action='store_true',\n help=\"Reuse an existing virtualenv.\")\n parser.add_argument('--venv-dir',\n default='.venv',\n help=\"Where to place the virtualenv.\")\n parser.add_argument('--pyqt-version',\n choices=pyqt_versions(),\n default='auto',\n help=\"PyQt version to install.\")\n parser.add_argument('--pyqt-type',\n choices=['binary', 'source', 'link', 'wheels', 'skip'],\n default='binary',\n help=\"How to install PyQt/Qt.\")\n parser.add_argument('--pyqt-wheels-dir',\n default='wheels',\n help=\"Directory to get PyQt wheels from.\")\n parser.add_argument('--virtualenv',\n action='store_true',\n help=\"Use virtualenv instead of venv.\")\n parser.add_argument('--asciidoc', help=\"Full path to python and \"\n \"asciidoc.py. 
    parser.add_argument('--dev',
                        action='store_true',
                        help="Also install dev/test dependencies.")
    parser.add_argument('--skip-docs',
                        action='store_true',
                        help="Skip doc generation.")
    parser.add_argument('--tox-error',
                        action='store_true',
                        help=argparse.SUPPRESS)
    return parser.parse_args()


def pyqt_versions() -> List[str]:
    """Get a list of all available PyQt versions.

    The list is based on the filenames of misc/requirements/ files.
    """
    version_set = set()

    requirements_dir = REPO_ROOT / 'misc' / 'requirements'
    for req in requirements_dir.glob('requirements-pyqt-*.txt'):
        version_set.add(req.stem.split('-')[-1])

    versions = sorted(version_set,
                      key=lambda v: [int(c) for c in v.split('.')])
    return versions + ['auto']


def run_venv(venv_dir: pathlib.Path, executable, *args: str) -> None:
    """Run the given command inside the virtualenv."""
    subdir = 'Scripts' if os.name == 'nt' else 'bin'

    try:
        subprocess.run([str(venv_dir / subdir / executable)] +
                       [str(arg) for arg in args], check=True)
    except subprocess.CalledProcessError as e:
        utils.print_error("Subprocess failed, exiting")
        sys.exit(e.returncode)


def pip_install(venv_dir: pathlib.Path, *args: str) -> None:
    """Run a pip install command inside the virtualenv."""
    arg_str = ' '.join(str(arg) for arg in args)
    utils.print_col('venv$ pip install {}'.format(arg_str), 'blue')
    run_venv(venv_dir, 'python', '-m', 'pip', 'install', *args)


def delete_old_venv(venv_dir: pathlib.Path) -> None:
    """Remove an existing virtualenv directory."""
    if not venv_dir.exists():
        return

    markers = [
        venv_dir / '.tox-config1',  # tox
        venv_dir / 'pyvenv.cfg',  # venv
        venv_dir / 'Scripts',  # Windows
        venv_dir / 'bin',  # Linux
    ]

    if not any(m.exists() for m in markers):
        utils.print_error('{} does not look like a virtualenv, '
                          'cowardly refusing to remove it.'.format(venv_dir))
        sys.exit(1)

    utils.print_col('$ rm -r {}'.format(venv_dir), 'blue')
    shutil.rmtree(str(venv_dir))


def create_venv(venv_dir: pathlib.Path, use_virtualenv: bool = False) -> None:
    """Create a new virtualenv."""
    if use_virtualenv:
        utils.print_col('$ python3 -m virtualenv {}'.format(venv_dir), 'blue')
        try:
            subprocess.run([sys.executable, '-m', 'virtualenv', venv_dir],
                           check=True)
        except subprocess.CalledProcessError as e:
            utils.print_error("virtualenv failed, exiting")
            sys.exit(e.returncode)
    else:
        utils.print_col('$ python3 -m venv {}'.format(venv_dir), 'blue')
        venv.create(str(venv_dir), with_pip=True)


def upgrade_seed_pkgs(venv_dir: pathlib.Path) -> None:
    """Upgrade initial seed packages inside a virtualenv.

    This also makes sure that wheel is installed, which causes pip to use its
    wheel cache for rebuilds.
    """
    utils.print_title("Upgrading initial packages")
    pip_install(venv_dir, '-U', 'pip')
    pip_install(venv_dir, '-U', 'setuptools', 'wheel')


def requirements_file(name: str) -> pathlib.Path:
    """Get the filename of a requirements file."""
    return (REPO_ROOT / 'misc' / 'requirements' /
            'requirements-{}.txt'.format(name))


def pyqt_requirements_file(version: str) -> pathlib.Path:
    """Get the filename of the requirements file for the given PyQt version."""
    name = 'pyqt' if version == 'auto' else 'pyqt-{}'.format(version)
    return requirements_file(name)

def install_pyqt_binary(venv_dir: pathlib.Path, version: str) -> None:
    """Install PyQt from a binary wheel."""
    utils.print_title("Installing PyQt from binary")
    utils.print_col("No proprietary codec support will be available in "
                    "qutebrowser.", 'bold')
    pip_install(venv_dir, '-r', pyqt_requirements_file(version),
                '--only-binary', 'PyQt5,PyQtWebEngine')


def install_pyqt_source(venv_dir: pathlib.Path, version: str) -> None:
    """Install PyQt from the source tarball."""
    utils.print_title("Installing PyQt from sources")
    pip_install(venv_dir, '-r', pyqt_requirements_file(version),
                '--verbose', '--no-binary', 'PyQt5,PyQtWebEngine')


def install_pyqt_link(venv_dir: pathlib.Path) -> None:
    """Install PyQt by linking a system-wide install."""
    utils.print_title("Linking system-wide PyQt")
    lib_path = link_pyqt.get_venv_lib_path(str(venv_dir))
    link_pyqt.link_pyqt(sys.executable, lib_path)


def install_pyqt_wheels(venv_dir: pathlib.Path,
                        wheels_dir: pathlib.Path) -> None:
    """Install PyQt from the wheels/ directory."""
    utils.print_title("Installing PyQt wheels")
    wheels = [str(wheel) for wheel in wheels_dir.glob('*.whl')]
    pip_install(venv_dir, *wheels)


def install_requirements(venv_dir: pathlib.Path) -> None:
    """Install qutebrowser's requirement.txt."""
    utils.print_title("Installing other qutebrowser dependencies")
    requirements = REPO_ROOT / 'requirements.txt'
    pip_install(venv_dir, '-r', str(requirements))


def install_dev_requirements(venv_dir: pathlib.Path) -> None:
    """Install development dependencies."""
    utils.print_title("Installing dev dependencies")
    pip_install(venv_dir,
                '-r', str(requirements_file('dev')),
                '-r', requirements_file('tests'))


def install_qutebrowser(venv_dir: pathlib.Path) -> None:
    """Install qutebrowser itself as an editable install."""
    utils.print_title("Installing qutebrowser")
    pip_install(venv_dir, '-e', str(REPO_ROOT))


def regenerate_docs(venv_dir: pathlib.Path,
                    asciidoc: Optional[Tuple[str, str]]):
    """Regenerate docs using asciidoc."""
    utils.print_title("Generating documentation")
    if asciidoc is not None:
        a2h_args = ['--asciidoc'] + asciidoc
    else:
        a2h_args = []
    script_path = pathlib.Path(__file__).parent / 'asciidoc2html.py'

    utils.print_col('venv$ python3 scripts/asciidoc2html.py {}'
                    .format(' '.join(a2h_args)), 'blue')
    run_venv(venv_dir, 'python', str(script_path), *a2h_args)
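
# A minimal usage sketch (illustrative only, mirroring what main() below does
# with default arguments; not an additional public API):
#
#   venv_dir = pathlib.Path('.venv')
#   delete_old_venv(venv_dir)
#   create_venv(venv_dir)                  # stdlib venv, pip included
#   upgrade_seed_pkgs(venv_dir)            # pip/setuptools/wheel first
#   install_pyqt_binary(venv_dir, 'auto')
#   install_requirements(venv_dir)
#   install_qutebrowser(venv_dir)          # editable install of this repo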

def main() -> None:
    """Install qutebrowser in a virtualenv."""
    args = parse_args()
    venv_dir = pathlib.Path(args.venv_dir)
    wheels_dir = pathlib.Path(args.pyqt_wheels_dir)
    utils.change_cwd()

    if (args.pyqt_version != 'auto' and
            args.pyqt_type not in ['binary', 'source']):
        utils.print_error('The --pyqt-version option is only available when '
                          'installing PyQt from binary or source')
        sys.exit(1)
    elif args.pyqt_wheels_dir != 'wheels' and args.pyqt_type != 'wheels':
        utils.print_error('The --pyqt-wheels-dir option is only available '
                          'when installing PyQt from wheels')
        sys.exit(1)

    if not args.keep:
        utils.print_title("Creating virtual environment")
        delete_old_venv(venv_dir)
        create_venv(venv_dir, use_virtualenv=args.virtualenv)

    upgrade_seed_pkgs(venv_dir)

    if args.pyqt_type == 'binary':
        install_pyqt_binary(venv_dir, args.pyqt_version)
    elif args.pyqt_type == 'source':
        install_pyqt_source(venv_dir, args.pyqt_version)
    elif args.pyqt_type == 'link':
        install_pyqt_link(venv_dir)
    elif args.pyqt_type == 'wheels':
        install_pyqt_wheels(venv_dir, wheels_dir)
    elif args.pyqt_type == 'skip':
        pass
    else:
        raise AssertionError

    install_requirements(venv_dir)
    install_qutebrowser(venv_dir)
    if args.dev:
        install_dev_requirements(venv_dir)

    if not args.skip_docs:
        regenerate_docs(venv_dir, args.asciidoc)


if __name__ == '__main__':
    main()


import logging
import os

logging.basicConfig()
logger = logging.getLogger("kalliope")


class FileManager:
    """
    Class used to manage Files
    """
    def __init__(self):
        pass

    @staticmethod
    def create_directory(cache_path):
        """
        Create a directory at the provided `cache_path`
        :param cache_path: the path of the directory to create
        :type cache_path: str
        """
        if not os.path.exists(cache_path):
            os.makedirs(cache_path)

    @staticmethod
    def write_in_file(file_path, content):
        """
        Write contents into a file
        :param file_path: the path of the file to write on
        :type file_path: str
        :param content: the contents to write in the file
        :type content: str or bytes

        .. raises:: IOError
        """
        try:
            with open(file_path, "wb") as file_open:
                if isinstance(content, bytes):
                    file_open.write(content)
                else:
                    file_open.write(content.encode())
            return not FileManager.file_is_empty(file_path)
        except IOError as e:
            logger.error("I/O error(%s): %s", e.errno, e.strerror)
            return False

    @staticmethod
    def file_is_empty(file_path):
        """
        Check if the file is empty
        :param file_path: the path of the file
        :return: True if the file is empty, False otherwise
        """
        return os.path.getsize(file_path) == 0

    @staticmethod
    def remove_file(file_path):
        """
        Remove the file located at the provided `file_path`
        :param file_path:
        :return: True if the file has been removed successfully, False otherwise
        """
        if os.path.exists(file_path):
            return os.remove(file_path)

    @staticmethod
    def is_path_creatable(pathname):
        """
        `True` if the current user has sufficient permissions to create the passed
        pathname; `False` otherwise.
        """
        dirname = os.path.dirname(pathname) or os.getcwd()
        return os.access(dirname, os.W_OK)
    @staticmethod
    def is_path_exists_or_creatable(pathname):
        """
        `True` if the passed pathname is a valid pathname for the current OS _and_
        either currently exists or is hypothetically creatable; `False` otherwise.

        This function is guaranteed to _never_ raise exceptions.

        .. raises:: OSError
        """
        try:
            return os.path.exists(pathname) or FileManager.is_path_creatable(pathname)
        except OSError as e:
            logger.error("OSError(%s): %s", e.errno, e.strerror)
            return False


from datetime import datetime, timedelta
import logging
import time

from pytz import timezone
import requests
import voluptuous as vol

from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
    CONF_API_KEY,
    CONF_HOST,
    CONF_MONITORED_CONDITIONS,
    CONF_PORT,
    CONF_SSL,
    DATA_BYTES,
    DATA_EXABYTES,
    DATA_GIGABYTES,
    DATA_KILOBYTES,
    DATA_MEGABYTES,
    DATA_PETABYTES,
    DATA_TERABYTES,
    DATA_YOTTABYTES,
    DATA_ZETTABYTES,
    HTTP_OK,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity

_LOGGER = logging.getLogger(__name__)

CONF_DAYS = "days"
CONF_INCLUDED = "include_paths"
CONF_UNIT = "unit"
CONF_URLBASE = "urlbase"

DEFAULT_HOST = "localhost"
DEFAULT_PORT = 7878
DEFAULT_URLBASE = ""
DEFAULT_DAYS = "1"
DEFAULT_UNIT = DATA_GIGABYTES

SCAN_INTERVAL = timedelta(minutes=10)

SENSOR_TYPES = {
    "diskspace": ["Disk Space", DATA_GIGABYTES, "mdi:harddisk"],
    "upcoming": ["Upcoming", "Movies", "mdi:television"],
    "wanted": ["Wanted", "Movies", "mdi:television"],
    "movies": ["Movies", "Movies", "mdi:television"],
    "commands": ["Commands", "Commands", "mdi:code-braces"],
    "status": ["Status", "Status", "mdi:information"],
}

ENDPOINTS = {
    "diskspace": "{0}://{1}:{2}/{3}api/diskspace",
    "upcoming": "{0}://{1}:{2}/{3}api/calendar?start={4}&end={5}",
    "movies": "{0}://{1}:{2}/{3}api/movie",
    "commands": "{0}://{1}:{2}/{3}api/command",
    "status": "{0}://{1}:{2}/{3}api/system/status",
}

# Support to Yottabytes for the future, why not
BYTE_SIZES = [
    DATA_BYTES,
    DATA_KILOBYTES,
    DATA_MEGABYTES,
    DATA_GIGABYTES,
    DATA_TERABYTES,
    DATA_PETABYTES,
    DATA_EXABYTES,
    DATA_ZETTABYTES,
    DATA_YOTTABYTES,
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_API_KEY): cv.string,
        vol.Optional(CONF_DAYS, default=DEFAULT_DAYS): cv.string,
        vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
        vol.Optional(CONF_INCLUDED, default=[]): cv.ensure_list,
        vol.Optional(CONF_MONITORED_CONDITIONS, default=["movies"]): vol.All(
            cv.ensure_list, [vol.In(list(SENSOR_TYPES))]
        ),
        vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
        vol.Optional(CONF_SSL, default=False): cv.boolean,
        vol.Optional(CONF_UNIT, default=DEFAULT_UNIT): vol.In(BYTE_SIZES),
        vol.Optional(CONF_URLBASE, default=DEFAULT_URLBASE): cv.string,
    }
)


def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Radarr platform."""
    conditions = config.get(CONF_MONITORED_CONDITIONS)
    add_entities([RadarrSensor(hass, config, sensor) for sensor in conditions], True)


class RadarrSensor(Entity):
    """Implementation of the Radarr sensor."""

    def __init__(self, hass, conf, sensor_type):
        """Create Radarr entity."""

        self.conf = conf
        self.host = conf.get(CONF_HOST)
        self.port = conf.get(CONF_PORT)
        self.urlbase = conf.get(CONF_URLBASE)
        if self.urlbase:
            self.urlbase = f"{self.urlbase.strip('/')}/"
        self.apikey = conf.get(CONF_API_KEY)
        self.included = conf.get(CONF_INCLUDED)
        self.days = int(conf.get(CONF_DAYS))
        self.ssl = "https" if conf.get(CONF_SSL) else "http"
\"http\"\n self._state = None\n self.data = []\n self._tz = timezone(str(hass.config.time_zone))\n self.type = sensor_type\n self._name = SENSOR_TYPES[self.type][0]\n if self.type == \"diskspace\":\n self._unit = conf.get(CONF_UNIT)\n else:\n self._unit = SENSOR_TYPES[self.type][1]\n self._icon = SENSOR_TYPES[self.type][2]\n self._available = False\n\n @property\n def name(self):\n \"\"\"Return the name of the sensor.\"\"\"\n return \"{} {}\".format(\"Radarr\", self._name)\n\n @property\n def state(self):\n \"\"\"Return sensor state.\"\"\"\n return self._state\n\n @property\n def available(self):\n \"\"\"Return sensor availability.\"\"\"\n return self._available\n\n @property\n def unit_of_measurement(self):\n \"\"\"Return the unit of the sensor.\"\"\"\n return self._unit\n\n @property\n def device_state_attributes(self):\n \"\"\"Return the state attributes of the sensor.\"\"\"\n attributes = {}\n if self.type == \"upcoming\":\n for movie in self.data:\n attributes[to_key(movie)] = get_release_date(movie)\n elif self.type == \"commands\":\n for command in self.data:\n attributes[command[\"name\"]] = command[\"state\"]\n elif self.type == \"diskspace\":\n for data in self.data:\n free_space = to_unit(data[\"freeSpace\"], self._unit)\n total_space = to_unit(data[\"totalSpace\"], self._unit)\n percentage_used = (\n 0 if total_space == 0 else free_space / total_space * 100\n )\n attributes[data[\"path\"]] = \"{:.2f}/{:.2f}{} ({:.2f}%)\".format(\n free_space, total_space, self._unit, percentage_used\n )\n elif self.type == \"movies\":\n for movie in self.data:\n attributes[to_key(movie)] = movie[\"downloaded\"]\n elif self.type == \"status\":\n attributes = self.data\n\n return attributes\n\n @property\n def icon(self):\n \"\"\"Return the icon of the sensor.\"\"\"\n return self._icon\n\n def update(self):\n \"\"\"Update the data for the sensor.\"\"\"\n start = get_date(self._tz)\n end = get_date(self._tz, self.days)\n try:\n res = requests.get(\n ENDPOINTS[self.type].format(\n self.ssl, self.host, self.port, self.urlbase, start, end\n ),\n headers={\"X-Api-Key\": self.apikey},\n timeout=10,\n )\n except OSError:\n _LOGGER.warning(\"Host %s is not available\", self.host)\n self._available = False\n self._state = None\n return\n\n if res.status_code == HTTP_OK:\n if self.type in [\"upcoming\", \"movies\", \"commands\"]:\n self.data = res.json()\n self._state = len(self.data)\n elif self.type == \"diskspace\":\n # If included paths are not provided, use all data\n if self.included == []:\n self.data = res.json()\n else:\n # Filter to only show lists that are included\n self.data = list(\n filter(lambda x: x[\"path\"] in self.included, res.json())\n )\n self._state = \"{:.2f}\".format(\n to_unit(sum([data[\"freeSpace\"] for data in self.data]), self._unit)\n )\n elif self.type == \"status\":\n self.data = res.json()\n self._state = self.data[\"version\"]\n self._available = True\n\n\ndef get_date(zone, offset=0):\n \"\"\"Get date based on timezone and offset of days.\"\"\"\n day = 60 * 60 * 24\n return datetime.date(datetime.fromtimestamp(time.time() + day * offset, tz=zone))\n\n\ndef get_release_date(data):\n \"\"\"Get release date.\"\"\"\n date = data.get(\"physicalRelease\")\n if not date:\n date = data.get(\"inCinemas\")\n return date\n\n\ndef to_key(data):\n \"\"\"Get key.\"\"\"\n return \"{} ({})\".format(data[\"title\"], data[\"year\"])\n\n\ndef to_unit(value, unit):\n \"\"\"Convert bytes to give unit.\"\"\"\n return value / 1024 ** 

import os

import testinfra.utils.ansible_runner

testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')


def test_hostname(host):
    assert 'instance' == host.check_output('hostname -s')


def test_etc_molecule_directory(host):
    f = host.file('/etc/molecule')

    assert f.is_directory
    assert f.user == 'root'
    assert f.group == 'root'
    assert f.mode == 0o755


def test_etc_molecule_ansible_hostname_file(host):
    f = host.file('/etc/molecule/instance')

    assert f.is_file
    assert f.user == 'root'
    assert f.group == 'root'
    assert f.mode == 0o644


from scattertext.TermDocMatrix import TermDocMatrix

from scattertext.indexstore import IndexStore, IndexStoreFromList, IndexStoreFromDict
from scipy.sparse import csr_matrix


class DimensionMismatchException(Exception):
	pass


class TermDocMatrixFromScikit(object):
	'''
	A factory class for building a TermDocMatrix from a scikit-learn-processed
	dataset.

	>>> from scattertext import TermDocMatrixFromScikit
	>>> from sklearn.datasets import fetch_20newsgroups
	>>> from sklearn.feature_extraction.text import CountVectorizer
	>>> newsgroups_train = fetch_20newsgroups(subset='train', remove=('headers', 'footers', 'quotes'))
	>>> count_vectorizer = CountVectorizer()
	>>> X_counts = count_vectorizer.fit_transform(newsgroups_train.data)
	>>> term_doc_mat = TermDocMatrixFromScikit(
	...   X = X_counts,
	...   y = newsgroups_train.target,
	...   feature_vocabulary=count_vectorizer.vocabulary_,
	...   category_names=newsgroups_train.target_names
	... ).build()
	>>> term_doc_mat.get_categories()[:2]
	['alt.atheism', 'comp.graphics']
	>>> term_doc_mat.get_term_freq_df().assign(score=term_doc_mat.get_scaled_f_scores('alt.atheism')).sort_values(by='score', ascending=False).index.tolist()[:5]
	['atheism', 'atheists', 'islam', 'atheist', 'matthew']
	'''
	def __init__(self,
	             X,
	             y,
	             feature_vocabulary,
	             category_names,
	             unigram_frequency_path=None):
		'''
		Parameters
		----------
		X: sparse matrix integer, giving term-document-matrix counts
		y: list, integer categories
		feature_vocabulary: dict (feat_name -> idx)
		category_names: list of category names (len of y)
		unigram_frequency_path: str (see TermDocMatrix)
		'''

		if X.shape != (len(y), len(feature_vocabulary)):
			raise DimensionMismatchException('The shape of X is expected to be ' +
			                                 str((len(y), len(feature_vocabulary))) +
			                                 ' but was actually: ' + str(X.shape))
		self.X = X
		self.y = y
		self.feature_vocabulary = feature_vocabulary
		self.category_names = category_names
		self.unigram_frequency_path = unigram_frequency_path

	def build(self):
		'''
		Returns
		-------
		TermDocMatrix
		'''
		constructor_kwargs = self._get_build_kwargs()
		return TermDocMatrix(
			**constructor_kwargs
		)

	def _get_build_kwargs(self):
		constructor_kwargs = {'X': self.X,
		                      'mX': csr_matrix((0, 0)),
		                      'y': self.y,
		                      'term_idx_store': IndexStoreFromDict.build(self.feature_vocabulary),
		                      'metadata_idx_store': IndexStore(),
		                      'category_idx_store': IndexStoreFromList.build(self.category_names),
		                      'unigram_frequency_path': self.unigram_frequency_path}
		return constructor_kwargs


import unittest

import openrazer_daemon.device

DEVICE1_SERIAL = 'XX000000'
DEVICE1_ID = '0000:0000:0000.0000'

DEVICE2_SERIAL = 'XX000001'
DEVICE2_ID = '0000:0000:0000.0001'


class DummyDBusObject(object):
    def __init__(self):
        self.notify_msg = None
        self.parent = None

    def notify(self, msg):
        self.notify_msg = msg

    def register_parent(self, parent):
        self.parent = parent

    def notify_parent(self, msg):
        self.parent.notify_parent(msg)


class DummyParentObject(object):
    def __init__(self):
        self.notify_msg = None
        self.notify_device = None

    def notify(self, device_object, msg):
        self.notify_device = device_object
        self.notify_msg = msg

# TODO move device_object creation to setUp


class DeviceTest(unittest.TestCase):
    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_device_properties(self):
        dbus_object = DummyDBusObject()
        device_object = openrazer_daemon.device.Device(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)

        self.assertEqual(device_object.device_id, DEVICE1_ID)
        self.assertEqual(device_object.serial, DEVICE1_SERIAL)
        self.assertEqual(device_object.dbus, dbus_object)

    def test_device_register_parent(self):
        dbus_object = DummyDBusObject()
        parent_object = DummyParentObject()

        device_object = openrazer_daemon.device.Device(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
        device_object.register_parent(parent_object)

        self.assertEqual(device_object._parent, parent_object)

    def test_device_notify_child(self):
        msg = ('test', 1)

        dbus_object = DummyDBusObject()

        device_object = openrazer_daemon.device.Device(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
        device_object.notify_child(msg)

        self.assertEqual(dbus_object.notify_msg, msg)
    def test_device_notify_parent(self):
        msg = ('test', 1)

        dbus_object = DummyDBusObject()
        parent_object = DummyParentObject()

        device_object = openrazer_daemon.device.Device(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
        device_object.register_parent(parent_object)

        device_object.notify_parent(msg)

        self.assertEqual(parent_object.notify_msg, msg)
        self.assertEqual(parent_object.notify_device, device_object)


class DeviceCollectionTest(unittest.TestCase):
    def setUp(self):
        self.device_collection = openrazer_daemon.device.DeviceCollection()

    def test_add(self):
        dbus_object = DummyDBusObject()

        self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)

        self.assertIn(DEVICE1_ID, self.device_collection._id_map)
        self.assertIn(DEVICE1_SERIAL, self.device_collection._serial_map)

        device_obj_from_id = self.device_collection._id_map[DEVICE1_ID]
        device_obj_from_serial = self.device_collection._serial_map[DEVICE1_SERIAL]

        self.assertIs(device_obj_from_id, device_obj_from_serial)

    def test_get(self):
        dbus_object = DummyDBusObject()

        self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)

        device_obj_by_id = self.device_collection[DEVICE1_ID]
        device_obj_by_serial = self.device_collection[DEVICE1_SERIAL]

        self.assertIs(device_obj_by_id, device_obj_by_serial)

    def test_invalid_get(self):
        with self.assertRaises(IndexError):
            self.device_collection.get('INVALID')

    def test_contains(self):
        dbus_object = DummyDBusObject()

        self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)

        self.assertIn(DEVICE1_ID, self.device_collection)
        self.assertIn(DEVICE1_SERIAL, self.device_collection)

    def test_remove(self):
        dbus_object = DummyDBusObject()

        self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
        self.assertIn(DEVICE1_ID, self.device_collection)

        self.device_collection.remove(DEVICE1_ID)
        self.assertNotIn(DEVICE1_ID, self.device_collection)

        self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
        self.assertIn(DEVICE1_ID, self.device_collection)

        self.device_collection.remove(DEVICE1_SERIAL)
        self.assertNotIn(DEVICE1_SERIAL, self.device_collection)

    def test_items(self):
        dbus_object = DummyDBusObject()

        self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)

        device_id, device_obj1 = list(self.device_collection.id_items())[0]
        device_serial, device_obj2 = list(self.device_collection.serial_items())[0]

        self.assertEqual(device_id, DEVICE1_ID)
        self.assertEqual(device_serial, DEVICE1_SERIAL)
        self.assertIs(device_obj1, device_obj2)

    def test_iter(self):
        dbus_object = DummyDBusObject()

        self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)

        devices = [self.device_collection.get(DEVICE1_ID)]

        for device in self.device_collection:
            devices.remove(device)

        self.assertEqual(len(devices), 0)

    def test_serials(self):
        dbus_object1 = DummyDBusObject()
        dbus_object2 = DummyDBusObject()

        self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object1)
        self.device_collection.add(DEVICE2_ID, DEVICE2_SERIAL, dbus_object2)

        serials = self.device_collection.serials()

        self.assertIn(DEVICE1_SERIAL, serials)
        self.assertIn(DEVICE2_SERIAL, serials)

    def test_devices(self):
        dbus_object1 = DummyDBusObject()
        dbus_object2 = DummyDBusObject()

        self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object1)
        self.device_collection.add(DEVICE2_ID, DEVICE2_SERIAL, dbus_object2)
        device_list = self.device_collection.devices
        available_dbus = [dbus_object1, dbus_object2]

        for device in device_list:
            available_dbus.remove(device.dbus)

        # Ensure both dbus objects have been seen
        self.assertEqual(len(available_dbus), 0)

    def test_cross_device_notify(self):
        dbus_object1 = DummyDBusObject()
        dbus_object2 = DummyDBusObject()
        msg = ('test', 1)

        self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object1)
        self.device_collection.add(DEVICE2_ID, DEVICE2_SERIAL, dbus_object2)

        self.assertIs(dbus_object1.notify_msg, None)
        self.assertIs(dbus_object2.notify_msg, None)

        dbus_object1.notify_parent(msg)

        # Ensure message gets sent to other devices and not itself
        self.assertIs(dbus_object1.notify_msg, None)
        self.assertIs(dbus_object2.notify_msg, msg)


import pytest
import pyvera as pv
from requests.exceptions import RequestException

from homeassistant.components.vera import (
    CONF_CONTROLLER,
    CONF_EXCLUDE,
    CONF_LIGHTS,
    DOMAIN,
)
from homeassistant.config_entries import ENTRY_STATE_NOT_LOADED
from homeassistant.core import HomeAssistant

from .common import ComponentFactory, ConfigSource, new_simple_controller_config

from tests.async_mock import MagicMock
from tests.common import MockConfigEntry, mock_registry


async def test_init(
    hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
    """Test function."""
    vera_device1 = MagicMock(spec=pv.VeraBinarySensor)  # type: pv.VeraBinarySensor
    vera_device1.device_id = 1
    vera_device1.vera_device_id = vera_device1.device_id
    vera_device1.name = "first_dev"
    vera_device1.is_tripped = False
    entity1_id = "binary_sensor.first_dev_1"

    await vera_component_factory.configure_component(
        hass=hass,
        controller_config=new_simple_controller_config(
            config={CONF_CONTROLLER: "http://127.0.0.1:111"},
            config_source=ConfigSource.CONFIG_FLOW,
            serial_number="first_serial",
            devices=(vera_device1,),
        ),
    )

    entity_registry = await hass.helpers.entity_registry.async_get_registry()
    entry1 = entity_registry.async_get(entity1_id)
    assert entry1
    assert entry1.unique_id == "vera_first_serial_1"


async def test_init_from_file(
    hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
    """Test function."""
    vera_device1 = MagicMock(spec=pv.VeraBinarySensor)  # type: pv.VeraBinarySensor
    vera_device1.device_id = 1
    vera_device1.vera_device_id = vera_device1.device_id
    vera_device1.name = "first_dev"
    vera_device1.is_tripped = False
    entity1_id = "binary_sensor.first_dev_1"

    await vera_component_factory.configure_component(
        hass=hass,
        controller_config=new_simple_controller_config(
            config={CONF_CONTROLLER: "http://127.0.0.1:111"},
            config_source=ConfigSource.FILE,
            serial_number="first_serial",
            devices=(vera_device1,),
        ),
    )

    entity_registry = await hass.helpers.entity_registry.async_get_registry()
    entry1 = entity_registry.async_get(entity1_id)
    assert entry1
    assert entry1.unique_id == "vera_first_serial_1"


async def test_multiple_controllers_with_legacy_one(
    hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
    """Test multiple controllers with one legacy controller."""
    vera_device1 = MagicMock(spec=pv.VeraBinarySensor)  # type: pv.VeraBinarySensor
    vera_device1.device_id = 1
    vera_device1.vera_device_id = vera_device1.device_id
    vera_device1.name = "first_dev"
= \"first_dev\"\n vera_device1.is_tripped = False\n entity1_id = \"binary_sensor.first_dev_1\"\n\n vera_device2 = MagicMock(spec=pv.VeraBinarySensor) # type: pv.VeraBinarySensor\n vera_device2.device_id = 2\n vera_device2.vera_device_id = vera_device2.device_id\n vera_device2.name = \"second_dev\"\n vera_device2.is_tripped = False\n entity2_id = \"binary_sensor.second_dev_2\"\n\n # Add existing entity registry entry from previous setup.\n entity_registry = mock_registry(hass)\n entity_registry.async_get_or_create(\n domain=\"switch\", platform=DOMAIN, unique_id=\"12\"\n )\n\n await vera_component_factory.configure_component(\n hass=hass,\n controller_config=new_simple_controller_config(\n config={CONF_CONTROLLER: \"http://127.0.0.1:111\"},\n config_source=ConfigSource.FILE,\n serial_number=\"first_serial\",\n devices=(vera_device1,),\n ),\n )\n\n await vera_component_factory.configure_component(\n hass=hass,\n controller_config=new_simple_controller_config(\n config={CONF_CONTROLLER: \"http://127.0.0.1:222\"},\n config_source=ConfigSource.CONFIG_FLOW,\n serial_number=\"second_serial\",\n devices=(vera_device2,),\n ),\n )\n\n entity_registry = await hass.helpers.entity_registry.async_get_registry()\n\n entry1 = entity_registry.async_get(entity1_id)\n assert entry1\n assert entry1.unique_id == \"1\"\n\n entry2 = entity_registry.async_get(entity2_id)\n assert entry2\n assert entry2.unique_id == \"vera_second_serial_2\"\n\n\nasync def test_unload(\n hass: HomeAssistant, vera_component_factory: ComponentFactory\n) -> None:\n \"\"\"Test function.\"\"\"\n vera_device1 = MagicMock(spec=pv.VeraBinarySensor) # type: pv.VeraBinarySensor\n vera_device1.device_id = 1\n vera_device1.vera_device_id = vera_device1.device_id\n vera_device1.name = \"first_dev\"\n vera_device1.is_tripped = False\n\n await vera_component_factory.configure_component(\n hass=hass, controller_config=new_simple_controller_config()\n )\n\n entries = hass.config_entries.async_entries(DOMAIN)\n assert entries\n\n for config_entry in entries:\n assert await hass.config_entries.async_unload(config_entry.entry_id)\n assert config_entry.state == ENTRY_STATE_NOT_LOADED\n\n\nasync def test_async_setup_entry_error(\n hass: HomeAssistant, vera_component_factory: ComponentFactory\n) -> None:\n \"\"\"Test function.\"\"\"\n\n def setup_callback(controller: pv.VeraController) -> None:\n controller.get_devices.side_effect = RequestException()\n controller.get_scenes.side_effect = RequestException()\n\n await vera_component_factory.configure_component(\n hass=hass,\n controller_config=new_simple_controller_config(setup_callback=setup_callback),\n )\n\n entry = MockConfigEntry(\n domain=DOMAIN,\n data={CONF_CONTROLLER: \"http://127.0.0.1\"},\n options={},\n unique_id=\"12345\",\n )\n entry.add_to_hass(hass)\n\n assert not await hass.config_entries.async_setup(entry.entry_id)\n\n\n@pytest.mark.parametrize(\n [\"options\"],\n [\n [{CONF_LIGHTS: [4, 10, 12, \"AAA\"], CONF_EXCLUDE: [1, \"BBB\"]}],\n [{CONF_LIGHTS: [\"4\", \"10\", \"12\", \"AAA\"], CONF_EXCLUDE: [\"1\", \"BBB\"]}],\n ],\n)\nasync def test_exclude_and_light_ids(\n hass: HomeAssistant, vera_component_factory: ComponentFactory, options\n) -> None:\n \"\"\"Test device exclusion, marking switches as lights and fixing the data type.\"\"\"\n vera_device1 = MagicMock(spec=pv.VeraBinarySensor) # type: pv.VeraBinarySensor\n vera_device1.device_id = 1\n vera_device1.vera_device_id = 1\n vera_device1.name = \"dev1\"\n vera_device1.is_tripped = False\n entity_id1 = \"binary_sensor.dev1_1\"\n\n 
    vera_device2 = MagicMock(spec=pv.VeraBinarySensor)  # type: pv.VeraBinarySensor
    vera_device2.device_id = 2
    vera_device2.vera_device_id = 2
    vera_device2.name = "dev2"
    vera_device2.is_tripped = False
    entity_id2 = "binary_sensor.dev2_2"

    vera_device3 = MagicMock(spec=pv.VeraSwitch)  # type: pv.VeraSwitch
    vera_device3.device_id = 3
    vera_device3.vera_device_id = 3
    vera_device3.name = "dev3"
    vera_device3.category = pv.CATEGORY_SWITCH
    vera_device3.is_switched_on = MagicMock(return_value=False)
    entity_id3 = "switch.dev3_3"

    vera_device4 = MagicMock(spec=pv.VeraSwitch)  # type: pv.VeraSwitch
    vera_device4.device_id = 4
    vera_device4.vera_device_id = 4
    vera_device4.name = "dev4"
    vera_device4.category = pv.CATEGORY_SWITCH
    vera_device4.is_switched_on = MagicMock(return_value=False)
    entity_id4 = "light.dev4_4"

    component_data = await vera_component_factory.configure_component(
        hass=hass,
        controller_config=new_simple_controller_config(
            config_source=ConfigSource.CONFIG_ENTRY,
            devices=(vera_device1, vera_device2, vera_device3, vera_device4),
            config={**{CONF_CONTROLLER: "http://127.0.0.1:123"}, **options},
        ),
    )

    # Assert the entries were setup correctly.
    config_entry = next(iter(hass.config_entries.async_entries(DOMAIN)))
    assert config_entry.options[CONF_LIGHTS] == [4, 10, 12]
    assert config_entry.options[CONF_EXCLUDE] == [1]

    update_callback = component_data.controller_data[0].update_callback

    update_callback(vera_device1)
    update_callback(vera_device2)
    update_callback(vera_device3)
    update_callback(vera_device4)
    await hass.async_block_till_done()

    assert hass.states.get(entity_id1) is None
    assert hass.states.get(entity_id2) is not None
    assert hass.states.get(entity_id3) is not None
    assert hass.states.get(entity_id4) is not None


import os

import pytest
import numpy as np

from mne.viz.backends.tests._utils import (skips_if_not_mayavi,
                                           skips_if_not_pyvista)
from mne.viz.backends._utils import ALLOWED_QUIVER_MODES


@pytest.fixture
def backend_mocker():
    """Help to test set up 3d backend."""
    from mne.viz.backends import renderer
    del renderer.MNE_3D_BACKEND
    yield
    renderer.MNE_3D_BACKEND = None


@pytest.mark.parametrize('backend', [
    pytest.param('mayavi', marks=skips_if_not_mayavi),
    pytest.param('pyvista', marks=skips_if_not_pyvista),
    pytest.param('foo', marks=pytest.mark.xfail(raises=ValueError)),
])
def test_backend_environment_setup(backend, backend_mocker, monkeypatch):
    """Test set up 3d backend based on env."""
    monkeypatch.setenv("MNE_3D_BACKEND", backend)
    assert os.environ['MNE_3D_BACKEND'] == backend  # just double-check

    # reload the renderer to check if the 3d backend selection by
    # environment variable has been updated correctly
    from mne.viz.backends import renderer
    renderer.set_3d_backend(backend)
    assert renderer.MNE_3D_BACKEND == backend
    assert renderer.get_3d_backend() == backend


def test_3d_functions(renderer):
    """Test figure management functions."""
    fig = renderer.create_3d_figure((300, 300))
    # Mayavi actually needs something in the display to set the title
    wrap_renderer = renderer.backend._Renderer(fig=fig)
    wrap_renderer.sphere(np.array([0., 0., 0.]), 'w', 1.)
    renderer.backend._check_3d_figure(fig)
    renderer.backend._set_3d_view(figure=fig, azimuth=None, elevation=None,
                                  focalpoint=(0., 0., 0.), distance=None)
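    # The remaining figure helpers (title, screenshot, teardown) go through
    # the same private backend API and are exercised on the same figure.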
renderer.backend._set_3d_title(figure=fig, title='foo')\n renderer.backend._take_3d_screenshot(figure=fig)\n renderer.backend._close_all()\n\n\ndef test_3d_backend(renderer):\n \"\"\"Test default plot.\"\"\"\n # set data\n win_size = (600, 600)\n win_color = 'black'\n\n tet_size = 1.0\n tet_x = np.array([0, tet_size, 0, 0])\n tet_y = np.array([0, 0, tet_size, 0])\n tet_z = np.array([0, 0, 0, tet_size])\n tet_indices = np.array([[0, 1, 2],\n [0, 1, 3],\n [0, 2, 3],\n [1, 2, 3]])\n tet_color = 'white'\n\n sph_center = np.column_stack((tet_x, tet_y, tet_z))\n sph_color = 'red'\n sph_scale = tet_size / 3.0\n\n ct_scalars = np.array([0.0, 0.0, 0.0, 1.0])\n ct_levels = [0.2, 0.4, 0.6, 0.8]\n ct_surface = {\n \"rr\": sph_center,\n \"tris\": tet_indices\n }\n\n qv_color = 'blue'\n qv_scale = tet_size / 2.0\n qv_center = np.array([np.mean((sph_center[va, :],\n sph_center[vb, :],\n sph_center[vc, :]), axis=0)\n for (va, vb, vc) in tet_indices])\n center = np.mean(qv_center, axis=0)\n qv_dir = qv_center - center\n qv_scale_mode = 'scalar'\n qv_scalars = np.linspace(1.0, 2.0, 4)\n\n txt_x = 0.0\n txt_y = 0.0\n txt_text = \"renderer\"\n txt_size = 14\n\n cam_distance = 5 * tet_size\n\n # init scene\n rend = renderer.create_3d_figure(\n size=win_size,\n bgcolor=win_color,\n smooth_shading=True,\n scene=False,\n )\n for interaction in ('terrain', 'trackball'):\n rend.set_interaction(interaction)\n\n # use mesh\n mesh_data = rend.mesh(\n x=tet_x,\n y=tet_y,\n z=tet_z,\n triangles=tet_indices,\n color=tet_color,\n )\n rend.remove_mesh(mesh_data)\n\n # use contour\n rend.contour(surface=ct_surface, scalars=ct_scalars,\n contours=ct_levels, kind='line')\n rend.contour(surface=ct_surface, scalars=ct_scalars,\n contours=ct_levels, kind='tube')\n\n # use sphere\n rend.sphere(center=sph_center, color=sph_color,\n scale=sph_scale, radius=1.0)\n\n # use quiver3d\n kwargs = dict(\n x=qv_center[:, 0],\n y=qv_center[:, 1],\n z=qv_center[:, 2],\n u=qv_dir[:, 0],\n v=qv_dir[:, 1],\n w=qv_dir[:, 2],\n color=qv_color,\n scale=qv_scale,\n scale_mode=qv_scale_mode,\n scalars=qv_scalars,\n )\n for mode in ALLOWED_QUIVER_MODES:\n rend.quiver3d(mode=mode, **kwargs)\n with pytest.raises(ValueError, match='Invalid value'):\n rend.quiver3d(mode='foo', **kwargs)\n\n # use tube\n rend.tube(origin=np.array([[0, 0, 0]]),\n destination=np.array([[0, 1, 0]]))\n tube = rend.tube(origin=np.array([[1, 0, 0]]),\n destination=np.array([[1, 1, 0]]),\n scalars=np.array([[1.0, 1.0]]))\n\n # scalar bar\n rend.scalarbar(source=tube, title=\"Scalar Bar\",\n bgcolor=[1, 1, 1])\n\n # use text\n rend.text2d(x_window=txt_x, y_window=txt_y, text=txt_text,\n size=txt_size, justification='right')\n rend.text3d(x=0, y=0, z=0, text=txt_text, scale=1.0)\n rend.set_camera(azimuth=180.0, elevation=90.0,\n distance=cam_distance,\n focalpoint=center)\n rend.reset_camera()\n rend.show()\n\n\ndef test_get_3d_backend(renderer):\n \"\"\"Test get_3d_backend function call for side-effects.\"\"\"\n # Test twice to ensure the first call had no side-effect\n orig_backend = renderer.MNE_3D_BACKEND\n assert renderer.get_3d_backend() == orig_backend\n assert renderer.get_3d_backend() == orig_backend\n\n"}}},{"rowIdx":434,"cells":{"text":{"kind":"string","value":"\nimport diamond.collector\nimport telnetlib\nimport time\n\n\nclass MogilefsCollector(diamond.collector.Collector):\n\n def get_default_config_help(self):\n config_help = super(MogilefsCollector, self).get_default_config_help()\n config_help.update({\n 'path': 'Metric path',\n })\n return config_help\n\n def 
get_default_config(self):
        config = super(MogilefsCollector, self).get_default_config()
        config.update({
            'path': 'mogilefs'
        })
        return config

    def collect(self):
        tn = telnetlib.Telnet("127.0.0.1", 7001, 3)
        time.sleep(1)
        # telnetlib deals in bytes on Python 3, so send the command as bytes
        # and decode the reply before parsing it.
        tn.write(b"!stats\r\n")
        out = tn.read_until(b".", 3).decode("utf-8")

        myvars = {}

        # The reply is a series of "name value" lines terminated by a "."
        # line; drop the terminator and coerce each value to an integer.
        # (int() and items() replace the Python 2-only long()/iteritems().)
        for line in out.splitlines()[:-1]:
            name, var = line.partition(" ")[::2]
            myvars[name.strip()] = int(var)

        for metric_name, metric_value in myvars.items():
            # Publish each counter reported by mogilefsd as a metric.
            self.publish(metric_name, metric_value)


from typing import Optional

from homeassistant.components.device_tracker import SOURCE_TYPE_GPS
from homeassistant.components.device_tracker.config_entry import TrackerEntity

from . import DOMAIN as TESLA_DOMAIN, TeslaDevice


async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Tesla device trackers by config_entry."""
    entities = [
        TeslaDeviceEntity(
            device,
            hass.data[TESLA_DOMAIN][config_entry.entry_id]["coordinator"],
        )
        for device in hass.data[TESLA_DOMAIN][config_entry.entry_id]["devices"][
            "devices_tracker"
        ]
    ]
    async_add_entities(entities, True)


class TeslaDeviceEntity(TeslaDevice, TrackerEntity):
    """A class representing a Tesla device."""

    @property
    def latitude(self) -> Optional[float]:
        """Return latitude value of the device."""
        # Reuse the fetched location instead of querying the device twice.
        location = self.tesla_device.get_location()
        return location.get("latitude") if location else None

    @property
    def longitude(self) -> Optional[float]:
        """Return longitude value of the device."""
        location = self.tesla_device.get_location()
        return location.get("longitude") if location else None

    @property
    def source_type(self):
        """Return the source type, eg gps or router, of the device."""
        return SOURCE_TYPE_GPS

    @property
    def device_state_attributes(self):
        """Return the state attributes of the device."""
        attr = super().device_state_attributes.copy()
        location = self.tesla_device.get_location()
        if location:
            attr.update(
                {
                    "trackr_id": self.unique_id,
                    "heading": location["heading"],
                    "speed": location["speed"],
                }
            )
        return attr
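
# --- Illustrative aside (not part of the original sources) ---
# MogilefsCollector above parses the tracker's "!stats" reply by hand. This
# standalone sketch shows the same parsing logic in isolation so it can be
# exercised without a live mogilefsd; the helper name and the sample payload
# are assumptions made purely for illustration.
def _parse_mogilefs_stats(payload):
    """Parse "name value" lines, stopping at the terminating "." line."""
    stats = {}
    for line in payload.splitlines():
        if line.strip() == ".":
            break
        name, _, value = line.partition(" ")
        if name and value:
            stats[name.strip()] = int(value)
    return stats


# Example: mirrors what collect() would publish for this payload.
assert _parse_mogilefs_stats("uptime 120\nqueries 5\n.") == {
    "uptime": 120,
    "queries": 5,
}
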
from __future__ import unicode_literals

import os
from lib.fun.fun import cool
from core.CONF import build_conf_dic
from core.PATTERN import build_pattern_dic
from lib.data.data import paths, pyoptions


def plug_parser():
    if pyoptions.args_plug[0] not in pyoptions.plug_range:
        exit("[!] Choose plugin from ({0})".format(cool.fuchsia(",".join(pyoptions.plug_range))))
    else:
        pyoptions.plugins_operator.get(pyoptions.args_plug[0])(pyoptions.args_plug)


def conf_parser():
    if pyoptions.args_conf == 'const':
        if os.path.isfile(paths.buildconf_path):
            build_conf_dic(source=paths.buildconf_path)
    else:
        paths.buildconf_path = pyoptions.args_conf
        build_conf_dic(source=paths.buildconf_path)


def pattern_parser():
    build_pattern_dic(source=pyoptions.args_pattern)


def tool_parser():
    if len(pyoptions.args_tool) >= 1:
        if pyoptions.args_tool[0] in pyoptions.tool_range:
            pyoptions.tools_operator.get(pyoptions.args_tool[0])(pyoptions.args_tool)
        else:
            exit(pyoptions.CRLF + "[!] Choose tool from ({})".format(cool.fuchsia(" ".join(pyoptions.tool_range))))
    else:
        exit(pyoptions.CRLF + cool.red("[-] Please specify a tool name"))


from datetime import datetime as dt

import pytz
from mock import patch
from pandas.util.testing import assert_frame_equal

from arctic.date._mktz import mktz
from tests.util import read_str_as_pandas, multi_index_df_from_arrs

pytest_plugins = ['arctic.fixtures.arctic']


ts1 = read_str_as_pandas("""               sample_dt | near
                     2012-09-08 17:06:11.040 | 1.0
                     2012-10-08 17:06:11.040 | 2.0
                     2012-10-09 17:06:11.040 | 2.5
                     2012-11-08 17:06:11.040 | 3.0""")

ts1_update = read_str_as_pandas("""        sample_dt | near
                     2012-09-08 17:06:11.040 | 1.0
                     2012-10-08 17:06:11.040 | 2.0
                     2012-10-09 17:06:11.040 | 2.5
                     2012-11-08 17:06:11.040 | 3.0
                     2012-11-09 17:06:11.040 | 3.5""")

LOCAL_TZ = mktz()


def test_new_ts_read_write(bitemporal_library):
    bitemporal_library.update('spam', ts1)
    assert_frame_equal(ts1, bitemporal_library.read('spam').data)


def test_read_ts_raw(bitemporal_library):
    bitemporal_library.update('spam', ts1, as_of=dt(2015, 5, 1, tzinfo=mktz('UTC')))
    assert_frame_equal(bitemporal_library.read('spam', raw=True).data.tz_convert(tz=mktz('UTC'), level=1), read_str_as_pandas(
        """               sample_dt | observed_dt | near
          2012-09-08 17:06:11.040 |  2015-05-01 | 1.0
          2012-10-08 17:06:11.040 |  2015-05-01 | 2.0
          2012-10-09 17:06:11.040 |  2015-05-01 | 2.5
          2012-11-08 17:06:11.040 |  2015-05-01 | 3.0""", num_index=2).tz_localize(tz=mktz('UTC'), level=1))


def test_write_ts_with_column_name_same_as_observed_dt_ok(bitemporal_library):
    ts1 = read_str_as_pandas("""       sample_dt | observed_dt | near
              2012-09-08 17:06:11.040 |    2015-1-1 | 1.0
              2012-10-08 17:06:11.040 |    2015-1-1 | 2.0
              2012-10-09 17:06:11.040 |    2015-1-1 | 2.5
              2012-11-08 17:06:11.040 |    2015-1-1 | 3.0""")
    bitemporal_library.update('spam', ts1)
    assert_frame_equal(ts1, bitemporal_library.read('spam').data)


def test_last_update(bitemporal_library):
    bitemporal_library.update('spam', ts1, as_of=dt(2015, 1, 1))
    bitemporal_library.update('spam', ts1, as_of=dt(2015, 1, 2))
    assert bitemporal_library.read('spam').last_updated == dt(2015, 1, 2, tzinfo=LOCAL_TZ)


def test_existing_ts_update_and_read(bitemporal_library):
    bitemporal_library.update('spam', ts1)
    bitemporal_library.update('spam', ts1_update[-1:])
    assert_frame_equal(ts1_update, bitemporal_library.read('spam').data)


def test_existing_ts_update_existing_data_and_read(bitemporal_library):
    bitemporal_library.update('spam', ts1)
    bitemporal_library.update('spam', read_str_as_pandas("""  sample_dt | near
                       2012-10-09 17:06:11.040 | 4.2"""))
    expected_ts = read_str_as_pandas("""  sample_dt | near
                       2012-09-08 17:06:11.040 | 1.0
                       2012-10-08 17:06:11.040 | 2.0
                       2012-10-09 17:06:11.040 | 4.2
                       2012-11-08 17:06:11.040 | 3.0""")
    assert_frame_equal(expected_ts, bitemporal_library.read('spam').data)


def test_read_ts_with_historical_update(bitemporal_library):
    with patch('arctic.store.bitemporal_store.dt') as mock_dt:
        mock_dt.now.return_value = dt(2015, 5, 1)
        mock_dt.side_effect = lambda *args, **kwargs: dt(*args, **kwargs)
        bitemporal_library.update('spam', ts1)

    bitemporal_library.update('spam', read_str_as_pandas("""  sample_dt | near
                       2012-10-09 17:06:11.040 | 4.2"""),
                              as_of=dt(2015, 5, 2))

    bitemporal_library.update('spam', read_str_as_pandas(""" 
sample_dt | near\n 2012-10-09 17:06:11.040 | 6.6\"\"\"),\n as_of=dt(2015, 5, 3))\n assert_frame_equal(bitemporal_library.read('spam', as_of=dt(2015, 5, 2, 10, tzinfo=pytz.timezone(\"Europe/London\"))).data, read_str_as_pandas(\n \"\"\"sample_dt | near\n 2012-09-08 17:06:11.040 | 1.0\n 2012-10-08 17:06:11.040 | 2.0\n 2012-10-09 17:06:11.040 | 4.2\n 2012-11-08 17:06:11.040 | 3.0\"\"\"))\n\n assert_frame_equal(bitemporal_library.read('spam').data, read_str_as_pandas(\"\"\" sample_dt | near\n 2012-09-08 17:06:11.040 | 1.0\n 2012-10-08 17:06:11.040 | 2.0\n 2012-10-09 17:06:11.040 | 6.6\n 2012-11-08 17:06:11.040 | 3.0\"\"\"))\n\n assert_frame_equal(bitemporal_library.read('spam', as_of=dt(2015, 5, 1, 10, tzinfo=pytz.timezone(\"Europe/London\"))).data, ts1)\n\n\ndef test_read_ts_with_historical_update_and_new_row(bitemporal_library):\n with patch('arctic.store.bitemporal_store.dt') as mock_dt:\n mock_dt.now.return_value = dt(2015, 5, 1)\n mock_dt.side_effect = lambda *args, **kwargs: dt(*args, **kwargs)\n bitemporal_library.update('spam', ts1)\n\n bitemporal_library.update('spam', read_str_as_pandas(\"\"\" sample_dt | near\n 2012-10-09 17:06:11.040 | 4.2\n 2012-12-01 17:06:11.040 | 100\"\"\"),\n as_of=dt(2015, 5, 2))\n\n assert_frame_equal(bitemporal_library.read('spam').data, read_str_as_pandas(\"\"\" sample_dt | near\n 2012-09-08 17:06:11.040 | 1.0\n 2012-10-08 17:06:11.040 | 2.0\n 2012-10-09 17:06:11.040 | 4.2\n 2012-11-08 17:06:11.040 | 3.0\n 2012-12-01 17:06:11.040 | 100\"\"\"))\n\n assert_frame_equal(bitemporal_library.read('spam', as_of=dt(2015, 5, 1, 10)).data, ts1)\n\n\ndef test_insert_new_rows_in_middle_remains_sorted(bitemporal_library):\n bitemporal_library.update('spam', ts1)\n bitemporal_library.update('spam', read_str_as_pandas(\"\"\" sample_dt | near\n 2012-10-09 12:00:00.000 | 30.0\n 2012-12-01 17:06:11.040 | 100\"\"\"))\n\n assert_frame_equal(bitemporal_library.read('spam').data, read_str_as_pandas(\"\"\" sample_dt | near\n 2012-09-08 17:06:11.040 | 1.0\n 2012-10-08 17:06:11.040 | 2.0\n 2012-10-09 12:00:00.000 | 30.0\n 2012-10-09 17:06:11.040 | 2.5\n 2012-11-08 17:06:11.040 | 3.0\n 2012-12-01 17:06:11.040 | 100\"\"\"))\n\n\ndef test_insert_versions_inbetween_works_ok(bitemporal_library):\n bitemporal_library.update('spam', ts1, as_of=dt(2015, 5, 1))\n bitemporal_library.update('spam', read_str_as_pandas(\"\"\" sample_dt | near\n 2012-12-01 17:06:11.040 | 100\"\"\"),\n as_of=dt(2015, 5, 10))\n\n bitemporal_library.update('spam', read_str_as_pandas(\"\"\" sample_dt | near\n 2012-12-01 17:06:11.040 | 25\"\"\"),\n as_of=dt(2015, 5, 8))\n\n assert_frame_equal(bitemporal_library.read('spam').data, read_str_as_pandas(\"\"\" sample_dt | near\n 2012-09-08 17:06:11.040 | 1.0\n 2012-10-08 17:06:11.040 | 2.0\n 2012-10-09 17:06:11.040 | 2.5\n 2012-11-08 17:06:11.040 | 3.0\n 2012-12-01 17:06:11.040 | 100\"\"\"))\n\n assert_frame_equal(bitemporal_library.read('spam', as_of=dt(2015, 5, 9)).data, read_str_as_pandas(\n \"\"\" sample_dt | near\n 2012-09-08 17:06:11.040 | 1.0\n 2012-10-08 17:06:11.040 | 2.0\n 2012-10-09 17:06:11.040 | 2.5\n 2012-11-08 17:06:11.040 | 3.0\n 2012-12-01 17:06:11.040 | 25\"\"\"))\n\n\ndef test_read_ts_raw_all_version_ok(bitemporal_library):\n bitemporal_library.update('spam', ts1, as_of=dt(2015, 5, 1, tzinfo=mktz('UTC')))\n bitemporal_library.update('spam', read_str_as_pandas(\"\"\" sample_dt | near\n 2012-12-01 17:06:11.040 | 25\"\"\"),\n as_of=dt(2015, 5, 5, tzinfo=mktz('UTC')))\n bitemporal_library.update('spam', read_str_as_pandas(\"\"\" sample_dt | near\n 
2012-11-08 17:06:11.040 | 42\"\"\"),\n as_of=dt(2015, 5, 3, tzinfo=mktz('UTC')))\n bitemporal_library.update('spam', read_str_as_pandas(\"\"\" sample_dt | near\n 2012-10-08 17:06:11.040 | 42\n 2013-01-01 17:06:11.040 | 100\"\"\"),\n as_of=dt(2015, 5, 10, tzinfo=mktz('UTC')))\n assert_frame_equal(bitemporal_library.read('spam', raw=True).data.tz_localize(tz=None, level=1), read_str_as_pandas(\n \"\"\" sample_dt | observed_dt | near\n 2012-09-08 17:06:11.040 | 2015-05-01 | 1.0\n 2012-10-08 17:06:11.040 | 2015-05-01 | 2.0\n 2012-10-08 17:06:11.040 | 2015-05-10 | 42\n 2012-10-09 17:06:11.040 | 2015-05-01 | 2.5\n 2012-11-08 17:06:11.040 | 2015-05-01 | 3.0\n 2012-11-08 17:06:11.040 | 2015-05-03 | 42\n 2012-12-01 17:06:11.040 | 2015-05-05 | 25\n 2013-01-01 17:06:11.040 | 2015-05-10 | 100\"\"\", num_index=2))\n\n\ndef test_bitemporal_store_saves_as_of_with_timezone(bitemporal_library):\n bitemporal_library.update('spam', ts1, as_of=dt(2015, 5, 1))\n df = bitemporal_library.read('spam', raw=True).data\n assert all([x[1] == dt(2015, 5, 1, tzinfo=LOCAL_TZ) for x in df.index])\n\n\ndef test_bitemporal_store_read_as_of_timezone(bitemporal_library):\n bitemporal_library.update('spam', ts1, as_of=dt(2015, 5, 1, tzinfo=mktz('Europe/London')))\n bitemporal_library.update('spam', read_str_as_pandas(\"\"\" sample_dt | near\n 2012-12-01 17:06:11.040 | 25\"\"\"),\n as_of=dt(2015, 5, 2, tzinfo=mktz('Europe/London')))\n df = bitemporal_library.read('spam', as_of=dt(2015, 5, 2, tzinfo=mktz('Asia/Hong_Kong'))).data\n assert_frame_equal(df, ts1)\n\n\ndef test_multi_index_ts_read_write(bitemporal_library):\n ts = multi_index_df_from_arrs(\n index_headers=('index 1', 'index 2'),\n index_arrs=[\n ['2012-09-08 17:06:11.040', '2012-10-08 17:06:11.040', '2012-10-09 17:06:11.040', '2012-11-08 17:06:11.040'],\n ['SPAM Index'] * 4\n ],\n data_dict={'near': [1.0, 2.0, 2.5, 3.0]}\n )\n bitemporal_library.update('spam', ts)\n assert_frame_equal(ts, bitemporal_library.read('spam').data)\n\n\ndef test_multi_index_ts_read_raw(bitemporal_library):\n ts = multi_index_df_from_arrs(\n index_headers=('index 1', 'index 2'),\n index_arrs=[\n ['2012-09-08 17:06:11.040', '2012-10-08 17:06:11.040', '2012-10-09 17:06:11.040', '2012-11-08 17:06:11.040'],\n ['SPAM Index'] * 4\n ],\n data_dict={'near': [1.0, 2.0, 2.5, 3.0]}\n )\n\n expected_ts = multi_index_df_from_arrs(\n index_headers=('index 1', 'index 2', 'observed_dt'),\n index_arrs=[\n ['2012-09-08 17:06:11.040', '2012-10-08 17:06:11.040', '2012-10-09 17:06:11.040', '2012-11-08 17:06:11.040'],\n ['SPAM Index'] * 4,\n ['2015-01-01'] * 4,\n ],\n data_dict={'near': [1.0, 2.0, 2.5, 3.0]}\n )\n bitemporal_library.update('spam', ts, as_of=dt(2015, 1, 1))\n assert_frame_equal(expected_ts.tz_localize(tz=LOCAL_TZ, level=2), bitemporal_library.read('spam', raw=True).data)\n\n\ndef test_multi_index_update(bitemporal_library):\n sample_timerange = list(sorted(['2012-09-08 17:06:11.040', '2012-10-08 17:06:11.040', '2012-10-09 17:06:11.040', '2012-11-08 17:06:11.040'] * 2))\n ts = multi_index_df_from_arrs(\n index_headers=('index 1', 'index 2'),\n index_arrs=[\n sample_timerange,\n ['SPAM Index', 'EGG Index'] * 4\n ],\n data_dict={'near': [1.0, 1.1, 2.0, 2.1, 2.5, 2.6, 3.0, 3.1]}\n )\n\n ts2 = multi_index_df_from_arrs(\n index_headers=('index 1', 'index 2'),\n index_arrs=[\n ['2012-09-08 17:06:11.040', '2012-09-08 17:06:11.040', '2012-12-08 17:06:11.040'],\n ['SPAM Index', 'EGG Index', 'SPAM Index'],\n ],\n data_dict={'near': [1.2, 1.6, 4.0]}\n )\n\n expected_ts = multi_index_df_from_arrs(\n 
index_headers=('index 1', 'index 2'),
        index_arrs=[
            sample_timerange + ['2012-12-08 17:06:11.040'],
            ['EGG Index', 'SPAM Index'] * 4 + ['SPAM Index']
        ],
        data_dict={'near': [1.6, 1.2, 2.1, 2.0, 2.6, 2.5, 3.1, 3.0, 4.0]}
    )
    bitemporal_library.update('spam', ts, as_of=dt(2015, 1, 1))
    bitemporal_library.update('spam', ts2, as_of=dt(2015, 1, 2))
    assert_frame_equal(expected_ts, bitemporal_library.read('spam').data)
    assert bitemporal_library.read('spam').last_updated == dt(2015, 1, 2, tzinfo=LOCAL_TZ)


from homeassistant.components.abode import ATTR_DEVICE_ID
from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN
from homeassistant.const import (
    ATTR_ENTITY_ID,
    ATTR_FRIENDLY_NAME,
    SERVICE_LOCK,
    SERVICE_UNLOCK,
    STATE_LOCKED,
)

from .common import setup_platform

from tests.async_mock import patch

DEVICE_ID = "lock.test_lock"


async def test_entity_registry(hass):
    """Tests that the devices are registered in the entity registry."""
    await setup_platform(hass, LOCK_DOMAIN)
    entity_registry = await hass.helpers.entity_registry.async_get_registry()

    entry = entity_registry.async_get(DEVICE_ID)
    assert entry.unique_id == "51cab3b545d2o34ed7fz02731bda5324"


async def test_attributes(hass):
    """Test the lock attributes are correct."""
    await setup_platform(hass, LOCK_DOMAIN)

    state = hass.states.get(DEVICE_ID)
    assert state.state == STATE_LOCKED
    assert state.attributes.get(ATTR_DEVICE_ID) == "ZW:00000004"
    assert not state.attributes.get("battery_low")
    assert not state.attributes.get("no_response")
    assert state.attributes.get("device_type") == "Door Lock"
    assert state.attributes.get(ATTR_FRIENDLY_NAME) == "Test Lock"


async def test_lock(hass):
    """Test the lock can be locked."""
    await setup_platform(hass, LOCK_DOMAIN)

    with patch("abodepy.AbodeLock.lock") as mock_lock:
        await hass.services.async_call(
            LOCK_DOMAIN, SERVICE_LOCK, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True
        )
        await hass.async_block_till_done()
        mock_lock.assert_called_once()


async def test_unlock(hass):
    """Test the lock can be unlocked."""
    await setup_platform(hass, LOCK_DOMAIN)

    with patch("abodepy.AbodeLock.unlock") as mock_unlock:
        await hass.services.async_call(
            LOCK_DOMAIN, SERVICE_UNLOCK, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True
        )
        await hass.async_block_till_done()
        mock_unlock.assert_called_once()


import voluptuous as vol

from homeassistant.components.cover import (
    ATTR_POSITION,
    DEVICE_CLASSES_SCHEMA,
    PLATFORM_SCHEMA,
    SUPPORT_CLOSE,
    SUPPORT_OPEN,
    SUPPORT_SET_POSITION,
    SUPPORT_STOP,
    CoverEntity,
)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME
import homeassistant.helpers.config_validation as cv

from . 
import (\n CONF_ADS_VAR,\n CONF_ADS_VAR_POSITION,\n DATA_ADS,\n STATE_KEY_POSITION,\n STATE_KEY_STATE,\n AdsEntity,\n)\n\nDEFAULT_NAME = \"ADS Cover\"\n\nCONF_ADS_VAR_SET_POS = \"adsvar_set_position\"\nCONF_ADS_VAR_OPEN = \"adsvar_open\"\nCONF_ADS_VAR_CLOSE = \"adsvar_close\"\nCONF_ADS_VAR_STOP = \"adsvar_stop\"\n\nPLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(\n {\n vol.Optional(CONF_ADS_VAR): cv.string,\n vol.Optional(CONF_ADS_VAR_POSITION): cv.string,\n vol.Optional(CONF_ADS_VAR_SET_POS): cv.string,\n vol.Optional(CONF_ADS_VAR_CLOSE): cv.string,\n vol.Optional(CONF_ADS_VAR_OPEN): cv.string,\n vol.Optional(CONF_ADS_VAR_STOP): cv.string,\n vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,\n vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,\n }\n)\n\n\ndef setup_platform(hass, config, add_entities, discovery_info=None):\n \"\"\"Set up the cover platform for ADS.\"\"\"\n ads_hub = hass.data[DATA_ADS]\n\n ads_var_is_closed = config.get(CONF_ADS_VAR)\n ads_var_position = config.get(CONF_ADS_VAR_POSITION)\n ads_var_pos_set = config.get(CONF_ADS_VAR_SET_POS)\n ads_var_open = config.get(CONF_ADS_VAR_OPEN)\n ads_var_close = config.get(CONF_ADS_VAR_CLOSE)\n ads_var_stop = config.get(CONF_ADS_VAR_STOP)\n name = config[CONF_NAME]\n device_class = config.get(CONF_DEVICE_CLASS)\n\n add_entities(\n [\n AdsCover(\n ads_hub,\n ads_var_is_closed,\n ads_var_position,\n ads_var_pos_set,\n ads_var_open,\n ads_var_close,\n ads_var_stop,\n name,\n device_class,\n )\n ]\n )\n\n\nclass AdsCover(AdsEntity, CoverEntity):\n \"\"\"Representation of ADS cover.\"\"\"\n\n def __init__(\n self,\n ads_hub,\n ads_var_is_closed,\n ads_var_position,\n ads_var_pos_set,\n ads_var_open,\n ads_var_close,\n ads_var_stop,\n name,\n device_class,\n ):\n \"\"\"Initialize AdsCover entity.\"\"\"\n super().__init__(ads_hub, name, ads_var_is_closed)\n if self._ads_var is None:\n if ads_var_position is not None:\n self._unique_id = ads_var_position\n elif ads_var_pos_set is not None:\n self._unique_id = ads_var_pos_set\n elif ads_var_open is not None:\n self._unique_id = ads_var_open\n\n self._state_dict[STATE_KEY_POSITION] = None\n self._ads_var_position = ads_var_position\n self._ads_var_pos_set = ads_var_pos_set\n self._ads_var_open = ads_var_open\n self._ads_var_close = ads_var_close\n self._ads_var_stop = ads_var_stop\n self._device_class = device_class\n\n async def async_added_to_hass(self):\n \"\"\"Register device notification.\"\"\"\n if self._ads_var is not None:\n await self.async_initialize_device(\n self._ads_var, self._ads_hub.PLCTYPE_BOOL\n )\n\n if self._ads_var_position is not None:\n await self.async_initialize_device(\n self._ads_var_position, self._ads_hub.PLCTYPE_BYTE, STATE_KEY_POSITION\n )\n\n @property\n def device_class(self):\n \"\"\"Return the class of this cover.\"\"\"\n return self._device_class\n\n @property\n def is_closed(self):\n \"\"\"Return if the cover is closed.\"\"\"\n if self._ads_var is not None:\n return self._state_dict[STATE_KEY_STATE]\n if self._ads_var_position is not None:\n return self._state_dict[STATE_KEY_POSITION] == 0\n return None\n\n @property\n def current_cover_position(self):\n \"\"\"Return current position of cover.\"\"\"\n return self._state_dict[STATE_KEY_POSITION]\n\n @property\n def supported_features(self):\n \"\"\"Flag supported features.\"\"\"\n supported_features = SUPPORT_OPEN | SUPPORT_CLOSE\n\n if self._ads_var_stop is not None:\n supported_features |= SUPPORT_STOP\n\n if self._ads_var_pos_set is not None:\n supported_features |= SUPPORT_SET_POSITION\n\n return 
supported_features

    def stop_cover(self, **kwargs):
        """Fire the stop action."""
        if self._ads_var_stop:
            self._ads_hub.write_by_name(
                self._ads_var_stop, True, self._ads_hub.PLCTYPE_BOOL
            )

    def set_cover_position(self, **kwargs):
        """Set cover position."""
        position = kwargs[ATTR_POSITION]
        if self._ads_var_pos_set is not None:
            self._ads_hub.write_by_name(
                self._ads_var_pos_set, position, self._ads_hub.PLCTYPE_BYTE
            )

    def open_cover(self, **kwargs):
        """Move the cover up."""
        if self._ads_var_open is not None:
            self._ads_hub.write_by_name(
                self._ads_var_open, True, self._ads_hub.PLCTYPE_BOOL
            )
        elif self._ads_var_pos_set is not None:
            self.set_cover_position(position=100)

    def close_cover(self, **kwargs):
        """Move the cover down."""
        if self._ads_var_close is not None:
            self._ads_hub.write_by_name(
                self._ads_var_close, True, self._ads_hub.PLCTYPE_BOOL
            )
        elif self._ads_var_pos_set is not None:
            self.set_cover_position(position=0)

    @property
    def available(self):
        """Return False if state has not been updated yet."""
        if self._ads_var is not None or self._ads_var_position is not None:
            return (
                self._state_dict[STATE_KEY_STATE] is not None
                or self._state_dict[STATE_KEY_POSITION] is not None
            )
        return True


import unittest

import mock

from docker_registry.lib.index import db


class TestVersion(unittest.TestCase):

    def setUp(self):
        self.version = db.Version()

    def test_repr(self):
        self.assertEqual(type(repr(self.version)), str)


class TestRepository(unittest.TestCase):

    def setUp(self):
        self.repository = db.Repository()

    def test_repr(self):
        self.assertEqual(type(repr(self.repository)), str)


class TestSQLAlchemyIndex(unittest.TestCase):

    def setUp(self):
        self.index = db.SQLAlchemyIndex(database="sqlite://")

    @mock.patch('sqlalchemy.engine.Engine.has_table', return_value=True)
    @mock.patch('sqlalchemy.orm.query.Query.first')
    def test_setup_database(self, first, has_table):
        # Configure the patched Query.first to raise; the previous local
        # rebinding (first = mock.Mock(...)) never reached the patch object.
        first.side_effect = db.sqlalchemy.exc.OperationalError("", {}, None)
        self.assertRaises(
            NotImplementedError, db.SQLAlchemyIndex, database="sqlite://")


revision = "1db4f82bc780"
down_revision = "3adfdd6598df"

from alembic import op

from flask import current_app
from logging import Formatter, FileHandler, getLogger

log = getLogger(__name__)
handler = FileHandler(current_app.config.get("LOG_UPGRADE_FILE", "db_upgrade.log"))
handler.setFormatter(
    Formatter(
        "%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]"
    )
)
handler.setLevel(current_app.config.get("LOG_LEVEL", "DEBUG"))
log.setLevel(current_app.config.get("LOG_LEVEL", "DEBUG"))
log.addHandler(handler)


def upgrade():
    connection = op.get_bind()

    result = connection.execute(
        """\
        UPDATE certificates
        SET rotation_policy_id=(SELECT id FROM rotation_policies WHERE name='default')
        WHERE rotation_policy_id IS NULL
        RETURNING id
        """
    )
    log.info("Filled rotation_policy for %d certificates" % result.rowcount)


def downgrade():
    pass


import pytest

from homeassistant.components.homekit.const import ATTR_VALUE
from homeassistant.components.homekit.type_locks import Lock
from homeassistant.components.lock import DOMAIN
from 
homeassistant.const import (\n ATTR_CODE,\n ATTR_ENTITY_ID,\n STATE_LOCKED,\n STATE_UNKNOWN,\n STATE_UNLOCKED,\n)\n\nfrom tests.common import async_mock_service\n\n\nasync def test_lock_unlock(hass, hk_driver, events):\n \"\"\"Test if accessory and HA are updated accordingly.\"\"\"\n code = \"1234\"\n config = {ATTR_CODE: code}\n entity_id = \"lock.kitchen_door\"\n\n hass.states.async_set(entity_id, None)\n await hass.async_block_till_done()\n acc = Lock(hass, hk_driver, \"Lock\", entity_id, 2, config)\n await acc.run_handler()\n\n assert acc.aid == 2\n assert acc.category == 6 # DoorLock\n\n assert acc.char_current_state.value == 3\n assert acc.char_target_state.value == 1\n\n hass.states.async_set(entity_id, STATE_LOCKED)\n await hass.async_block_till_done()\n assert acc.char_current_state.value == 1\n assert acc.char_target_state.value == 1\n\n hass.states.async_set(entity_id, STATE_UNLOCKED)\n await hass.async_block_till_done()\n assert acc.char_current_state.value == 0\n assert acc.char_target_state.value == 0\n\n hass.states.async_set(entity_id, STATE_UNKNOWN)\n await hass.async_block_till_done()\n assert acc.char_current_state.value == 3\n assert acc.char_target_state.value == 0\n\n hass.states.async_remove(entity_id)\n await hass.async_block_till_done()\n assert acc.char_current_state.value == 3\n assert acc.char_target_state.value == 0\n\n # Set from HomeKit\n call_lock = async_mock_service(hass, DOMAIN, \"lock\")\n call_unlock = async_mock_service(hass, DOMAIN, \"unlock\")\n\n await hass.async_add_executor_job(acc.char_target_state.client_update_value, 1)\n await hass.async_block_till_done()\n assert call_lock\n assert call_lock[0].data[ATTR_ENTITY_ID] == entity_id\n assert call_lock[0].data[ATTR_CODE] == code\n assert acc.char_target_state.value == 1\n assert len(events) == 1\n assert events[-1].data[ATTR_VALUE] is None\n\n await hass.async_add_executor_job(acc.char_target_state.client_update_value, 0)\n await hass.async_block_till_done()\n assert call_unlock\n assert call_unlock[0].data[ATTR_ENTITY_ID] == entity_id\n assert call_unlock[0].data[ATTR_CODE] == code\n assert acc.char_target_state.value == 0\n assert len(events) == 2\n assert events[-1].data[ATTR_VALUE] is None\n\n\n@pytest.mark.parametrize(\"config\", [{}, {ATTR_CODE: None}])\nasync def test_no_code(hass, hk_driver, config, events):\n \"\"\"Test accessory if lock doesn't require a code.\"\"\"\n entity_id = \"lock.kitchen_door\"\n\n hass.states.async_set(entity_id, None)\n await hass.async_block_till_done()\n acc = Lock(hass, hk_driver, \"Lock\", entity_id, 2, config)\n\n # Set from HomeKit\n call_lock = async_mock_service(hass, DOMAIN, \"lock\")\n\n await hass.async_add_executor_job(acc.char_target_state.client_update_value, 1)\n await hass.async_block_till_done()\n assert call_lock\n assert call_lock[0].data[ATTR_ENTITY_ID] == entity_id\n assert ATTR_CODE not in call_lock[0].data\n assert acc.char_target_state.value == 1\n assert len(events) == 1\n assert events[-1].data[ATTR_VALUE] is None\n\n"}}},{"rowIdx":443,"cells":{"text":{"kind":"string","value":"\nfrom homeassistant.components.media_player import DOMAIN as MP_DOMAIN\n\nfrom .const import DOMAIN, HASS_DATA_REMOVE_LISTENERS_KEY, HASS_DATA_UPDATER_KEY\n\n\nasync def async_setup(hass, config):\n \"\"\"Set up the forked-daapd component.\"\"\"\n return True\n\n\nasync def async_setup_entry(hass, entry):\n \"\"\"Set up forked-daapd from a config entry by forwarding to platform.\"\"\"\n hass.async_create_task(\n hass.config_entries.async_forward_entry_setup(entry, 
MP_DOMAIN)\n )\n return True\n\n\nasync def async_unload_entry(hass, entry):\n \"\"\"Remove forked-daapd component.\"\"\"\n status = await hass.config_entries.async_forward_entry_unload(entry, MP_DOMAIN)\n if status and hass.data.get(DOMAIN) and hass.data[DOMAIN].get(entry.entry_id):\n hass.data[DOMAIN][entry.entry_id][\n HASS_DATA_UPDATER_KEY\n ].websocket_handler.cancel()\n for remove_listener in hass.data[DOMAIN][entry.entry_id][\n HASS_DATA_REMOVE_LISTENERS_KEY\n ]:\n remove_listener()\n del hass.data[DOMAIN][entry.entry_id]\n if not hass.data[DOMAIN]:\n del hass.data[DOMAIN]\n return status\n\n"}}},{"rowIdx":444,"cells":{"text":{"kind":"string","value":"\nimport asyncio\nfrom types import ModuleType\nfrom typing import Any, Callable, Dict, Optional\n\nimport attr\n\nfrom homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE\nfrom homeassistant.core import callback\nfrom homeassistant.exceptions import HomeAssistantError\nfrom homeassistant.helpers import config_per_platform\nfrom homeassistant.helpers.event import async_track_time_interval\nfrom homeassistant.helpers.typing import ConfigType, HomeAssistantType\nfrom homeassistant.setup import async_prepare_setup_platform\nfrom homeassistant.util import dt as dt_util\n\nfrom .const import (\n CONF_SCAN_INTERVAL,\n DOMAIN,\n LOGGER,\n PLATFORM_TYPE_LEGACY,\n SCAN_INTERVAL,\n SOURCE_TYPE_ROUTER,\n)\n\n\n@attr.s\nclass DeviceTrackerPlatform:\n \"\"\"Class to hold platform information.\"\"\"\n\n LEGACY_SETUP = (\n \"async_get_scanner\",\n \"get_scanner\",\n \"async_setup_scanner\",\n \"setup_scanner\",\n )\n\n name: str = attr.ib()\n platform: ModuleType = attr.ib()\n config: Dict = attr.ib()\n\n @property\n def type(self):\n \"\"\"Return platform type.\"\"\"\n for methods, platform_type in ((self.LEGACY_SETUP, PLATFORM_TYPE_LEGACY),):\n for meth in methods:\n if hasattr(self.platform, meth):\n return platform_type\n\n return None\n\n async def async_setup_legacy(self, hass, tracker, discovery_info=None):\n \"\"\"Set up a legacy platform.\"\"\"\n LOGGER.info(\"Setting up %s.%s\", DOMAIN, self.type)\n try:\n scanner = None\n setup = None\n if hasattr(self.platform, \"async_get_scanner\"):\n scanner = await self.platform.async_get_scanner(\n hass, {DOMAIN: self.config}\n )\n elif hasattr(self.platform, \"get_scanner\"):\n scanner = await hass.async_add_executor_job(\n self.platform.get_scanner, hass, {DOMAIN: self.config}\n )\n elif hasattr(self.platform, \"async_setup_scanner\"):\n setup = await self.platform.async_setup_scanner(\n hass, self.config, tracker.async_see, discovery_info\n )\n elif hasattr(self.platform, \"setup_scanner\"):\n setup = await hass.async_add_executor_job(\n self.platform.setup_scanner,\n hass,\n self.config,\n tracker.see,\n discovery_info,\n )\n else:\n raise HomeAssistantError(\"Invalid legacy device_tracker platform.\")\n\n if scanner:\n async_setup_scanner_platform(\n hass, self.config, scanner, tracker.async_see, self.type\n )\n return\n\n if not setup:\n LOGGER.error(\"Error setting up platform %s\", self.type)\n return\n\n except Exception: # pylint: disable=broad-except\n LOGGER.exception(\"Error setting up platform %s\", self.type)\n\n\nasync def async_extract_config(hass, config):\n \"\"\"Extract device tracker config and split between legacy and modern.\"\"\"\n legacy = []\n\n for platform in await asyncio.gather(\n *(\n async_create_platform_type(hass, config, p_type, p_config)\n for p_type, p_config in config_per_platform(config, DOMAIN)\n )\n ):\n if platform is None:\n continue\n\n if 
platform.type == PLATFORM_TYPE_LEGACY:\n legacy.append(platform)\n else:\n raise ValueError(\n f\"Unable to determine type for {platform.name}: {platform.type}\"\n )\n\n return legacy\n\n\nasync def async_create_platform_type(\n hass, config, p_type, p_config\n) -> Optional[DeviceTrackerPlatform]:\n \"\"\"Determine type of platform.\"\"\"\n platform = await async_prepare_setup_platform(hass, config, DOMAIN, p_type)\n\n if platform is None:\n return None\n\n return DeviceTrackerPlatform(p_type, platform, p_config)\n\n\n@callback\ndef async_setup_scanner_platform(\n hass: HomeAssistantType,\n config: ConfigType,\n scanner: Any,\n async_see_device: Callable,\n platform: str,\n):\n \"\"\"Set up the connect scanner-based platform to device tracker.\n\n This method must be run in the event loop.\n \"\"\"\n interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)\n update_lock = asyncio.Lock()\n scanner.hass = hass\n\n # Initial scan of each mac we also tell about host name for config\n seen: Any = set()\n\n async def async_device_tracker_scan(now: dt_util.dt.datetime):\n \"\"\"Handle interval matches.\"\"\"\n if update_lock.locked():\n LOGGER.warning(\n \"Updating device list from %s took longer than the scheduled \"\n \"scan interval %s\",\n platform,\n interval,\n )\n return\n\n async with update_lock:\n found_devices = await scanner.async_scan_devices()\n\n for mac in found_devices:\n if mac in seen:\n host_name = None\n else:\n host_name = await scanner.async_get_device_name(mac)\n seen.add(mac)\n\n try:\n extra_attributes = await scanner.async_get_extra_attributes(mac)\n except NotImplementedError:\n extra_attributes = {}\n\n kwargs = {\n \"mac\": mac,\n \"host_name\": host_name,\n \"source_type\": SOURCE_TYPE_ROUTER,\n \"attributes\": {\n \"scanner\": scanner.__class__.__name__,\n **extra_attributes,\n },\n }\n\n zone_home = hass.states.get(hass.components.zone.ENTITY_ID_HOME)\n if zone_home:\n kwargs[\"gps\"] = [\n zone_home.attributes[ATTR_LATITUDE],\n zone_home.attributes[ATTR_LONGITUDE],\n ]\n kwargs[\"gps_accuracy\"] = 0\n\n hass.async_create_task(async_see_device(**kwargs))\n\n async_track_time_interval(hass, async_device_tracker_scan, interval)\n hass.async_create_task(async_device_tracker_scan(None))\n\n"}}},{"rowIdx":445,"cells":{"text":{"kind":"string","value":"\nfrom test import CollectorTestCase\nfrom test import get_collector_config\nfrom test import unittest\nfrom mock import Mock\nfrom mock import patch\n\nfrom diamond.collector import Collector\nfrom nfacct import NetfilterAccountingCollector\n\n##########################################################################\n\n\nclass TestNetfilterAccountingCollector(CollectorTestCase):\n\n def setUp(self):\n config = get_collector_config('NetfilterAccountingCollector', {\n 'interval': 10,\n 'bin': 'true',\n })\n\n self.collector = NetfilterAccountingCollector(config, None)\n\n def test_import(self):\n self.assertTrue(NetfilterAccountingCollector)\n\n @patch.object(Collector, 'publish')\n def test_no_counters(self, publish_mock):\n patch_communicate = patch(\n 'subprocess.Popen.communicate',\n Mock(return_value=('', '')))\n patch_communicate.start()\n self.collector.collect()\n patch_communicate.stop()\n\n self.assertPublishedMany(publish_mock, {})\n\n @patch.object(Collector, 'publish')\n def test_counters(self, publish_mock):\n patch_communicate = patch(\n 'subprocess.Popen.communicate',\n Mock(return_value=(self.getFixture('nfacct').getvalue(), '')))\n patch_communicate.start()\n self.collector.collect()\n 
patch_communicate.stop()

        self.assertPublishedMany(publish_mock, {
            'Tcp.pkts': 3,
            'Tcp.bytes': 300,
            'Udp.pkts': 0,
            'Udp.bytes': 0,
            'Tcp.Incoming.pkts': 1,
            'Tcp.Incoming.bytes': 100,
            'Tcp.Outgoing.pkts': 2,
            'Tcp.Outgoing.bytes': 200,
        })


##########################################################################
if __name__ == "__main__":
    unittest.main()


from os import environ
from sys import executable
from functools import partial

from riko.parsers import _make_content, entity2text

try:
    from twisted.internet.defer import maybeDeferred, Deferred
except ImportError:
    maybeDeferred = lambda *args: None
else:
    from twisted.internet import defer
    from twisted.internet.utils import getProcessOutput
    from twisted.internet.reactor import callLater

    from . import microdom
    from .microdom import EntityReference

    async_none = defer.succeed(None)
    async_return = partial(defer.succeed)
    async_partial = lambda f, **kwargs: partial(maybeDeferred, f, **kwargs)


def async_sleep(seconds):
    d = Deferred()
    callLater(seconds, d.callback, None)
    return d


def defer_to_process(command):
    return getProcessOutput(executable, ['-c', command], environ)


def xml2etree(f, xml=True):
    readable = hasattr(f, 'read')

    if xml and readable:
        parse = microdom.parseXML
    elif readable:
        parse = partial(microdom.parse, lenient=True)
    elif xml:
        parse = microdom.parseXMLString
    else:
        parse = partial(microdom.parseString, lenient=True)

    return parse(f)


def etree2dict(element, tag='content'):
    """Convert a microdom element tree into a dict imitating how Yahoo Pipes
    does it.

    TODO: checkout twisted.words.xish
    """
    i = dict(element.attributes) if hasattr(element, 'attributes') else {}
    value = element.nodeValue if hasattr(element, 'nodeValue') else None

    if isinstance(element, EntityReference):
        value = entity2text(value)

    i.update(_make_content(i, value, tag))

    for child in element.childNodes:
        tag = child.tagName if hasattr(child, 'tagName') else 'content'
        value = etree2dict(child, tag)

        # try to join the content first since microdom likes to split up
        # elements that contain a mix of text and entity reference
        try:
            i.update(_make_content(i, value, tag, append=False))
        except TypeError:
            i.update(_make_content(i, value, tag))

    if ('content' in i) and not set(i).difference(['content']):
        # element is leaf node and doesn't have attributes
        i = i['content']

    return i


import logging
from typing import List, Optional

from pizone import Controller, Zone

from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
    FAN_AUTO,
    FAN_HIGH,
    FAN_LOW,
    FAN_MEDIUM,
    HVAC_MODE_COOL,
    HVAC_MODE_DRY,
    HVAC_MODE_FAN_ONLY,
    HVAC_MODE_HEAT,
    HVAC_MODE_HEAT_COOL,
    HVAC_MODE_OFF,
    PRESET_ECO,
    PRESET_NONE,
    SUPPORT_FAN_MODE,
    SUPPORT_PRESET_MODE,
    SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import (
    ATTR_TEMPERATURE,
    CONF_EXCLUDE,
    PRECISION_HALVES,
    PRECISION_TENTHS,
    TEMP_CELSIUS,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.temperature import display_temp as show_temp
from homeassistant.helpers.typing import ConfigType, HomeAssistantType

from .const import (
    DATA_CONFIG,
DATA_DISCOVERY_SERVICE,\n DISPATCH_CONTROLLER_DISCONNECTED,\n DISPATCH_CONTROLLER_DISCOVERED,\n DISPATCH_CONTROLLER_RECONNECTED,\n DISPATCH_CONTROLLER_UPDATE,\n DISPATCH_ZONE_UPDATE,\n IZONE,\n)\n\n_LOGGER = logging.getLogger(__name__)\n\n_IZONE_FAN_TO_HA = {\n Controller.Fan.LOW: FAN_LOW,\n Controller.Fan.MED: FAN_MEDIUM,\n Controller.Fan.HIGH: FAN_HIGH,\n Controller.Fan.AUTO: FAN_AUTO,\n}\n\n\nasync def async_setup_entry(\n hass: HomeAssistantType, config: ConfigType, async_add_entities\n):\n \"\"\"Initialize an IZone Controller.\"\"\"\n disco = hass.data[DATA_DISCOVERY_SERVICE]\n\n @callback\n def init_controller(ctrl: Controller):\n \"\"\"Register the controller device and the containing zones.\"\"\"\n conf = hass.data.get(DATA_CONFIG) # type: ConfigType\n\n # Filter out any entities excluded in the config file\n if conf and ctrl.device_uid in conf[CONF_EXCLUDE]:\n _LOGGER.info(\"Controller UID=%s ignored as excluded\", ctrl.device_uid)\n return\n _LOGGER.info(\"Controller UID=%s discovered\", ctrl.device_uid)\n\n device = ControllerDevice(ctrl)\n async_add_entities([device])\n async_add_entities(device.zones.values())\n\n # create any components not yet created\n for controller in disco.pi_disco.controllers.values():\n init_controller(controller)\n\n # connect to register any further components\n async_dispatcher_connect(hass, DISPATCH_CONTROLLER_DISCOVERED, init_controller)\n\n return True\n\n\ndef _return_on_connection_error(ret=None):\n def wrap(func):\n def wrapped_f(*args, **kwargs):\n if not args[0].available:\n return ret\n try:\n return func(*args, **kwargs)\n except ConnectionError:\n return ret\n\n return wrapped_f\n\n return wrap\n\n\nclass ControllerDevice(ClimateEntity):\n \"\"\"Representation of iZone Controller.\"\"\"\n\n def __init__(self, controller: Controller) -> None:\n \"\"\"Initialise ControllerDevice.\"\"\"\n self._controller = controller\n\n self._supported_features = SUPPORT_FAN_MODE\n\n if (\n controller.ras_mode == \"master\" and controller.zone_ctrl == 13\n ) or controller.ras_mode == \"RAS\":\n self._supported_features |= SUPPORT_TARGET_TEMPERATURE\n\n self._state_to_pizone = {\n HVAC_MODE_COOL: Controller.Mode.COOL,\n HVAC_MODE_HEAT: Controller.Mode.HEAT,\n HVAC_MODE_HEAT_COOL: Controller.Mode.AUTO,\n HVAC_MODE_FAN_ONLY: Controller.Mode.VENT,\n HVAC_MODE_DRY: Controller.Mode.DRY,\n }\n if controller.free_air_enabled:\n self._supported_features |= SUPPORT_PRESET_MODE\n\n self._fan_to_pizone = {}\n for fan in controller.fan_modes:\n self._fan_to_pizone[_IZONE_FAN_TO_HA[fan]] = fan\n self._available = True\n\n self._device_info = {\n \"identifiers\": {(IZONE, self.unique_id)},\n \"name\": self.name,\n \"manufacturer\": \"IZone\",\n \"model\": self._controller.sys_type,\n }\n\n # Create the zones\n self.zones = {}\n for zone in controller.zones:\n self.zones[zone] = ZoneDevice(self, zone)\n\n async def async_added_to_hass(self):\n \"\"\"Call on adding to hass.\"\"\"\n # Register for connect/disconnect/update events\n @callback\n def controller_disconnected(ctrl: Controller, ex: Exception) -> None:\n \"\"\"Disconnected from controller.\"\"\"\n if ctrl is not self._controller:\n return\n self.set_available(False, ex)\n\n self.async_on_remove(\n async_dispatcher_connect(\n self.hass, DISPATCH_CONTROLLER_DISCONNECTED, controller_disconnected\n )\n )\n\n @callback\n def controller_reconnected(ctrl: Controller) -> None:\n \"\"\"Reconnected to controller.\"\"\"\n if ctrl is not self._controller:\n return\n self.set_available(True)\n\n self.async_on_remove(\n 
async_dispatcher_connect(\n self.hass, DISPATCH_CONTROLLER_RECONNECTED, controller_reconnected\n )\n )\n\n @callback\n def controller_update(ctrl: Controller) -> None:\n \"\"\"Handle controller data updates.\"\"\"\n if ctrl is not self._controller:\n return\n self.async_write_ha_state()\n for zone in self.zones.values():\n zone.async_schedule_update_ha_state()\n\n self.async_on_remove(\n async_dispatcher_connect(\n self.hass, DISPATCH_CONTROLLER_UPDATE, controller_update\n )\n )\n\n @property\n def available(self) -> bool:\n \"\"\"Return True if entity is available.\"\"\"\n return self._available\n\n @callback\n def set_available(self, available: bool, ex: Exception = None) -> None:\n \"\"\"\n Set availability for the controller.\n\n Also sets zone availability as they follow the same availability.\n \"\"\"\n if self.available == available:\n return\n\n if available:\n _LOGGER.info(\"Reconnected controller %s \", self._controller.device_uid)\n else:\n _LOGGER.info(\n \"Controller %s disconnected due to exception: %s\",\n self._controller.device_uid,\n ex,\n )\n\n self._available = available\n self.async_write_ha_state()\n for zone in self.zones.values():\n zone.async_schedule_update_ha_state()\n\n @property\n def device_info(self):\n \"\"\"Return the device info for the iZone system.\"\"\"\n return self._device_info\n\n @property\n def unique_id(self):\n \"\"\"Return the ID of the controller device.\"\"\"\n return self._controller.device_uid\n\n @property\n def name(self) -> str:\n \"\"\"Return the name of the entity.\"\"\"\n return f\"iZone Controller {self._controller.device_uid}\"\n\n @property\n def should_poll(self) -> bool:\n \"\"\"Return True if entity has to be polled for state.\n\n False if entity pushes its state to HA.\n \"\"\"\n return False\n\n @property\n def supported_features(self) -> int:\n \"\"\"Return the list of supported features.\"\"\"\n return self._supported_features\n\n @property\n def temperature_unit(self) -> str:\n \"\"\"Return the unit of measurement which this thermostat uses.\"\"\"\n return TEMP_CELSIUS\n\n @property\n def precision(self) -> float:\n \"\"\"Return the precision of the system.\"\"\"\n return PRECISION_TENTHS\n\n @property\n def device_state_attributes(self):\n \"\"\"Return the optional state attributes.\"\"\"\n return {\n \"supply_temperature\": show_temp(\n self.hass,\n self.supply_temperature,\n self.temperature_unit,\n self.precision,\n ),\n \"temp_setpoint\": show_temp(\n self.hass,\n self._controller.temp_setpoint,\n self.temperature_unit,\n PRECISION_HALVES,\n ),\n }\n\n @property\n def hvac_mode(self) -> str:\n \"\"\"Return current operation ie. 
heat, cool, idle.\"\"\"\n if not self._controller.is_on:\n return HVAC_MODE_OFF\n mode = self._controller.mode\n if mode == Controller.Mode.FREE_AIR:\n return HVAC_MODE_FAN_ONLY\n for (key, value) in self._state_to_pizone.items():\n if value == mode:\n return key\n assert False, \"Should be unreachable\"\n\n @property\n @_return_on_connection_error([])\n def hvac_modes(self) -> List[str]:\n \"\"\"Return the list of available operation modes.\"\"\"\n if self._controller.free_air:\n return [HVAC_MODE_OFF, HVAC_MODE_FAN_ONLY]\n return [HVAC_MODE_OFF, *self._state_to_pizone]\n\n @property\n @_return_on_connection_error(PRESET_NONE)\n def preset_mode(self):\n \"\"\"Eco mode is external air.\"\"\"\n return PRESET_ECO if self._controller.free_air else PRESET_NONE\n\n @property\n @_return_on_connection_error([PRESET_NONE])\n def preset_modes(self):\n \"\"\"Available preset modes, normal or eco.\"\"\"\n if self._controller.free_air_enabled:\n return [PRESET_NONE, PRESET_ECO]\n return [PRESET_NONE]\n\n @property\n @_return_on_connection_error()\n def current_temperature(self) -> Optional[float]:\n \"\"\"Return the current temperature.\"\"\"\n if self._controller.mode == Controller.Mode.FREE_AIR:\n return self._controller.temp_supply\n return self._controller.temp_return\n\n @property\n @_return_on_connection_error()\n def target_temperature(self) -> Optional[float]:\n \"\"\"Return the temperature we try to reach.\"\"\"\n if not self._supported_features & SUPPORT_TARGET_TEMPERATURE:\n return None\n return self._controller.temp_setpoint\n\n @property\n def supply_temperature(self) -> float:\n \"\"\"Return the current supply, or in duct, temperature.\"\"\"\n return self._controller.temp_supply\n\n @property\n def target_temperature_step(self) -> Optional[float]:\n \"\"\"Return the supported step of target temperature.\"\"\"\n return 0.5\n\n @property\n def fan_mode(self) -> Optional[str]:\n \"\"\"Return the fan setting.\"\"\"\n return _IZONE_FAN_TO_HA[self._controller.fan]\n\n @property\n def fan_modes(self) -> Optional[List[str]]:\n \"\"\"Return the list of available fan modes.\"\"\"\n return list(self._fan_to_pizone)\n\n @property\n @_return_on_connection_error(0.0)\n def min_temp(self) -> float:\n \"\"\"Return the minimum temperature.\"\"\"\n return self._controller.temp_min\n\n @property\n @_return_on_connection_error(50.0)\n def max_temp(self) -> float:\n \"\"\"Return the maximum temperature.\"\"\"\n return self._controller.temp_max\n\n async def wrap_and_catch(self, coro):\n \"\"\"Catch any connection errors and set unavailable.\"\"\"\n try:\n await coro\n except ConnectionError as ex:\n self.set_available(False, ex)\n else:\n self.set_available(True)\n\n async def async_set_temperature(self, **kwargs) -> None:\n \"\"\"Set new target temperature.\"\"\"\n if not self.supported_features & SUPPORT_TARGET_TEMPERATURE:\n self.async_schedule_update_ha_state(True)\n return\n temp = kwargs.get(ATTR_TEMPERATURE)\n if temp is not None:\n await self.wrap_and_catch(self._controller.set_temp_setpoint(temp))\n\n async def async_set_fan_mode(self, fan_mode: str) -> None:\n \"\"\"Set new target fan mode.\"\"\"\n fan = self._fan_to_pizone[fan_mode]\n await self.wrap_and_catch(self._controller.set_fan(fan))\n\n async def async_set_hvac_mode(self, hvac_mode: str) -> None:\n \"\"\"Set new target operation mode.\"\"\"\n if hvac_mode == HVAC_MODE_OFF:\n await self.wrap_and_catch(self._controller.set_on(False))\n return\n if not self._controller.is_on:\n await self.wrap_and_catch(self._controller.set_on(True))\n if 
self._controller.free_air:\n return\n mode = self._state_to_pizone[hvac_mode]\n await self.wrap_and_catch(self._controller.set_mode(mode))\n\n async def async_set_preset_mode(self, preset_mode: str) -> None:\n \"\"\"Set the preset mode.\"\"\"\n await self.wrap_and_catch(\n self._controller.set_free_air(preset_mode == PRESET_ECO)\n )\n\n async def async_turn_on(self) -> None:\n \"\"\"Turn the entity on.\"\"\"\n await self.wrap_and_catch(self._controller.set_on(True))\n\n\nclass ZoneDevice(ClimateEntity):\n \"\"\"Representation of iZone Zone.\"\"\"\n\n def __init__(self, controller: ControllerDevice, zone: Zone) -> None:\n \"\"\"Initialise ZoneDevice.\"\"\"\n self._controller = controller\n self._zone = zone\n self._name = zone.name.title()\n\n self._supported_features = 0\n if zone.type != Zone.Type.AUTO:\n self._state_to_pizone = {\n HVAC_MODE_OFF: Zone.Mode.CLOSE,\n HVAC_MODE_FAN_ONLY: Zone.Mode.OPEN,\n }\n else:\n self._state_to_pizone = {\n HVAC_MODE_OFF: Zone.Mode.CLOSE,\n HVAC_MODE_FAN_ONLY: Zone.Mode.OPEN,\n HVAC_MODE_HEAT_COOL: Zone.Mode.AUTO,\n }\n self._supported_features |= SUPPORT_TARGET_TEMPERATURE\n\n self._device_info = {\n \"identifiers\": {(IZONE, controller.unique_id, zone.index)},\n \"name\": self.name,\n \"manufacturer\": \"IZone\",\n \"via_device\": (IZONE, controller.unique_id),\n \"model\": zone.type.name.title(),\n }\n\n async def async_added_to_hass(self):\n \"\"\"Call on adding to hass.\"\"\"\n\n @callback\n def zone_update(ctrl: Controller, zone: Zone) -> None:\n \"\"\"Handle zone data updates.\"\"\"\n if zone is not self._zone:\n return\n self._name = zone.name.title()\n self.async_write_ha_state()\n\n self.async_on_remove(\n async_dispatcher_connect(self.hass, DISPATCH_ZONE_UPDATE, zone_update)\n )\n\n @property\n def available(self) -> bool:\n \"\"\"Return True if entity is available.\"\"\"\n return self._controller.available\n\n @property\n def assumed_state(self) -> bool:\n \"\"\"Return True if unable to access real state of the entity.\"\"\"\n return self._controller.assumed_state\n\n @property\n def device_info(self):\n \"\"\"Return the device info for the iZone system.\"\"\"\n return self._device_info\n\n @property\n def unique_id(self):\n \"\"\"Return the ID of the controller device.\"\"\"\n return f\"{self._controller.unique_id}_z{self._zone.index + 1}\"\n\n @property\n def name(self) -> str:\n \"\"\"Return the name of the entity.\"\"\"\n return self._name\n\n @property\n def should_poll(self) -> bool:\n \"\"\"Return True if entity has to be polled for state.\n\n False if entity pushes its state to HA.\n \"\"\"\n return False\n\n @property\n @_return_on_connection_error(0)\n def supported_features(self):\n \"\"\"Return the list of supported features.\"\"\"\n if self._zone.mode == Zone.Mode.AUTO:\n return self._supported_features\n return self._supported_features & ~SUPPORT_TARGET_TEMPERATURE\n\n @property\n def temperature_unit(self):\n \"\"\"Return the unit of measurement which this thermostat uses.\"\"\"\n return TEMP_CELSIUS\n\n @property\n def precision(self):\n \"\"\"Return the precision of the system.\"\"\"\n return PRECISION_TENTHS\n\n @property\n def hvac_mode(self):\n \"\"\"Return current operation ie. 

# ---- dataset row 448 ----

from uuid import uuid4

from homeassistant.components.alexa import config, smart_home
from homeassistant.core import Context

from tests.common import async_mock_service

TEST_URL = "https://api.amazonalexa.com/v3/events"
TEST_TOKEN_URL = "https://api.amazon.com/auth/o2/token"
TEST_LOCALE = "en-US"


class MockConfig(config.AbstractConfig):
    """Mock Alexa config."""

    entity_config = {
        "binary_sensor.test_doorbell": {"display_categories": "DOORBELL"},
        "binary_sensor.test_contact_forced": {"display_categories": "CONTACT_SENSOR"},
        "binary_sensor.test_motion_forced": {"display_categories": "MOTION_SENSOR"},
        "binary_sensor.test_motion_camera_event": {"display_categories": "CAMERA"},
        "camera.test": {"display_categories": "CAMERA"},
    }

    @property
    def supports_auth(self):
        """Return if config supports auth."""
        return True

    @property
    def endpoint(self):
        """Endpoint for report state."""
        return TEST_URL

    @property
    def locale(self):
        """Return config locale."""
        return TEST_LOCALE

    def should_expose(self, entity_id):
        """If an entity should be exposed."""
        return True

    async def async_get_access_token(self):
        """Get an access token."""
        return "thisisnotanacesstoken"

    async def async_accept_grant(self, code):
        """Accept a grant."""


DEFAULT_CONFIG = MockConfig(None)


def get_new_request(namespace, name, endpoint=None):
    """Generate a new API message."""
    raw_msg = {
        "directive": {
            "header": {
                "namespace": namespace,
                "name": name,
                "messageId": str(uuid4()),
                "correlationToken": str(uuid4()),
                "payloadVersion": "3",
            },
            "endpoint": {
                "scope": {"type": "BearerToken", "token": str(uuid4())},
                "endpointId": endpoint,
            },
            "payload": {},
        }
    }

    if not endpoint:
        raw_msg["directive"].pop("endpoint")

    return raw_msg


async def assert_request_calls_service(
    namespace,
    name,
    endpoint,
    service,
    hass,
    response_type="Response",
    payload=None,
    instance=None,
):
    """Assert an API request calls a hass service."""
    context = Context()
    request = get_new_request(namespace, name, endpoint)
    if payload:
        request["directive"]["payload"] = payload
    if instance:
        request["directive"]["header"]["instance"] = instance

    domain, service_name = service.split(".")
    calls = async_mock_service(hass, domain, service_name)

    msg = await smart_home.async_handle_message(hass, DEFAULT_CONFIG, request, context)
    await hass.async_block_till_done()

    assert len(calls) == 1
    call = calls[0]
    assert "event" in msg
    assert call.data["entity_id"] == endpoint.replace("#", ".")
    assert msg["event"]["header"]["name"] == response_type
    assert call.context == context

    return call, msg


async def assert_request_fails(
    namespace, name, endpoint, service_not_called, hass, payload=None
):
    """Assert an API request returns an ErrorResponse."""
    request = get_new_request(namespace, name, endpoint)
    if payload:
        request["directive"]["payload"] = payload

    domain, service_name = service_not_called.split(".")
    call = async_mock_service(hass, domain, service_name)

    msg = await smart_home.async_handle_message(hass, DEFAULT_CONFIG, request)
    await hass.async_block_till_done()

    assert not call
    assert "event" in msg
    assert msg["event"]["header"]["name"] == "ErrorResponse"

    return msg


async def assert_power_controller_works(endpoint, on_service, off_service, hass):
    """Assert PowerController API requests work."""
    await assert_request_calls_service(
        "Alexa.PowerController", "TurnOn", endpoint, on_service, hass
    )

    await assert_request_calls_service(
        "Alexa.PowerController", "TurnOff", endpoint, off_service, hass
    )


async def assert_scene_controller_works(
    endpoint, activate_service, deactivate_service, hass
):
    """Assert SceneController API requests work."""
    _, response = await assert_request_calls_service(
        "Alexa.SceneController",
        "Activate",
        endpoint,
        activate_service,
        hass,
        response_type="ActivationStarted",
    )
    assert response["event"]["payload"]["cause"]["type"] == "VOICE_INTERACTION"
    assert "timestamp" in response["event"]["payload"]

    if deactivate_service:
        await assert_request_calls_service(
            "Alexa.SceneController",
            "Deactivate",
            endpoint,
            deactivate_service,
            hass,
            response_type="DeactivationStarted",
        )
        cause_type = response["event"]["payload"]["cause"]["type"]
        assert cause_type == "VOICE_INTERACTION"
        assert "timestamp" in response["event"]["payload"]


async def reported_properties(hass, endpoint):
    """Use ReportState to get properties and return them.

    The result is a ReportedProperties instance, which has methods to make
    assertions about the properties.
    """
    request = get_new_request("Alexa", "ReportState", endpoint)
    msg = await smart_home.async_handle_message(hass, DEFAULT_CONFIG, request)
    await hass.async_block_till_done()
    return ReportedProperties(msg["context"]["properties"])


class ReportedProperties:
    """Class to help assert reported properties."""

    def __init__(self, properties):
        """Initialize class."""
        self.properties = properties

    def assert_not_has_property(self, namespace, name):
        """Assert a property does not exist."""
        for prop in self.properties:
            if prop["namespace"] == namespace and prop["name"] == name:
                assert False, f"Property {namespace}:{name} exists"

    def assert_equal(self, namespace, name, value):
        """Assert a property is equal to a given value."""
        for prop in self.properties:
            if prop["namespace"] == namespace and prop["name"] == name:
                assert prop["value"] == value
                return prop

        assert False, f"property {namespace}:{name} not in {self.properties!r}"
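
# Usage sketch for the helpers above, assuming a hypothetical exposed
# entity "light#kitchen" (the "#" is translated back to "." before the
# service-call assertion):
#
#     async def test_turn_on(hass):
#         await assert_power_controller_works(
#             "light#kitchen", "light.turn_on", "light.turn_off", hass
#         )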

# ---- dataset row 449 ----

import asyncio
import logging
from types import MappingProxyType
from typing import Any, Dict, Iterable, Optional

from homeassistant.const import (
    ATTR_ENTITY_ID,
    SERVICE_TURN_OFF,
    SERVICE_TURN_ON,
    STATE_OFF,
    STATE_ON,
)
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType

from . import (
    ATTR_DIRECTION,
    ATTR_OSCILLATING,
    ATTR_SPEED,
    DOMAIN,
    SERVICE_OSCILLATE,
    SERVICE_SET_DIRECTION,
    SERVICE_SET_SPEED,
)

_LOGGER = logging.getLogger(__name__)

VALID_STATES = {STATE_ON, STATE_OFF}
ATTRIBUTES = {  # attribute: service
    ATTR_DIRECTION: SERVICE_SET_DIRECTION,
    ATTR_OSCILLATING: SERVICE_OSCILLATE,
    ATTR_SPEED: SERVICE_SET_SPEED,
}


async def _async_reproduce_state(
    hass: HomeAssistantType,
    state: State,
    *,
    context: Optional[Context] = None,
    reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
    """Reproduce a single state."""
    cur_state = hass.states.get(state.entity_id)

    if cur_state is None:
        _LOGGER.warning("Unable to find entity %s", state.entity_id)
        return

    if state.state not in VALID_STATES:
        _LOGGER.warning(
            "Invalid state specified for %s: %s", state.entity_id, state.state
        )
        return

    # Return if we are already at the right state.
    if cur_state.state == state.state and all(
        check_attr_equal(cur_state.attributes, state.attributes, attr)
        for attr in ATTRIBUTES
    ):
        return

    service_data = {ATTR_ENTITY_ID: state.entity_id}
    service_calls = {}  # service: service_data

    if state.state == STATE_ON:
        # The fan should be on
        if cur_state.state != STATE_ON:
            # Turn on the fan at first
            service_calls[SERVICE_TURN_ON] = service_data

        for attr, service in ATTRIBUTES.items():
            # Call services to adjust the attributes
            if attr in state.attributes and not check_attr_equal(
                state.attributes, cur_state.attributes, attr
            ):
                data = service_data.copy()
                data[attr] = state.attributes[attr]
                service_calls[service] = data

    elif state.state == STATE_OFF:
        service_calls[SERVICE_TURN_OFF] = service_data

    for service, data in service_calls.items():
        await hass.services.async_call(
            DOMAIN, service, data, context=context, blocking=True
        )


async def async_reproduce_states(
    hass: HomeAssistantType,
    states: Iterable[State],
    *,
    context: Optional[Context] = None,
    reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
    """Reproduce Fan states."""
    await asyncio.gather(
        *(
            _async_reproduce_state(
                hass, state, context=context, reproduce_options=reproduce_options
            )
            for state in states
        )
    )


def check_attr_equal(
    attr1: MappingProxyType, attr2: MappingProxyType, attr_str: str
) -> bool:
    """Return true if the given attributes are equal."""
    return attr1.get(attr_str) == attr2.get(attr_str)
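
# Usage sketch (the entity id and attributes are made up for
# illustration): reproducing a saved "on" state issues fan.turn_on first,
# then one service call per attribute that differs from the current state.
#
#     await async_reproduce_states(
#         hass,
#         [State("fan.ceiling", "on", {"speed": "high", "oscillating": True})],
#     )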

# ---- dataset row 450 ----

import openzwavemqtt.const as const_ozw
from openzwavemqtt.const import CommandClass, ValueGenre, ValueIndex, ValueType

from . import const

DISCOVERY_SCHEMAS = (
    {  # Binary sensors
        const.DISC_COMPONENT: "binary_sensor",
        const.DISC_VALUES: {
            const.DISC_PRIMARY: {
                const.DISC_COMMAND_CLASS: CommandClass.SENSOR_BINARY,
                const.DISC_TYPE: ValueType.BOOL,
                const.DISC_GENRE: ValueGenre.USER,
            },
            "off_delay": {
                const.DISC_COMMAND_CLASS: CommandClass.CONFIGURATION,
                const.DISC_INDEX: 9,
                const.DISC_OPTIONAL: True,
            },
        },
    },
    {  # Notification CommandClass translates to binary_sensor
        const.DISC_COMPONENT: "binary_sensor",
        const.DISC_VALUES: {
            const.DISC_PRIMARY: {
                const.DISC_COMMAND_CLASS: CommandClass.NOTIFICATION,
                const.DISC_GENRE: ValueGenre.USER,
                const.DISC_TYPE: (ValueType.BOOL, ValueType.LIST),
            }
        },
    },
    {  # Z-Wave Thermostat device translates to Climate entity
        const.DISC_COMPONENT: "climate",
        const.DISC_GENERIC_DEVICE_CLASS: (
            const_ozw.GENERIC_TYPE_THERMOSTAT,
            const_ozw.GENERIC_TYPE_SENSOR_MULTILEVEL,
        ),
        const.DISC_SPECIFIC_DEVICE_CLASS: (
            const_ozw.SPECIFIC_TYPE_THERMOSTAT_GENERAL,
            const_ozw.SPECIFIC_TYPE_THERMOSTAT_GENERAL_V2,
            const_ozw.SPECIFIC_TYPE_SETBACK_THERMOSTAT,
            const_ozw.SPECIFIC_TYPE_THERMOSTAT_HEATING,
            const_ozw.SPECIFIC_TYPE_SETPOINT_THERMOSTAT,
            const_ozw.SPECIFIC_TYPE_NOT_USED,
        ),
        const.DISC_VALUES: {
            const.DISC_PRIMARY: {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_MODE,)
            },
            "mode": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_MODE,),
                const.DISC_OPTIONAL: True,
            },
            "temperature": {
                const.DISC_COMMAND_CLASS: (CommandClass.SENSOR_MULTILEVEL,),
                const.DISC_INDEX: (1,),
                const.DISC_OPTIONAL: True,
            },
            "fan_mode": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_FAN_MODE,),
                const.DISC_OPTIONAL: True,
            },
            "operating_state": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_OPERATING_STATE,),
                const.DISC_OPTIONAL: True,
            },
            "fan_action": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_FAN_STATE,),
                const.DISC_OPTIONAL: True,
            },
            "valve_position": {
                const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_MULTILEVEL,),
                const.DISC_INDEX: (0,),
                const.DISC_OPTIONAL: True,
            },
            "setpoint_heating": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
                const.DISC_INDEX: (1,),
                const.DISC_OPTIONAL: True,
            },
            "setpoint_cooling": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
                const.DISC_INDEX: (2,),
                const.DISC_OPTIONAL: True,
            },
            "setpoint_furnace": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
                const.DISC_INDEX: (7,),
                const.DISC_OPTIONAL: True,
            },
            "setpoint_dry_air": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
                const.DISC_INDEX: (8,),
                const.DISC_OPTIONAL: True,
            },
            "setpoint_moist_air": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
                const.DISC_INDEX: (9,),
                const.DISC_OPTIONAL: True,
            },
            "setpoint_auto_changeover": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
                const.DISC_INDEX: (10,),
                const.DISC_OPTIONAL: True,
            },
            "setpoint_eco_heating": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
                const.DISC_INDEX: (11,),
                const.DISC_OPTIONAL: True,
            },
            "setpoint_eco_cooling": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
                const.DISC_INDEX: (12,),
                const.DISC_OPTIONAL: True,
            },
            "setpoint_away_heating": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
                const.DISC_INDEX: (13,),
                const.DISC_OPTIONAL: True,
            },
            "setpoint_away_cooling": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
                const.DISC_INDEX: (14,),
                const.DISC_OPTIONAL: True,
            },
            "setpoint_full_power": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
                const.DISC_INDEX: (15,),
                const.DISC_OPTIONAL: True,
            },
        },
    },
    {  # Z-Wave Thermostat device without mode support
        const.DISC_COMPONENT: "climate",
        const.DISC_GENERIC_DEVICE_CLASS: (const_ozw.GENERIC_TYPE_THERMOSTAT,),
        const.DISC_SPECIFIC_DEVICE_CLASS: (
            const_ozw.SPECIFIC_TYPE_SETPOINT_THERMOSTAT,
            const_ozw.SPECIFIC_TYPE_NOT_USED,
        ),
        const.DISC_VALUES: {
            const.DISC_PRIMARY: {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,)
            },
            "temperature": {
                const.DISC_COMMAND_CLASS: (CommandClass.SENSOR_MULTILEVEL,),
                const.DISC_INDEX: (1,),
                const.DISC_OPTIONAL: True,
            },
            "operating_state": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_OPERATING_STATE,),
                const.DISC_OPTIONAL: True,
            },
            "valve_position": {
                const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_MULTILEVEL,),
                const.DISC_INDEX: (0,),
                const.DISC_OPTIONAL: True,
            },
            "setpoint_heating": {
                const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
                const.DISC_INDEX: (1,),
                const.DISC_OPTIONAL: True,
            },
        },
    },
    {  # Rollershutter
        const.DISC_COMPONENT: "cover",
        const.DISC_GENERIC_DEVICE_CLASS: (const_ozw.GENERIC_TYPE_SWITCH_MULTILEVEL,),
        const.DISC_SPECIFIC_DEVICE_CLASS: (
            const_ozw.SPECIFIC_TYPE_CLASS_A_MOTOR_CONTROL,
            const_ozw.SPECIFIC_TYPE_CLASS_B_MOTOR_CONTROL,
            const_ozw.SPECIFIC_TYPE_CLASS_C_MOTOR_CONTROL,
            const_ozw.SPECIFIC_TYPE_MOTOR_MULTIPOSITION,
            const_ozw.SPECIFIC_TYPE_SECURE_BARRIER_ADDON,
            const_ozw.SPECIFIC_TYPE_SECURE_DOOR,
        ),
        const.DISC_VALUES: {
            const.DISC_PRIMARY: {
                const.DISC_COMMAND_CLASS: CommandClass.SWITCH_MULTILEVEL,
                const.DISC_INDEX: ValueIndex.SWITCH_MULTILEVEL_LEVEL,
                const.DISC_GENRE: ValueGenre.USER,
            },
            "open": {
                const.DISC_COMMAND_CLASS: CommandClass.SWITCH_MULTILEVEL,
                const.DISC_INDEX: ValueIndex.SWITCH_MULTILEVEL_BRIGHT,
                const.DISC_OPTIONAL: True,
            },
            "close": {
                const.DISC_COMMAND_CLASS: CommandClass.SWITCH_MULTILEVEL,
                const.DISC_INDEX: ValueIndex.SWITCH_MULTILEVEL_DIM,
                const.DISC_OPTIONAL: True,
            },
        },
    },
    {  # Garage Door Barrier
        const.DISC_COMPONENT: "cover",
        const.DISC_GENERIC_DEVICE_CLASS: (const_ozw.GENERIC_TYPE_ENTRY_CONTROL,),
        const.DISC_SPECIFIC_DEVICE_CLASS: (
            const_ozw.SPECIFIC_TYPE_SECURE_BARRIER_ADDON,
        ),
        const.DISC_VALUES: {
            const.DISC_PRIMARY: {
                const.DISC_COMMAND_CLASS: CommandClass.BARRIER_OPERATOR,
                const.DISC_INDEX: ValueIndex.BARRIER_OPERATOR_LABEL,
            },
        },
    },
    {  # Fan
        const.DISC_COMPONENT: "fan",
        const.DISC_GENERIC_DEVICE_CLASS: const_ozw.GENERIC_TYPE_SWITCH_MULTILEVEL,
        const.DISC_SPECIFIC_DEVICE_CLASS: const_ozw.SPECIFIC_TYPE_FAN_SWITCH,
        const.DISC_VALUES: {
            const.DISC_PRIMARY: {
                const.DISC_COMMAND_CLASS: CommandClass.SWITCH_MULTILEVEL,
                const.DISC_INDEX: ValueIndex.SWITCH_MULTILEVEL_LEVEL,
                const.DISC_TYPE: ValueType.BYTE,
            },
        },
    },
    {  # Light
        const.DISC_COMPONENT: "light",
        const.DISC_GENERIC_DEVICE_CLASS: (
            const_ozw.GENERIC_TYPE_SWITCH_MULTILEVEL,
            const_ozw.GENERIC_TYPE_SWITCH_REMOTE,
        ),
        const.DISC_SPECIFIC_DEVICE_CLASS: (
            const_ozw.SPECIFIC_TYPE_POWER_SWITCH_MULTILEVEL,
            const_ozw.SPECIFIC_TYPE_SCENE_SWITCH_MULTILEVEL,
            const_ozw.SPECIFIC_TYPE_COLOR_TUNABLE_BINARY,
            const_ozw.SPECIFIC_TYPE_COLOR_TUNABLE_MULTILEVEL,
            const_ozw.SPECIFIC_TYPE_NOT_USED,
        ),
        const.DISC_VALUES: {
            const.DISC_PRIMARY: {
                const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_MULTILEVEL,),
                const.DISC_INDEX: ValueIndex.SWITCH_MULTILEVEL_LEVEL,
                const.DISC_TYPE: ValueType.BYTE,
            },
            "dimming_duration": {
                const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_MULTILEVEL,),
                const.DISC_INDEX: ValueIndex.SWITCH_MULTILEVEL_DURATION,
                const.DISC_OPTIONAL: True,
            },
            "color": {
                const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_COLOR,),
                const.DISC_INDEX: ValueIndex.SWITCH_COLOR_COLOR,
                const.DISC_OPTIONAL: True,
            },
            "color_channels": {
                const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_COLOR,),
                const.DISC_INDEX: ValueIndex.SWITCH_COLOR_CHANNELS,
                const.DISC_OPTIONAL: True,
            },
            "min_kelvin": {
                const.DISC_COMMAND_CLASS: (CommandClass.CONFIGURATION,),
                const.DISC_INDEX: 81,  # PR for upstream to add SWITCH_COLOR_CT_WARM
                const.DISC_OPTIONAL: True,
            },
            "max_kelvin": {
                const.DISC_COMMAND_CLASS: (CommandClass.CONFIGURATION,),
                const.DISC_INDEX: 82,  # PR for upstream to add SWITCH_COLOR_CT_COLD
                const.DISC_OPTIONAL: True,
            },
        },
    },
    {  # All other text/numeric sensors
        const.DISC_COMPONENT: "sensor",
        const.DISC_VALUES: {
            const.DISC_PRIMARY: {
                const.DISC_COMMAND_CLASS: (
                    CommandClass.SENSOR_MULTILEVEL,
                    CommandClass.METER,
                    CommandClass.ALARM,
                    CommandClass.SENSOR_ALARM,
                    CommandClass.INDICATOR,
                    CommandClass.BATTERY,
                    CommandClass.NOTIFICATION,
                    CommandClass.BASIC,
                ),
                const.DISC_TYPE: (
                    ValueType.DECIMAL,
                    ValueType.INT,
                    ValueType.STRING,
                    ValueType.BYTE,
                    ValueType.LIST,
                ),
            }
        },
    },
    {  # Switch platform
        const.DISC_COMPONENT: "switch",
        const.DISC_VALUES: {
            const.DISC_PRIMARY: {
                const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_BINARY,),
                const.DISC_TYPE: ValueType.BOOL,
                const.DISC_GENRE: ValueGenre.USER,
            }
        },
    },
    {  # Lock platform
        const.DISC_COMPONENT: "lock",
        const.DISC_VALUES: {
            const.DISC_PRIMARY: {
                const.DISC_COMMAND_CLASS: (CommandClass.DOOR_LOCK,),
                const.DISC_TYPE: ValueType.BOOL,
                const.DISC_GENRE: ValueGenre.USER,
            }
        },
    },
)


def check_node_schema(node, schema):
    """Check if node matches the passed node schema."""
    if const.DISC_NODE_ID in schema and node.node_id not in schema[const.DISC_NODE_ID]:
        return False
    if const.DISC_GENERIC_DEVICE_CLASS in schema and not eq_or_in(
        node.node_generic, schema[const.DISC_GENERIC_DEVICE_CLASS]
    ):
        return False
    if const.DISC_SPECIFIC_DEVICE_CLASS in schema and not eq_or_in(
        node.node_specific, schema[const.DISC_SPECIFIC_DEVICE_CLASS]
    ):
        return False
    return True


def check_value_schema(value, schema):
    """Check if the value matches the passed value schema."""
    if const.DISC_COMMAND_CLASS in schema and not eq_or_in(
        value.parent.command_class_id, schema[const.DISC_COMMAND_CLASS]
    ):
        return False
    if const.DISC_TYPE in schema and not eq_or_in(value.type, schema[const.DISC_TYPE]):
        return False
    if const.DISC_GENRE in schema and not eq_or_in(
        value.genre, schema[const.DISC_GENRE]
    ):
        return False
    if const.DISC_INDEX in schema and not eq_or_in(
        value.index, schema[const.DISC_INDEX]
    ):
        return False
    if const.DISC_INSTANCE in schema and not eq_or_in(
        value.instance, schema[const.DISC_INSTANCE]
    ):
        return False

    return True


def eq_or_in(val, options):
    """Return True if options contains value or if value is equal to options."""
    return val in options if isinstance(options, tuple) else val == options
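
# Small self-contained illustration of the matching helper above:
# eq_or_in() lets every schema field hold either a single accepted value
# or a tuple of them, which is why the schemas freely mix scalars and tuples.
assert eq_or_in(3, (1, 2, 3)) is True   # tuple: membership test
assert eq_or_in(3, 3) is True           # scalar: equality test
assert eq_or_in(3, (1, 2)) is False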

# ---- dataset row 451 ----

from requests import ConnectTimeout

from homeassistant.components.camera.const import DOMAIN as CAMERA_DOMAIN
from homeassistant.components.canary.const import CONF_FFMPEG_ARGUMENTS, DOMAIN
from homeassistant.config_entries import (
    ENTRY_STATE_LOADED,
    ENTRY_STATE_NOT_LOADED,
    ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME
from homeassistant.setup import async_setup_component

from . import YAML_CONFIG, init_integration

from tests.async_mock import patch


async def test_import_from_yaml(hass, canary) -> None:
    """Test import from YAML."""
    with patch(
        "homeassistant.components.canary.async_setup_entry",
        return_value=True,
    ):
        assert await async_setup_component(hass, DOMAIN, {DOMAIN: YAML_CONFIG})
        await hass.async_block_till_done()

    entries = hass.config_entries.async_entries(DOMAIN)
    assert len(entries) == 1

    assert entries[0].data[CONF_USERNAME] == "test-username"
    assert entries[0].data[CONF_PASSWORD] == "test-password"
    assert entries[0].data[CONF_TIMEOUT] == 5


async def test_import_from_yaml_ffmpeg(hass, canary) -> None:
    """Test import from YAML with ffmpeg arguments."""
    with patch(
        "homeassistant.components.canary.async_setup_entry",
        return_value=True,
    ):
        assert await async_setup_component(
            hass,
            DOMAIN,
            {
                DOMAIN: YAML_CONFIG,
                CAMERA_DOMAIN: [{"platform": DOMAIN, CONF_FFMPEG_ARGUMENTS: "-v"}],
            },
        )
        await hass.async_block_till_done()

    entries = hass.config_entries.async_entries(DOMAIN)
    assert len(entries) == 1

    assert entries[0].data[CONF_USERNAME] == "test-username"
    assert entries[0].data[CONF_PASSWORD] == "test-password"
    assert entries[0].data[CONF_TIMEOUT] == 5
    assert entries[0].data.get(CONF_FFMPEG_ARGUMENTS) == "-v"


async def test_unload_entry(hass, canary):
    """Test successful unload of entry."""
    entry = await init_integration(hass)

    assert entry
    assert len(hass.config_entries.async_entries(DOMAIN)) == 1
    assert entry.state == ENTRY_STATE_LOADED

    assert await hass.config_entries.async_unload(entry.entry_id)
    await hass.async_block_till_done()

    assert entry.state == ENTRY_STATE_NOT_LOADED
    assert not hass.data.get(DOMAIN)


async def test_async_setup_raises_entry_not_ready(hass, canary):
    """Test that it throws ConfigEntryNotReady when exception occurs during setup."""
    canary.side_effect = ConnectTimeout()

    entry = await init_integration(hass)
    assert entry
    assert entry.state == ENTRY_STATE_SETUP_RETRY

# ---- dataset row 452 ----

import json
import threading

from absl import flags
from perfkitbenchmarker import network
from perfkitbenchmarker import providers
from perfkitbenchmarker import resource
from perfkitbenchmarker.providers.rackspace import util

FLAGS = flags.FLAGS

INGRESS = 'ingress'
EGRESS = 'egress'
SEC_GROUP_DIRECTIONS = frozenset([INGRESS, EGRESS])

IPV4 = 'ipv4'
IPV6 = 'ipv6'
ETHER_TYPES = frozenset([IPV4, IPV6])

TCP = 'tcp'
UDP = 'udp'
ICMP = 'icmp'
SEC_GROUP_PROTOCOLS = frozenset([TCP, UDP, ICMP])

PORT_RANGE_MIN = '1'
PORT_RANGE_MAX = '65535'

PUBLIC_NET_ID = '00000000-0000-0000-0000-000000000000'
SERVICE_NET_ID = '11111111-1111-1111-1111-111111111111'
DEFAULT_SUBNET_CIDR = '192.168.0.0/16'

SSH_PORT = 22


class RackspaceSecurityGroup(resource.BaseResource):
  """An object representing a Rackspace Security Group."""

  def __init__(self, name):
    super(RackspaceSecurityGroup, self).__init__()
    self.name = name
    self.id = None

  def _Create(self):
    cmd = util.RackCLICommand(self, 'networks', 'security-group', 'create')
    cmd.flags['name'] = self.name
    stdout, stderr, _ = cmd.Issue()
    resp = json.loads(stdout)
    self.id = resp['ID']

  def _Delete(self):
    if self.id is None:
      return
    cmd = util.RackCLICommand(self, 'networks', 'security-group', 'delete')
    cmd.flags['id'] = self.id
    cmd.Issue()

  def _Exists(self):
    if self.id is None:
      return False
    cmd = util.RackCLICommand(self, 'networks', 'security-group', 'get')
    cmd.flags['id'] = self.id
    stdout, stderr, _ = cmd.Issue()
    return not stderr


class RackspaceSecurityGroupRule(resource.BaseResource):
  """An object representing a Security Group Rule."""

  def __init__(self, sec_group_rule_name, sec_group_id, direction=INGRESS,
               ip_ver=IPV4, protocol=TCP, port_range_min=PORT_RANGE_MIN,
               port_range_max=PORT_RANGE_MAX, source_cidr=None):
    super(RackspaceSecurityGroupRule, self).__init__()
    self.id = None
    self.name = sec_group_rule_name
    self.sec_group_id = sec_group_id
    assert direction in SEC_GROUP_DIRECTIONS
    self.direction = direction
    assert ip_ver in ETHER_TYPES
    self.ip_ver = ip_ver
    assert protocol in SEC_GROUP_PROTOCOLS
    self.protocol = protocol
    assert (int(PORT_RANGE_MIN) <= int(port_range_min) <= int(PORT_RANGE_MAX))
    self.port_range_min = port_range_min
    assert (int(PORT_RANGE_MIN) <= int(port_range_max) <= int(PORT_RANGE_MAX))
    self.port_range_max = port_range_max
    assert int(port_range_min) <= int(port_range_max)
    self.source_cidr = source_cidr

  def __eq__(self, other):
    # Name does not matter
    return (self.sec_group_id == other.sec_group_id and
            self.direction == other.direction and
            self.ip_ver == other.ip_ver and
            self.protocol == other.protocol and
            self.source_cidr == other.source_cidr)

  def _Create(self):
    cmd = util.RackCLICommand(self, 'networks', 'security-group-rule', 'create')
    cmd.flags['security-group-id'] = self.sec_group_id
    cmd.flags['direction'] = self.direction
    cmd.flags['ether-type'] = self.ip_ver
    cmd.flags['protocol'] = self.protocol
    cmd.flags['port-range-min'] = self.port_range_min
    cmd.flags['port-range-max'] = self.port_range_max
    if self.source_cidr:
      cmd.flags['remote-ip-prefix'] = self.source_cidr
    stdout, stderr, _ = cmd.Issue()
    resp = json.loads(stdout)
    self.id = resp['ID']

  def _Delete(self):
    if self.id is None:
      return
    cmd = util.RackCLICommand(self, 'networks', 'security-group-rule', 'delete')
    cmd.flags['id'] = self.id
    cmd.Issue()

  def _Exists(self):
    if self.id is None:
      return False
    cmd = util.RackCLICommand(self, 'networks', 'security-group-rule', 'get')
    cmd.flags['id'] = self.id
    stdout, stderr, _ = cmd.Issue()
    return not stderr


class RackspaceSubnet(resource.BaseResource):
  """An object that represents a Rackspace Subnet."""

  def __init__(self, network_id, cidr, ip_ver, name=None, tenant_id=None):
    super(RackspaceSubnet, self).__init__()
    self.id = None
    self.network_id = network_id
    self.cidr = cidr
    self.ip_ver = ip_ver
    self.name = name
    self.tenant_id = tenant_id

  def _Create(self):
    cmd = util.RackCLICommand(self, 'networks', 'subnet', 'create')
    cmd.flags['network-id'] = self.network_id
    cmd.flags['cidr'] = self.cidr
    cmd.flags['ip-version'] = self.ip_ver
    if self.name:
      cmd.flags['name'] = self.name
    if self.tenant_id:
      cmd.flags['tenant-id'] = self.tenant_id
    stdout, stderr, _ = cmd.Issue()
    resp = json.loads(stdout)
    self.id = resp['ID']

  def _Delete(self):
    if self.id is None:
      return
    cmd = util.RackCLICommand(self, 'networks', 'subnet', 'delete')
    cmd.flags['id'] = self.id
    cmd.Issue()

  def _Exists(self):
    if self.id is None:
      return False
    cmd = util.RackCLICommand(self, 'networks', 'subnet', 'get')
    cmd.flags['id'] = self.id
    stdout, stderr, _ = cmd.Issue()
    return not stderr


class RackspaceNetworkSpec(network.BaseNetworkSpec):
  """Object containing the information needed to create a Rackspace network."""

  def __init__(self, tenant_id=None, region=None, **kwargs):
    super(RackspaceNetworkSpec, self).__init__(**kwargs)
    self.tenant_id = tenant_id
    self.region = region


class RackspaceNetworkResource(resource.BaseResource):
  """Object representing a Rackspace Network Resource."""

  def __init__(self, name, tenant_id=None):
    super(RackspaceNetworkResource, self).__init__()
    self.name = name
    self.tenant_id = tenant_id
    self.id = None

  def _Create(self):
    cmd = util.RackCLICommand(self, 'networks', 'network', 'create')
    cmd.flags['name'] = self.name
    if self.tenant_id:
      cmd.flags['tenant-id'] = self.tenant_id
    stdout, _, _ = cmd.Issue()
    resp = json.loads(stdout)
    if resp['ID']:
      self.id = resp['ID']

  def _Delete(self):
    if self.id is None:
      return
    cmd = util.RackCLICommand(self, 'networks', 'network', 'delete')
    cmd.flags['id'] = self.id
    cmd.Issue()

  def _Exists(self):
    if self.id is None:
      return False
    cmd = util.RackCLICommand(self, 'networks', 'network', 'get')
    cmd.flags['id'] = self.id
    stdout, stderr, _ = cmd.Issue()
    return not stderr


class RackspaceNetwork(network.BaseNetwork):
  """An object representing a Rackspace Network."""

  CLOUD = providers.RACKSPACE

  def __init__(self, network_spec):
    super(RackspaceNetwork, self).__init__(network_spec)
    self.tenant_id = network_spec.tenant_id
    name = FLAGS.rackspace_network_name or 'pkb-network-%s' % FLAGS.run_uri
    self.network_resource = RackspaceNetworkResource(name, self.tenant_id)
    self.subnet = RackspaceSubnet(self.network_resource.id, DEFAULT_SUBNET_CIDR,
                                  ip_ver='4', name='subnet-%s' % name,
                                  tenant_id=self.tenant_id)
    self.security_group = RackspaceSecurityGroup('default-internal-%s' % name)
    self.default_firewall_rules = []

  @staticmethod
  def _GetNetworkSpecFromVm(vm):
    return RackspaceNetworkSpec(tenant_id=vm.tenant_id, region=vm.zone)

  @classmethod
  def _GetKeyFromNetworkSpec(cls, spec):
    return (cls.CLOUD, spec.tenant_id, spec.region)

  def Create(self):
    if FLAGS.rackspace_network_name is None:
      self.network_resource.Create()
      self.subnet.Create()
      self.security_group.Create()
      self.default_firewall_rules = self._GenerateDefaultRules(
          self.security_group.id, self.network_resource.name)
      for rule in self.default_firewall_rules:
        rule.Create()

  def Delete(self):
    if FLAGS.rackspace_network_name is None:
      for rule in self.default_firewall_rules:
        rule.Delete()
      self.security_group.Delete()
      self.subnet.Delete()
      self.network_resource.Delete()

  def _GenerateDefaultRules(self, sec_group_id, network_name):
    firewall_rules = [
        RackspaceSecurityGroupRule(
            sec_group_rule_name='tcp-default-internal-%s' % network_name,
            sec_group_id=sec_group_id,
            direction=INGRESS,
            ip_ver=IPV4,
            protocol=TCP),
        RackspaceSecurityGroupRule(
            sec_group_rule_name='udp-default-internal-%s' % network_name,
            sec_group_id=sec_group_id,
            direction=INGRESS,
            ip_ver=IPV4, protocol=UDP),
        RackspaceSecurityGroupRule(
            sec_group_rule_name='icmp-default-internal-%s' % network_name,
            sec_group_id=sec_group_id,
            direction=INGRESS,
            ip_ver=IPV4, protocol=ICMP)]
    return firewall_rules


class RackspaceFirewall(network.BaseFirewall):
  """An object representing a Rackspace Security Group applied to PublicNet
  and ServiceNet."""

  CLOUD = providers.RACKSPACE

  def __init__(self):
    # TODO(meteorfox) Support a Firewall per region
    self._lock = threading.Lock()  # Guards security-group creation/deletion
    self.firewall_rules = {}

  def AllowPort(self, vm, start_port, end_port=None, source_range=None):
    # At Rackspace all ports are open by default
    # TODO(meteorfox) Implement security groups support
    if FLAGS.rackspace_use_security_group:
      raise NotImplementedError()

  def DisallowAllPorts(self):
    if FLAGS.rackspace_use_security_group:
      raise NotImplementedError()
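
# Usage sketch (illustrative names only): because __eq__ above ignores the
# rule name, two differently named rules with the same group, direction,
# ether type, protocol and CIDR compare equal.
#
#     rule_a = RackspaceSecurityGroupRule('a-rule', sec_group_id='sg-1')
#     rule_b = RackspaceSecurityGroupRule('b-rule', sec_group_id='sg-1')
#     assert rule_a == rule_b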

# ---- dataset row 453 ----

from json import loads

from homeassistant.components.advantage_air.const import (
    ADVANTAGE_AIR_STATE_CLOSE,
    ADVANTAGE_AIR_STATE_OPEN,
)
from homeassistant.components.cover import (
    ATTR_POSITION,
    DEVICE_CLASS_DAMPER,
    DOMAIN as COVER_DOMAIN,
    SERVICE_CLOSE_COVER,
    SERVICE_OPEN_COVER,
    SERVICE_SET_COVER_POSITION,
)
from homeassistant.const import ATTR_ENTITY_ID, STATE_OPEN

from tests.components.advantage_air import (
    TEST_SET_RESPONSE,
    TEST_SET_URL,
    TEST_SYSTEM_DATA,
    TEST_SYSTEM_URL,
    add_mock_config,
)


async def test_cover_async_setup_entry(hass, aioclient_mock):
    """Test cover setup without sensors."""

    aioclient_mock.get(
        TEST_SYSTEM_URL,
        text=TEST_SYSTEM_DATA,
    )
    aioclient_mock.get(
        TEST_SET_URL,
        text=TEST_SET_RESPONSE,
    )

    await add_mock_config(hass)

    registry = await hass.helpers.entity_registry.async_get_registry()

    assert len(aioclient_mock.mock_calls) == 1

    # Test Cover Zone Entity
    entity_id = "cover.zone_open_without_sensor"
    state = hass.states.get(entity_id)
    assert state
    assert state.state == STATE_OPEN
    assert state.attributes.get("device_class") == DEVICE_CLASS_DAMPER
    assert state.attributes.get("current_position") == 100

    entry = registry.async_get(entity_id)
    assert entry
    assert entry.unique_id == "uniqueid-ac2-z01"

    await hass.services.async_call(
        COVER_DOMAIN,
        SERVICE_CLOSE_COVER,
        {ATTR_ENTITY_ID: [entity_id]},
        blocking=True,
    )
    assert len(aioclient_mock.mock_calls) == 3
    assert aioclient_mock.mock_calls[-2][0] == "GET"
    assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
    data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
    assert data["ac2"]["zones"]["z01"]["state"] == ADVANTAGE_AIR_STATE_CLOSE
    assert aioclient_mock.mock_calls[-1][0] == "GET"
    assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"

    await hass.services.async_call(
        COVER_DOMAIN,
        SERVICE_OPEN_COVER,
        {ATTR_ENTITY_ID: [entity_id]},
        blocking=True,
    )
    assert len(aioclient_mock.mock_calls) == 5
    assert aioclient_mock.mock_calls[-2][0] == "GET"
    assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
    data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
    assert data["ac2"]["zones"]["z01"]["state"] == ADVANTAGE_AIR_STATE_OPEN
    assert data["ac2"]["zones"]["z01"]["value"] == 100
    assert aioclient_mock.mock_calls[-1][0] == "GET"
    assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"

    await hass.services.async_call(
        COVER_DOMAIN,
        SERVICE_SET_COVER_POSITION,
        {ATTR_ENTITY_ID: [entity_id], ATTR_POSITION: 50},
        blocking=True,
    )
    assert len(aioclient_mock.mock_calls) == 7
    assert aioclient_mock.mock_calls[-2][0] == "GET"
    assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
    data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
    assert data["ac2"]["zones"]["z01"]["value"] == 50
    assert aioclient_mock.mock_calls[-1][0] == "GET"
    assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"

    await hass.services.async_call(
        COVER_DOMAIN,
        SERVICE_SET_COVER_POSITION,
        {ATTR_ENTITY_ID: [entity_id], ATTR_POSITION: 0},
        blocking=True,
    )
    assert len(aioclient_mock.mock_calls) == 9
    assert aioclient_mock.mock_calls[-2][0] == "GET"
    assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
    data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
    assert data["ac2"]["zones"]["z01"]["state"] == ADVANTAGE_AIR_STATE_CLOSE
    assert aioclient_mock.mock_calls[-1][0] == "GET"
    assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"

# ---- dataset row 454 ----

import asyncio
import logging
from typing import Tuple

import discord

from redbot.core import Config, checks, commands
from redbot.core.i18n import Translator, cog_i18n
from redbot.core.utils.chat_formatting import box
from .announcer import Announcer
from .converters import SelfRole

log = logging.getLogger("red.admin")

T_ = Translator("Admin", __file__)

_ = lambda s: s
GENERIC_FORBIDDEN = _(
    "I attempted to do something that Discord denied me permissions for."
    " Your command failed to successfully complete."
)

HIERARCHY_ISSUE_ADD = _(
    "I can not give {role.name} to {member.display_name}"
    " because that role is higher than or equal to my highest role"
    " in the Discord hierarchy."
)

HIERARCHY_ISSUE_REMOVE = _(
    "I can not remove {role.name} from {member.display_name}"
    " because that role is higher than or equal to my highest role"
    " in the Discord hierarchy."
)

ROLE_HIERARCHY_ISSUE = _(
    "I can not edit {role.name}"
    " because that role is higher than or equal to my highest role"
    " in the Discord hierarchy."
)

USER_HIERARCHY_ISSUE_ADD = _(
    "I can not let you give {role.name} to {member.display_name}"
    " because that role is higher than or equal to your highest role"
    " in the Discord hierarchy."
)

USER_HIERARCHY_ISSUE_REMOVE = _(
    "I can not let you remove {role.name} from {member.display_name}"
    " because that role is higher than or equal to your highest role"
    " in the Discord hierarchy."
)

ROLE_USER_HIERARCHY_ISSUE = _(
    "I can not let you edit {role.name}"
    " because that role is higher than or equal to your highest role"
    " in the Discord hierarchy."
)

NEED_MANAGE_ROLES = _("I need manage roles permission to do that.")

RUNNING_ANNOUNCEMENT = _(
    "I am already announcing something. If you would like to make a"
    " different announcement please use `{prefix}announce cancel`"
    " first."
)
_ = T_


@cog_i18n(_)
class Admin(commands.Cog):
    """A collection of server administration utilities."""

    def __init__(self):
        self.config = Config.get_conf(self, 8237492837454039, force_registration=True)

        self.config.register_global(serverlocked=False, schema_version=0)

        self.config.register_guild(
            announce_channel=None,  # Integer ID
            selfroles=[],  # List of integer ID's
        )

        self.__current_announcer = None
        self._ready = asyncio.Event()
        asyncio.create_task(self.handle_migrations())
        # As this is a data migration, don't store this for cancelation.

    async def cog_before_invoke(self, ctx: commands.Context):
        await self._ready.wait()

    async def red_delete_data_for_user(self, **kwargs):
        """ Nothing to delete """
        return

    async def handle_migrations(self):

        lock = self.config.get_guilds_lock()
        async with lock:
            # This prevents the edge case of someone loading admin,
            # unloading it, loading it again during a migration
            current_schema = await self.config.schema_version()

            if current_schema == 0:
                await self.migrate_config_from_0_to_1()
                await self.config.schema_version.set(1)

        self._ready.set()

    async def migrate_config_from_0_to_1(self):

        all_guilds = await self.config.all_guilds()

        for guild_id, guild_data in all_guilds.items():
            if guild_data.get("announce_ignore", False):
                async with self.config.guild_from_id(guild_id).all(
                    acquire_lock=False
                ) as guild_config:
                    guild_config.pop("announce_channel", None)
                    guild_config.pop("announce_ignore", None)

    def cog_unload(self):
        try:
            self.__current_announcer.cancel()
        except AttributeError:
            pass

    def is_announcing(self) -> bool:
        """
        Is the bot currently announcing something?
        :return:
        """
        if self.__current_announcer is None:
            return False

        return self.__current_announcer.active or False

    @staticmethod
    def pass_hierarchy_check(ctx: commands.Context, role: discord.Role) -> bool:
        """
        Determines if the bot has a higher role than the given one.
        :param ctx:
        :param role: Role object.
        :return:
        """
        return ctx.guild.me.top_role > role

    @staticmethod
    def pass_user_hierarchy_check(ctx: commands.Context, role: discord.Role) -> bool:
        """
        Determines if a user is allowed to add/remove/edit the given role.
        :param ctx:
        :param role:
        :return:
        """
        return ctx.author.top_role > role or ctx.author == ctx.guild.owner

    async def _addrole(
        self, ctx: commands.Context, member: discord.Member, role: discord.Role, *, check_user=True
    ):
        if role in member.roles:
            await ctx.send(
                _("{member.display_name} already has the role {role.name}.").format(
                    role=role, member=member
                )
            )
            return
        if check_user and not self.pass_user_hierarchy_check(ctx, role):
            await ctx.send(_(USER_HIERARCHY_ISSUE_ADD).format(role=role, member=member))
            return
        if not self.pass_hierarchy_check(ctx, role):
            await ctx.send(_(HIERARCHY_ISSUE_ADD).format(role=role, member=member))
            return
        if not ctx.guild.me.guild_permissions.manage_roles:
            await ctx.send(_(NEED_MANAGE_ROLES))
            return
        try:
            await member.add_roles(role)
        except discord.Forbidden:
            await ctx.send(_(GENERIC_FORBIDDEN))
        else:
            await ctx.send(
                _("I successfully added {role.name} to {member.display_name}").format(
                    role=role, member=member
                )
            )

    async def _removerole(
        self, ctx: commands.Context, member: discord.Member, role: discord.Role, *, check_user=True
    ):
        if role not in member.roles:
            await ctx.send(
                _("{member.display_name} does not have the role {role.name}.").format(
                    role=role, member=member
                )
            )
            return
        if check_user and not self.pass_user_hierarchy_check(ctx, role):
            await ctx.send(_(USER_HIERARCHY_ISSUE_REMOVE).format(role=role, member=member))
            return
        if not self.pass_hierarchy_check(ctx, role):
            await ctx.send(_(HIERARCHY_ISSUE_REMOVE).format(role=role, member=member))
            return
        if not ctx.guild.me.guild_permissions.manage_roles:
            await ctx.send(_(NEED_MANAGE_ROLES))
            return
        try:
            await member.remove_roles(role)
        except discord.Forbidden:
            await ctx.send(_(GENERIC_FORBIDDEN))
        else:
            await ctx.send(
                _("I successfully removed {role.name} from {member.display_name}").format(
                    role=role, member=member
                )
            )

    @commands.command()
    @commands.guild_only()
    @checks.admin_or_permissions(manage_roles=True)
    async def addrole(
        self, ctx: commands.Context, rolename: discord.Role, *, user: discord.Member = None
    ):
        """
        Add a role to a user.

        Use double quotes if the role contains spaces.
        If user is left blank it defaults to the author of the command.
        """
        if user is None:
            user = ctx.author
        await self._addrole(ctx, user, rolename)

    @commands.command()
    @commands.guild_only()
    @checks.admin_or_permissions(manage_roles=True)
    async def removerole(
        self, ctx: commands.Context, rolename: discord.Role, *, user: discord.Member = None
    ):
        """
        Remove a role from a user.

        Use double quotes if the role contains spaces.
        If user is left blank it defaults to the author of the command.
        """
        if user is None:
            user = ctx.author
        await self._removerole(ctx, user, rolename)

    @commands.group()
    @commands.guild_only()
    @checks.admin_or_permissions(manage_roles=True)
    async def editrole(self, ctx: commands.Context):
        """Edit role settings."""
        pass

    @editrole.command(name="colour", aliases=["color"])
    async def editrole_colour(
        self, ctx: commands.Context, role: discord.Role, value: discord.Colour
    ):
        """
        Edit a role's colour.

        Use double quotes if the role contains spaces.
        Colour must be in hexadecimal format.
        [Online colour picker](http://www.w3schools.com/colors/colors_picker.asp)

        Examples:
            `[p]editrole colour "The Transistor" #ff0000`
            `[p]editrole colour Test #ff9900`
        """
        author = ctx.author
        reason = "{}({}) changed the colour of role '{}'".format(author.name, author.id, role.name)

        if not self.pass_user_hierarchy_check(ctx, role):
            await ctx.send(_(ROLE_USER_HIERARCHY_ISSUE).format(role=role))
            return
        if not self.pass_hierarchy_check(ctx, role):
            await ctx.send(_(ROLE_HIERARCHY_ISSUE).format(role=role))
            return
        if not ctx.guild.me.guild_permissions.manage_roles:
            await ctx.send(_(NEED_MANAGE_ROLES))
            return
        try:
            await role.edit(reason=reason, color=value)
        except discord.Forbidden:
            await ctx.send(_(GENERIC_FORBIDDEN))
        else:
            log.info(reason)
            await ctx.send(_("Done."))

    @editrole.command(name="name")
    async def edit_role_name(self, ctx: commands.Context, role: discord.Role, name: str):
        """
        Edit a role's name.

        Use double quotes if the role or the name contain spaces.

        Example:
            `[p]editrole name \"The Transistor\" Test`
        """
        author = ctx.message.author
        old_name = role.name
        reason = "{}({}) changed the name of role '{}' to '{}'".format(
            author.name, author.id, old_name, name
        )

        if not self.pass_user_hierarchy_check(ctx, role):
            await ctx.send(_(ROLE_USER_HIERARCHY_ISSUE).format(role=role))
            return
        if not self.pass_hierarchy_check(ctx, role):
            await ctx.send(_(ROLE_HIERARCHY_ISSUE).format(role=role))
            return
        if not ctx.guild.me.guild_permissions.manage_roles:
            await ctx.send(_(NEED_MANAGE_ROLES))
            return
        try:
            await role.edit(reason=reason, name=name)
        except discord.Forbidden:
            await ctx.send(_(GENERIC_FORBIDDEN))
        else:
            log.info(reason)
            await ctx.send(_("Done."))

    @commands.group(invoke_without_command=True)
    @checks.is_owner()
    async def announce(self, ctx: commands.Context, *, message: str):
        """Announce a message to all servers the bot is in."""
        if not self.is_announcing():
            announcer = Announcer(ctx, message, config=self.config)
            announcer.start()

            self.__current_announcer = announcer

            await ctx.send(_("The announcement has begun."))
        else:
            prefix = ctx.clean_prefix
            await ctx.send(_(RUNNING_ANNOUNCEMENT).format(prefix=prefix))

    @announce.command(name="cancel")
    async def announce_cancel(self, ctx):
        """Cancel a running announce."""
        if not self.is_announcing():
            await ctx.send(_("There is no currently running announcement."))
            return
        self.__current_announcer.cancel()
        await ctx.send(_("The current announcement has been cancelled."))

    @commands.group()
    @commands.guild_only()
    @checks.guildowner_or_permissions(administrator=True)
    async def announceset(self, ctx):
        """Change how announcements are sent in this guild."""
        pass

    @announceset.command(name="channel")
    async def announceset_channel(self, ctx, *, channel: discord.TextChannel = None):
        """
        Change the channel where the bot will send announcements.

        If channel is left blank it defaults to the current channel.
        """
        if channel is None:
            channel = ctx.channel
        await self.config.guild(ctx.guild).announce_channel.set(channel.id)
        await ctx.send(
            _("The announcement channel has been set to {channel.mention}").format(channel=channel)
        )

    @announceset.command(name="clearchannel")
    async def announceset_clear_channel(self, ctx):
        """Unsets the channel for announcements."""
        await self.config.guild(ctx.guild).announce_channel.clear()
        await ctx.tick()

    async def _valid_selfroles(self, guild: discord.Guild) -> Tuple[discord.Role]:
        """
        Returns a tuple of valid selfroles
        :param guild:
        :return:
        """
        selfrole_ids = set(await self.config.guild(guild).selfroles())
        guild_roles = guild.roles

        valid_roles = tuple(r for r in guild_roles if r.id in selfrole_ids)
        valid_role_ids = set(r.id for r in valid_roles)

        if selfrole_ids != valid_role_ids:
            await self.config.guild(guild).selfroles.set(list(valid_role_ids))

        # noinspection PyTypeChecker
        return valid_roles

    @commands.guild_only()
    @commands.group()
    async def selfrole(self, ctx: commands.Context):
        """Apply selfroles."""
        pass

    @selfrole.command(name="add")
    async def selfrole_add(self, ctx: commands.Context, *, selfrole: SelfRole):
        """
        Add a selfrole to yourself.

        Server admins must have configured the role as user settable.
        NOTE: The role is case sensitive!
        """
        # noinspection PyTypeChecker
        await self._addrole(ctx, ctx.author, selfrole, check_user=False)

    @selfrole.command(name="remove")
    async def selfrole_remove(self, ctx: commands.Context, *, selfrole: SelfRole):
        """
        Remove a selfrole from yourself.

        Server admins must have configured the role as user settable.
        NOTE: The role is case sensitive!
        """
        # noinspection PyTypeChecker
        await self._removerole(ctx, ctx.author, selfrole, check_user=False)

    @selfrole.command(name="list")
    async def selfrole_list(self, ctx: commands.Context):
        """
        Lists all available selfroles.
        """
        selfroles = await self._valid_selfroles(ctx.guild)
        fmt_selfroles = "\n".join(["+ " + r.name for r in selfroles])

        if not fmt_selfroles:
            await ctx.send("There are currently no selfroles.")
            return

        msg = _("Available Selfroles:\n{selfroles}").format(selfroles=fmt_selfroles)
        await ctx.send(box(msg, "diff"))

    @commands.group()
    @checks.admin_or_permissions(manage_roles=True)
    async def selfroleset(self, ctx: commands.Context):
        """Manage selfroles."""
        pass

    @selfroleset.command(name="add")
    async def selfroleset_add(self, ctx: commands.Context, *, role: discord.Role):
        """
        Add a role to the list of available selfroles.

        NOTE: The role is case sensitive!
        """
        if not self.pass_user_hierarchy_check(ctx, role):
            await ctx.send(
                _(
                    "I cannot let you add {role.name} as a selfrole because that role is higher than or equal to your highest role in the Discord hierarchy."
                ).format(role=role)
            )
            return
        async with self.config.guild(ctx.guild).selfroles() as curr_selfroles:
            if role.id not in curr_selfroles:
                curr_selfroles.append(role.id)
                await ctx.send(_("Added."))
                return

        await ctx.send(_("That role is already a selfrole."))

    @selfroleset.command(name="remove")
    async def selfroleset_remove(self, ctx: commands.Context, *, role: SelfRole):
        """
        Remove a role from the list of available selfroles.

        NOTE: The role is case sensitive!
        """
        if not self.pass_user_hierarchy_check(ctx, role):
            await ctx.send(
                _(
                    "I cannot let you remove {role.name} from being a selfrole because that role is higher than or equal to your highest role in the Discord hierarchy."
                ).format(role=role)
            )
            return
        async with self.config.guild(ctx.guild).selfroles() as curr_selfroles:
            curr_selfroles.remove(role.id)

        await ctx.send(_("Removed."))

    @commands.command()
    @checks.is_owner()
    async def serverlock(self, ctx: commands.Context):
        """Lock a bot to its current servers only."""
        serverlocked = await self.config.serverlocked()
        await self.config.serverlocked.set(not serverlocked)

        if serverlocked:
            await ctx.send(_("The bot is no longer serverlocked."))
        else:
            await ctx.send(_("The bot is now serverlocked."))

    # region Event Handlers
    @commands.Cog.listener()
    async def on_guild_join(self, guild: discord.Guild):
        if await self.config.serverlocked():
            await guild.leave()


# endregion
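
# Self-contained sketch of the hierarchy rule enforced above (plain ints
# stand in for discord.Role positions, an assumption for illustration):
# managing a role requires a strictly higher top role, so equal positions
# are rejected.
def _can_manage(actor_top_role_position: int, target_role_position: int) -> bool:
    return actor_top_role_position > target_role_position

assert _can_manage(5, 4) is True
assert _can_manage(5, 5) is False  # equal is not enough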

# ---- dataset row 455 ----

import pytest
import pytest_httpbin
import vcr
from vcr.patch import force_reset
from assertions import assert_cassette_empty, assert_is_json

urllib3 = pytest.importorskip("urllib3")


@pytest.fixture(scope="module")
def verify_pool_mgr():
    return urllib3.PoolManager(
        cert_reqs="CERT_REQUIRED", ca_certs=pytest_httpbin.certs.where()  # Force certificate check.
    )


@pytest.fixture(scope="module")
def pool_mgr():
    return urllib3.PoolManager(cert_reqs="CERT_NONE")


def test_status_code(httpbin_both, tmpdir, verify_pool_mgr):
    """Ensure that we can read the status code"""
    url = httpbin_both.url
    with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
        status_code = verify_pool_mgr.request("GET", url).status

    with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
        assert status_code == verify_pool_mgr.request("GET", url).status


def test_headers(tmpdir, httpbin_both, verify_pool_mgr):
    """Ensure that we can read the headers back"""
    url = httpbin_both.url
    with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
        headers = verify_pool_mgr.request("GET", url).headers

    with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
        assert headers == verify_pool_mgr.request("GET", url).headers


def test_body(tmpdir, httpbin_both, verify_pool_mgr):
    """Ensure the responses are all identical enough"""
    url = httpbin_both.url + "/bytes/1024"
    with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
        content = verify_pool_mgr.request("GET", url).data

    with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
        assert content == verify_pool_mgr.request("GET", url).data


def test_auth(tmpdir, httpbin_both, verify_pool_mgr):
    """Ensure that we can handle basic auth"""
    auth = ("user", "passwd")
    headers = urllib3.util.make_headers(basic_auth="{}:{}".format(*auth))
    url = httpbin_both.url + "/basic-auth/user/passwd"
    with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
        one = verify_pool_mgr.request("GET", url, headers=headers)

    with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
        two = verify_pool_mgr.request("GET", url, headers=headers)
        assert one.data == two.data
        assert one.status == two.status


def test_auth_failed(tmpdir, httpbin_both, verify_pool_mgr):
    """Ensure that we can save failed auth statuses"""
    auth = ("user", "wrongwrongwrong")
    headers = urllib3.util.make_headers(basic_auth="{}:{}".format(*auth))
    url = httpbin_both.url + "/basic-auth/user/passwd"
    with vcr.use_cassette(str(tmpdir.join("auth-failed.yaml"))) as cass:
        # Ensure that this is empty to begin with
        assert_cassette_empty(cass)
        one = verify_pool_mgr.request("GET", url, headers=headers)
        two = verify_pool_mgr.request("GET", url, headers=headers)
        assert one.data == two.data
        assert one.status == two.status == 401


def test_post(tmpdir, httpbin_both, verify_pool_mgr):
    """Ensure that we can post and cache the results"""
    data = {"key1": "value1", "key2": "value2"}
    url = httpbin_both.url + "/post"
    with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))):
        req1 = verify_pool_mgr.request("POST", url, data).data

    with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))):
        req2 = verify_pool_mgr.request("POST", url, data).data

    assert req1 == req2


def test_redirects(tmpdir, httpbin_both, verify_pool_mgr):
    """Ensure that we can handle redirects"""
    url = httpbin_both.url + "/redirect-to?url=bytes/1024"
    with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))):
        content = verify_pool_mgr.request("GET", url).data

    with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))) as cass:
        assert content == verify_pool_mgr.request("GET", url).data
        # Ensure that we've now cached *two* responses. One for the redirect
        # and one for the final fetch
        assert len(cass) == 2
        assert cass.play_count == 2


def test_cross_scheme(tmpdir, httpbin, httpbin_secure, verify_pool_mgr):
    """Ensure that requests between schemes are treated separately"""
    # First fetch a url under http, and then again under https and then
    # ensure that we haven't served anything out of cache, and we have two
    # requests / response pairs in the cassette
    with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
        verify_pool_mgr.request("GET", httpbin_secure.url)
        verify_pool_mgr.request("GET", httpbin.url)
        assert cass.play_count == 0
        assert len(cass) == 2


def test_gzip(tmpdir, httpbin_both, verify_pool_mgr):
    """
    Ensure that requests (actually urllib3) is able to automatically decompress
    the response body
    """
    url = httpbin_both.url + "/gzip"
    response = verify_pool_mgr.request("GET", url)

    with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
        response = verify_pool_mgr.request("GET", url)
        assert_is_json(response.data)

    with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
        assert_is_json(response.data)


def test_https_with_cert_validation_disabled(tmpdir, httpbin_secure, pool_mgr):
    with vcr.use_cassette(str(tmpdir.join("cert_validation_disabled.yaml"))):
        pool_mgr.request("GET", httpbin_secure.url)


def test_urllib3_force_reset():
    cpool = urllib3.connectionpool
    http_original = cpool.HTTPConnection
    https_original = cpool.HTTPSConnection
    verified_https_original = cpool.VerifiedHTTPSConnection
    with vcr.use_cassette(path="test"):
        first_cassette_HTTPConnection = cpool.HTTPConnection
        first_cassette_HTTPSConnection = cpool.HTTPSConnection
        first_cassette_VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
        with force_reset():
            assert cpool.HTTPConnection is http_original
            assert cpool.HTTPSConnection is https_original
            assert cpool.VerifiedHTTPSConnection is verified_https_original
        assert cpool.HTTPConnection is first_cassette_HTTPConnection
        assert cpool.HTTPSConnection is first_cassette_HTTPSConnection
        assert cpool.VerifiedHTTPSConnection is first_cassette_VerifiedHTTPSConnection
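
# The record/replay pattern the tests above rely on, as a standalone
# sketch (cassette name and URL are arbitrary): the first block records,
# the second replays without touching the network.
#
#     http = urllib3.PoolManager()
#     with vcr.use_cassette("example.yaml"):
#         recorded = http.request("GET", "https://example.com").status
#     with vcr.use_cassette("example.yaml"):
#         assert http.request("GET", "https://example.com").status == recorded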

# ---- dataset row 456 ----

import cerberus
import cerberus.errors

base_schema = {
    'ansible': {
        'type': 'dict',
        'schema': {
            'config_file': {
                'type': 'string',
            },
            'playbook': {
                'type': 'string',
            },
            'raw_env_vars': {
                'type': 'dict',
                'keyschema': {
                    'type': 'string',
                    'regex': '^[A-Z0-9_-]+$',
                },
            },
            'extra_vars': {
                'type': 'string',
            },
            'verbose': {
                'type': 'boolean',
            },
            'become': {
                'type': 'boolean',
            },
            'tags': {
                'type': 'string',
            },
        }
    },
    'driver': {
        'type': 'dict',
        'schema': {
            'name': {
                'type': 'string',
            },
        }
    },
    'vagrant': {
        'type': 'dict',
        'schema': {
            'platforms': {
                'type': 'list',
                'schema': {
                    'type': 'dict',
                    'schema': {
                        'name': {
                            'type': 'string',
                        },
                        'box': {
                            'type': 'string',
                        },
                        'box_version': {
                            'type': 'string',
                        },
                        'box_url': {
                            'type': 'string',
                        },
                    }
                }
            },
            'providers': {
                'type': 'list',
                'schema': {
                    'type': 'dict',
                    'schema': {
                        'name': {
                            'type': 'string',
                        },
                        'type': {
                            'type': 'string',
                        },
                        'options': {
                            'type': 'dict',
                        },
                    }
                }
            },
            'instances': {
                'type': 'list',
                'schema': {
                    'type': 'dict',
                    'schema': {
                        'name': {
                            'type': 'string',
                        },
                        'ansible_groups': {
                            'type': 'list',
                            'schema': {
                                'type': 'string',
                            }
                        },
                        'interfaces': {
                            'type': 'list',
                            'schema': {
                                'type': 'dict',
                            }
                        },
                        'raw_config_args': {
                            'type': 'list',
                            'schema': {
                                'type': 'string',
                            }
                        },
                    }
                }
            },
        }
    },
    'verifier': {
        'type': 'dict',
        'schema': {
            'name': {
                'type': 'string',
            },
            'options': {
                'type': 'dict',
            },
        }
    },
}


def validate(c):
    v = cerberus.Validator()
    v.validate(c, base_schema)

    return v.errors


# ---- dataset row 457 ----

import voluptuous as vol

import homeassistant.helpers.config_validation as cv

CONF_ACCESS_KEY = "access_key"
CONF_APP_ID = "app_id"

DATA_TTN = "data_thethingsnetwork"
DOMAIN = "thethingsnetwork"

TTN_ACCESS_KEY = "ttn_access_key"
TTN_APP_ID = "ttn_app_id"
TTN_DATA_STORAGE_URL = (
    "https://{app_id}.data.thethingsnetwork.org/{endpoint}/{device_id}"
)

CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_APP_ID): cv.string,
                vol.Required(CONF_ACCESS_KEY): cv.string,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)


async def async_setup(hass, config):
    """Initialize The Things Network component."""
    conf = config[DOMAIN]
    app_id = conf.get(CONF_APP_ID)
    access_key = conf.get(CONF_ACCESS_KEY)

    hass.data[DATA_TTN] = {TTN_ACCESS_KEY: access_key, TTN_APP_ID: app_id}

    return True


# ---- dataset row 458 ----

import nltk

from .unit import Unit


class Stemming(Unit):
    """
    Process unit for token stemming.

    :param stemmer: stemmer to use, `porter` or `lancaster`.
    """

    def __init__(self, stemmer='porter'):
        """Initialization."""
        self.stemmer = stemmer

    def transform(self, input_: list) -> list:
        """
        Reducing inflected words to their word stem, base or root form.

        :param input_: list of string to be stemmed.
        """
        if self.stemmer == 'porter':
            porter_stemmer = nltk.stem.PorterStemmer()
            return [porter_stemmer.stem(token) for token in input_]
        elif self.stemmer == 'lancaster' or self.stemmer == 'krovetz':
            lancaster_stemmer = nltk.stem.lancaster.LancasterStemmer()
            return [lancaster_stemmer.stem(token) for token in input_]
        else:
            raise ValueError(
                'Not supported stemmer type: {}'.format(
                    self.stemmer))
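
# Usage sketch (requires NLTK; the stems shown are what the Porter and
# Lancaster algorithms are expected to produce):
#
#     Stemming().transform(["running", "flies"])            # -> ['run', 'fli']
#     Stemming(stemmer='lancaster').transform(["running"])  # -> ['run']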
import voluptuous as vol

import homeassistant.helpers.config_validation as cv

CONF_ACCESS_KEY = "access_key"
CONF_APP_ID = "app_id"

DATA_TTN = "data_thethingsnetwork"
DOMAIN = "thethingsnetwork"

TTN_ACCESS_KEY = "ttn_access_key"
TTN_APP_ID = "ttn_app_id"
TTN_DATA_STORAGE_URL = (
    "https://{app_id}.data.thethingsnetwork.org/{endpoint}/{device_id}"
)

CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_APP_ID): cv.string,
                vol.Required(CONF_ACCESS_KEY): cv.string,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)


async def async_setup(hass, config):
    """Initialize The Things Network component."""
    conf = config[DOMAIN]
    app_id = conf.get(CONF_APP_ID)
    access_key = conf.get(CONF_ACCESS_KEY)

    hass.data[DATA_TTN] = {TTN_ACCESS_KEY: access_key, TTN_APP_ID: app_id}

    return True

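
# Illustrative sketch (added): what CONFIG_SCHEMA accepts. The app id and
# access key are made-up placeholders.
validated = CONFIG_SCHEMA(
    {
        "thethingsnetwork": {
            "app_id": "my-ttn-app",
            "access_key": "ttn-account-v2.example-secret",
        }
    }
)
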
import nltk

from .unit import Unit


class Stemming(Unit):
    """
    Process unit for token stemming.

    :param stemmer: stemmer to use, `porter` or `lancaster`.
    """

    def __init__(self, stemmer='porter'):
        """Initialization."""
        self.stemmer = stemmer

    def transform(self, input_: list) -> list:
        """
        Reducing inflected words to their word stem, base or root form.

        :param input_: list of string to be stemmed.
        """
        if self.stemmer == 'porter':
            porter_stemmer = nltk.stem.PorterStemmer()
            return [porter_stemmer.stem(token) for token in input_]
        elif self.stemmer == 'lancaster':
            lancaster_stemmer = nltk.stem.lancaster.LancasterStemmer()
            return [lancaster_stemmer.stem(token) for token in input_]
        else:
            raise ValueError(
                'Not supported stemmer type: {}'.format(
                    self.stemmer))

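
# Illustrative sketch (added): running a token list through the unit. The
# tokens are invented for the example.
tokens = ['running', 'flies', 'easily']
stemmed = Stemming(stemmer='porter').transform(tokens)
# PorterStemmer maps e.g. 'running' -> 'run' and 'flies' -> 'fli'
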
import sys
from unittest.mock import Mock

import twisted.internet
from twisted.trial import unittest

from autobahn.twisted import choosereactor


class ChooseReactorTests(unittest.TestCase):

    def patch_reactor(self, name, new_reactor):
        """
        Patch ``name`` so that Twisted will grab a fake reactor instead of
        a real one.
        """
        if hasattr(twisted.internet, name):
            self.patch(twisted.internet, name, new_reactor)
        else:
            def _cleanup():
                delattr(twisted.internet, name)
            self.addCleanup(_cleanup)

            setattr(twisted.internet, name, new_reactor)

    def patch_modules(self):
        """
        Patch ``sys.modules`` so that Twisted believes there is no
        installed reactor.
        """
        old_modules = dict(sys.modules)

        new_modules = dict(sys.modules)
        del new_modules["twisted.internet.reactor"]

        def _cleanup():
            sys.modules = old_modules

        self.addCleanup(_cleanup)
        sys.modules = new_modules

    def test_unknown(self):
        """
        ``install_optimal_reactor`` will use the default reactor if it is
        unable to detect the platform it is running on.
        """
        reactor_mock = Mock()
        self.patch_reactor("selectreactor", reactor_mock)
        self.patch(sys, "platform", "unknown")

        # Emulate that a reactor has not been installed
        self.patch_modules()

        choosereactor.install_optimal_reactor()
        reactor_mock.install.assert_called_once_with()

    def test_mac(self):
        """
        ``install_optimal_reactor`` will install KQueueReactor on
        Darwin (OS X).
        """
        reactor_mock = Mock()
        self.patch_reactor("kqreactor", reactor_mock)
        self.patch(sys, "platform", "darwin")

        # Emulate that a reactor has not been installed
        self.patch_modules()

        choosereactor.install_optimal_reactor()
        reactor_mock.install.assert_called_once_with()

    def test_win(self):
        """
        ``install_optimal_reactor`` will install IOCPReactor on Windows.
        """
        if sys.platform != 'win32':
            raise unittest.SkipTest('unit test requires Windows')

        reactor_mock = Mock()
        self.patch_reactor("iocpreactor", reactor_mock)
        self.patch(sys, "platform", "win32")

        # Emulate that a reactor has not been installed
        self.patch_modules()

        choosereactor.install_optimal_reactor()
        reactor_mock.install.assert_called_once_with()

    def test_bsd(self):
        """
        ``install_optimal_reactor`` will install KQueueReactor on BSD.
        """
        reactor_mock = Mock()
        self.patch_reactor("kqreactor", reactor_mock)
        self.patch(sys, "platform", "freebsd11")

        # Emulate that a reactor has not been installed
        self.patch_modules()

        choosereactor.install_optimal_reactor()
        reactor_mock.install.assert_called_once_with()

    def test_linux(self):
        """
        ``install_optimal_reactor`` will install EPollReactor on Linux.
        """
        reactor_mock = Mock()
        self.patch_reactor("epollreactor", reactor_mock)
        self.patch(sys, "platform", "linux")

        # Emulate that a reactor has not been installed
        self.patch_modules()

        choosereactor.install_optimal_reactor()
        reactor_mock.install.assert_called_once_with()

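
# Illustrative sketch (added): the pattern these tests exercise. At import
# time no reactor is installed yet; `install_optimal_reactor` picks the best
# reactor for the host platform, after which the usual reactor import works.
from autobahn.twisted import choosereactor

choosereactor.install_optimal_reactor()
from twisted.internet import reactor  # now resolves to the installed reactor
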
import os
import sys

from coverage import env
from coverage.backward import litems, range  # pylint: disable=redefined-builtin
from coverage.debug import short_stack
from coverage.disposition import FileDisposition
from coverage.misc import CoverageException, isolate_module
from coverage.pytracer import PyTracer

os = isolate_module(os)


try:
    # Use the C extension code when we can, for speed.
    from coverage.tracer import CTracer, CFileDisposition
except ImportError:
    # Couldn't import the C extension, maybe it isn't built.
    if os.getenv('COVERAGE_TEST_TRACER') == 'c':
        # During testing, we use the COVERAGE_TEST_TRACER environment variable
        # to indicate that we've fiddled with the environment to test this
        # fallback code.  If we thought we had a C tracer, but couldn't import
        # it, then exit quickly and clearly instead of dribbling confusing
        # errors.  I'm using sys.exit here instead of an exception because an
        # exception here causes all sorts of other noise in unittest.
        sys.stderr.write("*** COVERAGE_TEST_TRACER is 'c' but can't import CTracer!\n")
        sys.exit(1)
    CTracer = None


class Collector(object):
    """Collects trace data.

    Creates a Tracer object for each thread, since they track stack
    information.  Each Tracer points to the same shared data, contributing
    traced data points.

    When the Collector is started, it creates a Tracer for the current thread,
    and installs a function to create Tracers for each new thread started.
    When the Collector is stopped, all active Tracers are stopped.

    Threads started while the Collector is stopped will never have Tracers
    associated with them.

    """

    # The stack of active Collectors.  Collectors are added here when started,
    # and popped when stopped.  Collectors on the stack are paused when not
    # the top, and resumed when they become the top again.
    _collectors = []

    # The concurrency settings we support here.
    SUPPORTED_CONCURRENCIES = set(["greenlet", "eventlet", "gevent", "thread"])

    def __init__(
        self, should_trace, check_include, should_start_context, file_mapper,
        timid, branch, warn, concurrency,
    ):
        """Create a collector.

        `should_trace` is a function, taking a file name and a frame, and
        returning a `coverage.FileDisposition object`.

        `check_include` is a function taking a file name and a frame. It
        returns a boolean: True if the file should be traced, False if not.

        `should_start_context` is a function taking a frame, and returning a
        string. If the frame should be the start of a new context, the string
        is the new context. If the frame should not be the start of a new
        context, return None.

        `file_mapper` is a function taking a filename, and returning a Unicode
        filename.  The result is the name that will be recorded in the data
        file.

        If `timid` is true, then a slower simpler trace function will be
        used.  This is important for some environments where manipulation of
        tracing functions make the faster more sophisticated trace function
        not operate properly.

        If `branch` is true, then branches will be measured.  This involves
        collecting data on which statements followed each other (arcs).  Use
        `get_arc_data` to get the arc data.

        `warn` is a warning function, taking a single string message argument
        and an optional slug argument which will be a string or None, to be
        used if a warning needs to be issued.

        `concurrency` is a list of strings indicating the concurrency
        libraries in use.  Valid values are "greenlet", "eventlet", "gevent",
        or "thread" (the default).  Of these four values, only one can be
        supplied.  Other values are ignored.

        """
        self.should_trace = should_trace
        self.check_include = check_include
        self.should_start_context = should_start_context
        self.file_mapper = file_mapper
        self.warn = warn
        self.branch = branch
        self.threading = None
        self.covdata = None

        self.static_context = None

        self.origin = short_stack()

        self.concur_id_func = None
        self.mapped_file_cache = {}

        # We can handle a few concurrency options here, but only one at a time.
        these_concurrencies = self.SUPPORTED_CONCURRENCIES.intersection(concurrency)
        if len(these_concurrencies) > 1:
            raise CoverageException("Conflicting concurrency settings: %s" % concurrency)
        self.concurrency = these_concurrencies.pop() if these_concurrencies else ''

        try:
            if self.concurrency == "greenlet":
                import greenlet
                self.concur_id_func = greenlet.getcurrent
            elif self.concurrency == "eventlet":
                import eventlet.greenthread     # pylint: disable=import-error,useless-suppression
                self.concur_id_func = eventlet.greenthread.getcurrent
            elif self.concurrency == "gevent":
                import gevent                   # pylint: disable=import-error,useless-suppression
                self.concur_id_func = gevent.getcurrent
            elif self.concurrency == "thread" or not self.concurrency:
                # It's important to import threading only if we need it.  If
                # it's imported early, and the program being measured uses
                # gevent, then gevent's monkey-patching won't work properly.
                import threading
                self.threading = threading
            else:
                raise CoverageException("Don't understand concurrency=%s" % concurrency)
        except ImportError:
            raise CoverageException(
                "Couldn't trace with concurrency=%s, the module isn't installed." % (
                    self.concurrency,
                )
            )

        self.reset()

        if timid:
            # Being timid: use the simple Python trace function.
            self._trace_class = PyTracer
        else:
            # Being fast: use the C Tracer if it is available, else the Python
            # trace function.
            self._trace_class = CTracer or PyTracer

        if self._trace_class is CTracer:
            self.file_disposition_class = CFileDisposition
            self.supports_plugins = True
        else:
            self.file_disposition_class = FileDisposition
            self.supports_plugins = False

    def __repr__(self):
        return "<Collector at 0x%x: %s>" % (id(self), self.tracer_name())

    def use_data(self, covdata, context):
        """Use `covdata` for recording data."""
        self.covdata = covdata
        self.static_context = context
        self.covdata.set_context(self.static_context)

    def tracer_name(self):
        """Return the class name of the tracer we're using."""
        return self._trace_class.__name__

    def _clear_data(self):
        """Clear out existing data, but stay ready for more collection."""
        # We used to use self.data.clear(), but that would remove filename
        # keys and data values that were still in use higher up the stack
        # when we are called as part of switch_context.
        for d in self.data.values():
            d.clear()

        for tracer in self.tracers:
            tracer.reset_activity()

    def reset(self):
        """Clear collected data, and prepare to collect more."""
        # A dictionary mapping file names to dicts with line number keys (if
        # not branch coverage), or mapping file names to dicts with line
        # number pairs as keys (if branch coverage).
        self.data = {}

        # A dictionary mapping file names to file tracer plugin names that
        # will handle them.
        self.file_tracers = {}

        self.disabled_plugins = set()

        # The .should_trace_cache attribute is a cache from file names to
        # coverage.FileDisposition objects, or None.  When a file is first
        # considered for tracing, a FileDisposition is obtained from
        # Coverage.should_trace.  Its .trace attribute indicates whether the
        # file should be traced or not.  If it should be, a plugin with
        # dynamic file names can decide not to trace it based on the dynamic
        # file name being excluded by the inclusion rules, in which case the
        # FileDisposition will be replaced by None in the cache.
        if env.PYPY:
            import __pypy__                     # pylint: disable=import-error
            # Alex Gaynor said:
            # should_trace_cache is a strictly growing key: once a key is in
            # it, it never changes.  Further, the keys used to access it are
            # generally constant, given sufficient context. That is to say, at
            # any given point _trace() is called, pypy is able to know the
            # key.  This is because the key is determined by the physical
            # source code line, and that's invariant with the call site.
            #
            # This property of a dict with immutable keys, combined with
            # call-site-constant keys is a match for PyPy's module dict,
            # which is optimized for such workloads.
            #
            # This gives a 20% benefit on the workload described at
            # https://bitbucket.org/pypy/pypy/issue/1871/10x-slower-than-cpython-under-coverage
            self.should_trace_cache = __pypy__.newdict("module")
        else:
            self.should_trace_cache = {}

        # Our active Tracers.
        self.tracers = []

        self._clear_data()

    def _start_tracer(self):
        """Start a new Tracer object, and store it in self.tracers."""
        tracer = self._trace_class()
        tracer.data = self.data
        tracer.trace_arcs = self.branch
        tracer.should_trace = self.should_trace
        tracer.should_trace_cache = self.should_trace_cache
        tracer.warn = self.warn

        if hasattr(tracer, 'concur_id_func'):
            tracer.concur_id_func = self.concur_id_func
        elif self.concur_id_func:
            raise CoverageException(
                "Can't support concurrency=%s with %s, only threads are supported" % (
                    self.concurrency, self.tracer_name(),
                )
            )

        if hasattr(tracer, 'file_tracers'):
            tracer.file_tracers = self.file_tracers
        if hasattr(tracer, 'threading'):
            tracer.threading = self.threading
        if hasattr(tracer, 'check_include'):
            tracer.check_include = self.check_include
        if hasattr(tracer, 'should_start_context'):
            tracer.should_start_context = self.should_start_context
            tracer.switch_context = self.switch_context
        if hasattr(tracer, 'disable_plugin'):
            tracer.disable_plugin = self.disable_plugin

        fn = tracer.start()
        self.tracers.append(tracer)

        return fn

    # The trace function has to be set individually on each thread before
    # execution begins.  Ironically, the only support the threading module has
    # for running code before the thread main is the tracing function.  So we
    # install this as a trace function, and the first time it's called, it
    # does the real trace installation.

    def _installation_trace(self, frame, event, arg):
        """Called on new threads, installs the real tracer."""
        # Remove ourselves as the trace function.
        sys.settrace(None)
        # Install the real tracer.
        fn = self._start_tracer()
        # Invoke the real trace function with the current event, to be sure
        # not to lose an event.
        if fn:
            fn = fn(frame, event, arg)
        # Return the new trace function to continue tracing in this scope.
        return fn

    def start(self):
        """Start collecting trace information."""
        if self._collectors:
            self._collectors[-1].pause()

        self.tracers = []

        # Check to see whether we had a fullcoverage tracer installed. If so,
        # get the stack frames it stashed away for us.
        traces0 = []
        fn0 = sys.gettrace()
        if fn0:
            tracer0 = getattr(fn0, '__self__', None)
            if tracer0:
                traces0 = getattr(tracer0, 'traces', [])

        try:
            # Install the tracer on this thread.
            fn = self._start_tracer()
        except:
            if self._collectors:
                self._collectors[-1].resume()
            raise

        # If _start_tracer succeeded, then we add ourselves to the global
        # stack of collectors.
        self._collectors.append(self)

        # Replay all the events from fullcoverage into the new trace function.
        for args in traces0:
            (frame, event, arg), lineno = args
            try:
                fn(frame, event, arg, lineno=lineno)
            except TypeError:
                raise Exception("fullcoverage must be run with the C trace function.")

        # Install our installation tracer in threading, to jump-start other
        # threads.
        if self.threading:
            self.threading.settrace(self._installation_trace)

    def stop(self):
        """Stop collecting trace information."""
        assert self._collectors
        if self._collectors[-1] is not self:
            print("self._collectors:")
            for c in self._collectors:
                print("  {!r}\n{}".format(c, c.origin))
        assert self._collectors[-1] is self, (
            "Expected current collector to be %r, but it's %r" % (self, self._collectors[-1])
        )

        self.pause()

        # Remove this Collector from the stack, and resume the one underneath
        # (if any).
        self._collectors.pop()
        if self._collectors:
            self._collectors[-1].resume()

    def pause(self):
        """Pause tracing, but be prepared to `resume`."""
        for tracer in self.tracers:
            tracer.stop()
            stats = tracer.get_stats()
            if stats:
                print("\nCoverage.py tracer stats:")
                for k in sorted(stats.keys()):
                    print("%20s: %s" % (k, stats[k]))
        if self.threading:
            self.threading.settrace(None)

    def resume(self):
        """Resume tracing after a `pause`."""
        for tracer in self.tracers:
            tracer.start()
        if self.threading:
            self.threading.settrace(self._installation_trace)
        else:
            self._start_tracer()

    def _activity(self):
        """Has any activity been traced?

        Returns a boolean, True if any trace function was invoked.

        """
        return any(tracer.activity() for tracer in self.tracers)

    def switch_context(self, new_context):
        """Switch to a new dynamic context."""
        self.flush_data()
        if self.static_context:
            context = self.static_context
            if new_context:
                context += "|" + new_context
        else:
            context = new_context
        self.covdata.set_context(context)

    def disable_plugin(self, disposition):
        """Disable the plugin mentioned in `disposition`."""
        file_tracer = disposition.file_tracer
        plugin = file_tracer._coverage_plugin
        plugin_name = plugin._coverage_plugin_name
        self.warn("Disabling plug-in {!r} due to previous exception".format(plugin_name))
        plugin._coverage_enabled = False
        disposition.trace = False

    def cached_mapped_file(self, filename):
        """A locally cached version of file names mapped through file_mapper."""
        key = (type(filename), filename)
        try:
            return self.mapped_file_cache[key]
        except KeyError:
            return self.mapped_file_cache.setdefault(key, self.file_mapper(filename))

    def mapped_file_dict(self, d):
        """Return a dict like d, but with keys modified by file_mapper."""
        # The call to litems() ensures that the GIL protects the dictionary
        # iterator against concurrent modifications by tracers running
        # in other threads.  We try three times in case of concurrent
        # access, hoping to get a clean copy.
        runtime_err = None
        for _ in range(3):
            try:
                items = litems(d)
            except RuntimeError as ex:
                runtime_err = ex
            else:
                break
        else:
            raise runtime_err

        return dict((self.cached_mapped_file(k), v) for k, v in items if v)

    def plugin_was_disabled(self, plugin):
        """Record that `plugin` was disabled during the run."""
        self.disabled_plugins.add(plugin._coverage_plugin_name)

    def flush_data(self):
        """Save the collected data to our associated `CoverageData`.

        Data may have also been saved along the way. This forces the
        last of the data to be saved.

        Returns True if there was data to save, False if not.
        """
        if not self._activity():
            return False

        if self.branch:
            self.covdata.add_arcs(self.mapped_file_dict(self.data))
        else:
            self.covdata.add_lines(self.mapped_file_dict(self.data))

        file_tracers = {
            k: v for k, v in self.file_tracers.items()
            if v not in self.disabled_plugins
        }
        self.covdata.add_file_tracers(self.mapped_file_dict(file_tracers))

        self._clear_data()
        return True

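
# Illustrative sketch (added; not from coverage.py itself): the shape of the
# `Collector.data` mapping described in reset() above. The file name and line
# numbers are invented for the example.
line_data = {"/src/example.py": {1: None, 2: None, 5: None}}  # branch=False
arc_data = {"/src/example.py": {(1, 2): None, (2, 5): None}}  # branch=True
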
import asyncio
import logging

import async_timeout
from pyflick.authentication import AuthException, SimpleFlickAuth
from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET
import voluptuous as vol

from homeassistant import config_entries, exceptions
from homeassistant.const import (
    CONF_CLIENT_ID,
    CONF_CLIENT_SECRET,
    CONF_PASSWORD,
    CONF_USERNAME,
)
from homeassistant.helpers import aiohttp_client

from .const import DOMAIN  # pylint: disable=unused-import

_LOGGER = logging.getLogger(__name__)

DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_USERNAME): str,
        vol.Required(CONF_PASSWORD): str,
        vol.Optional(CONF_CLIENT_ID): str,
        vol.Optional(CONF_CLIENT_SECRET): str,
    }
)


class FlickConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Flick config flow."""

    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL

    async def _validate_input(self, user_input):
        auth = SimpleFlickAuth(
            username=user_input[CONF_USERNAME],
            password=user_input[CONF_PASSWORD],
            websession=aiohttp_client.async_get_clientsession(self.hass),
            client_id=user_input.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
            client_secret=user_input.get(CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET),
        )

        try:
            with async_timeout.timeout(60):
                token = await auth.async_get_access_token()
        except asyncio.TimeoutError as err:
            raise CannotConnect() from err
        except AuthException as err:
            raise InvalidAuth() from err
        else:
            return token is not None

    async def async_step_user(self, user_input=None):
        """Handle gathering login info."""
        errors = {}
        if user_input is not None:
            try:
                await self._validate_input(user_input)
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except Exception:  # pylint: disable=broad-except
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                await self.async_set_unique_id(
                    f"flick_electric_{user_input[CONF_USERNAME]}"
                )
                self._abort_if_unique_id_configured()

                return self.async_create_entry(
                    title=f"Flick Electric: {user_input[CONF_USERNAME]}",
                    data=user_input,
                )

        return self.async_show_form(
            step_id="user", data_schema=DATA_SCHEMA, errors=errors
        )


class CannotConnect(exceptions.HomeAssistantError):
    """Error to indicate we cannot connect."""


class InvalidAuth(exceptions.HomeAssistantError):
    """Error to indicate there is invalid auth."""

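
# Illustrative sketch (added): the error-mapping pattern used in
# async_step_user above, reduced to its core. Exception types are translated
# into stable error keys the frontend can render; anything unexpected falls
# through to "unknown". The function name is invented for the example.
def classify(err: Exception) -> str:
    if isinstance(err, CannotConnect):
        return "cannot_connect"
    if isinstance(err, InvalidAuth):
        return "invalid_auth"
    return "unknown"
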
import json

from django.conf import settings

import weblate
from weblate.machinery.base import (
    MachineTranslation,
    MachineTranslationError,
    MissingConfiguration,
)


class ModernMTTranslation(MachineTranslation):
    """ModernMT machine translation support."""

    name = "ModernMT"
    max_score = 90

    def __init__(self):
        """Check configuration."""
        super().__init__()
        if settings.MT_MODERNMT_KEY is None:
            raise MissingConfiguration("ModernMT requires API key")

    def get_authentication(self):
        """Hook for backends to add authentication headers to a request."""
        return {
            "MMT-ApiKey": settings.MT_MODERNMT_KEY,
            "MMT-Platform": "Weblate",
            "MMT-PlatformVersion": weblate.VERSION,
        }

    def is_supported(self, source, language):
        """Check whether given language combination is supported."""
        return (source, language) in self.supported_languages

    def download_languages(self):
        """List of supported languages."""
        response = self.request("get", settings.MT_MODERNMT_URL + "languages")
        payload = response.json()

        for source, targets in payload["data"].items():
            yield from ((source, target) for target in targets)

    def download_translations(
        self,
        source,
        language,
        text: str,
        unit,
        user,
        search: bool,
        threshold: int = 75,
    ):
        """Download list of possible translations from a service."""
        response = self.request(
            "get",
            settings.MT_MODERNMT_URL + "translate",
            params={"q": text, "source": source, "target": language},
        )
        payload = response.json()

        if "error" in payload:
            raise MachineTranslationError(payload["error"]["message"])

        yield {
            "text": payload["data"]["translation"],
            "quality": self.max_score,
            "service": self.name,
            "source": text,
        }

    def get_error_message(self, exc):
        if hasattr(exc, "read"):
            content = exc.read()
            try:
                data = json.loads(content)
                return data["error"]["message"]
            except Exception:
                pass

        return super().get_error_message(exc)

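
# Illustrative sketch (added): the shape of the items download_translations
# yields; the texts here are invented for the example.
example_result = {
    "text": "Ahoj svete",  # translation returned by the service
    "quality": 90,         # pinned to ModernMTTranslation.max_score
    "service": "ModernMT",
    "source": "Hello world",
}
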
from pynzbgetapi import NZBGetAPIException

from homeassistant.components.nzbget.const import DOMAIN
from homeassistant.config_entries import (
    ENTRY_STATE_LOADED,
    ENTRY_STATE_NOT_LOADED,
    ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT
from homeassistant.setup import async_setup_component

from . import (
    ENTRY_CONFIG,
    YAML_CONFIG,
    _patch_async_setup_entry,
    _patch_history,
    _patch_status,
    _patch_version,
    init_integration,
)

from tests.async_mock import patch
from tests.common import MockConfigEntry


async def test_import_from_yaml(hass) -> None:
    """Test import from YAML."""
    with _patch_version(), _patch_status(), _patch_history(), _patch_async_setup_entry():
        assert await async_setup_component(hass, DOMAIN, {DOMAIN: YAML_CONFIG})
        await hass.async_block_till_done()

    entries = hass.config_entries.async_entries(DOMAIN)
    assert len(entries) == 1

    assert entries[0].data[CONF_NAME] == "GetNZBsTest"
    assert entries[0].data[CONF_HOST] == "10.10.10.30"
    assert entries[0].data[CONF_PORT] == 6789


async def test_unload_entry(hass, nzbget_api):
    """Test successful unload of entry."""
    entry = await init_integration(hass)

    assert len(hass.config_entries.async_entries(DOMAIN)) == 1
    assert entry.state == ENTRY_STATE_LOADED

    assert await hass.config_entries.async_unload(entry.entry_id)
    await hass.async_block_till_done()

    assert entry.state == ENTRY_STATE_NOT_LOADED
    assert not hass.data.get(DOMAIN)


async def test_async_setup_raises_entry_not_ready(hass):
    """Test that it throws ConfigEntryNotReady when exception occurs during setup."""
    config_entry = MockConfigEntry(domain=DOMAIN, data=ENTRY_CONFIG)
    config_entry.add_to_hass(hass)

    with _patch_version(), patch(
        "homeassistant.components.nzbget.coordinator.NZBGetAPI.status",
        side_effect=NZBGetAPIException(),
    ):
        await hass.config_entries.async_setup(config_entry.entry_id)

    assert config_entry.state == ENTRY_STATE_SETUP_RETRY

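
# Illustrative sketch (added): the `_patch_*` helpers imported above are thin
# wrappers around unittest.mock.patch; a minimal stand-in could look like
# this. The sample return value is invented, but the patch target string is
# the same one used in test_async_setup_raises_entry_not_ready.
from unittest.mock import patch as mock_patch


def _patch_status_example():
    return mock_patch(
        "homeassistant.components.nzbget.coordinator.NZBGetAPI.status",
        return_value={"ArticleCacheMB": 64},  # made-up sample payload
    )
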
import pytest


def pytest_addoption(parser):
    """Add command-line flags for pytest."""
    parser.addoption("--run-flaky", action="store_true", help="runs flaky tests")
    parser.addoption(
        "--run-network-tests",
        action="store_true",
        help="runs tests requiring a network connection",
    )


def pytest_runtest_setup(item):
    # based on https://stackoverflow.com/questions/47559524
    if "flaky" in item.keywords and not item.config.getoption("--run-flaky"):
        pytest.skip("set --run-flaky option to run flaky tests")
    if "network" in item.keywords and not item.config.getoption("--run-network-tests"):
        pytest.skip(
            "set --run-network-tests to run tests requiring an internet connection"
        )


@pytest.fixture(autouse=True)
def add_standard_imports(doctest_namespace, tmpdir):
    import numpy as np
    import pandas as pd

    import xarray as xr

    doctest_namespace["np"] = np
    doctest_namespace["pd"] = pd
    doctest_namespace["xr"] = xr

    # always seed numpy.random to make the examples deterministic
    np.random.seed(0)

    # always switch to the temporary directory, so files get written there
    tmpdir.chdir()

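
# Illustrative sketch (added): how a test opts in to the custom flags defined
# above. Without --run-flaky on the command line, this test is skipped by
# pytest_runtest_setup. The test name and body are invented for the example.
import pytest


@pytest.mark.flaky
def test_occasionally_fails():
    assert True
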
from ... import event
from . import Widget


class ImageWidget(Widget):
    """ Display an image from a url.

    The ``node`` of this widget is an ``<img>`` element,
    wrapped in a ``<div>`` (the ``outernode``) to handle sizing.
    """

    DEFAULT_MIN_SIZE = 16, 16

    _sequence = 0

    source = event.StringProp('', settable=True, doc="""
        The source of the image. This can be anything that an HTML
        img element supports.
        """)

    stretch = event.BoolProp(False, settable=True, doc="""
        Whether the image should stretch to fill all available
        space, or maintain its aspect ratio (default).
        """)

    def _create_dom(self):
        global window
        outer = window.document.createElement('div')
        inner = window.document.createElement('img')
        outer.appendChild(inner)
        return outer, inner

    @event.reaction
    def __resize_image(self):
        size = self.size
        if self.stretch:
            self.node.style.maxWidth = None
            self.node.style.maxHeight = None
            self.node.style.width = size[0] + 'px'
            self.node.style.height = size[1] + 'px'
        else:
            self.node.style.maxWidth = size[0] + 'px'
            self.node.style.maxHeight = size[1] + 'px'
            self.node.style.width = None
            self.node.style.height = None

    @event.reaction
    def __source_changed(self):
        self.node.src = self.source


class VideoWidget(Widget):
    """ Display a video from a url.

    The ``node`` of this widget is a ``<video>``