'''))\n sitemap_menu = copy.deepcopy(menu)\n SubElement(SubElement(sitemap_menu[-1], 'li'), 'a', href='http://lxml.de/files/').text = 'Download files'\n sitemap[-1].append(sitemap_menu) # append to body\n ElementTree(sitemap).write(os.path.join(dirname, 'sitemap.html'))\n\n # integrate sitemap into the menu\n SubElement(SubElement(menu_div[-1], 'li'), 'a', href='/sitemap.html').text = 'Sitemap'\n\n # integrate menu into web pages\n for tree, basename, outpath in trees.values():\n head = find_head(tree)[0]\n SubElement(head, 'script', type='text/javascript').text = menu_js\n SubElement(head, 'meta', name='viewport', content=\"width=device-width, initial-scale=1\")\n find_body(tree)[0].set('onclick', 'hide_menu()')\n\n new_tree = merge_menu(tree, menu, basename)\n title = find_title_tag(new_tree)\n if title and title[0].text == 'lxml':\n title[0].text = \"lxml - Processing XML and HTML with Python\"\n heading = find_heading_tag(new_tree)\n if heading:\n heading[0].text = \"lxml - XML and HTML with Python\"\n new_tree.write(outpath)\n\n\nif __name__ == '__main__':\n publish(sys.argv[1], sys.argv[2], sys.argv[3])\n\n
"}}},{"rowIdx":326,"cells":{"text":{"kind":"string","value":"
\nimport importlib\nimport logging\nimport os\n\nfrom perfkitbenchmarker import events\nfrom perfkitbenchmarker import import_util\nfrom perfkitbenchmarker import requirements\nfrom perfkitbenchmarker.providers import aws\nfrom perfkitbenchmarker.providers import azure\nfrom perfkitbenchmarker.providers import gcp\n\n\nGCP = gcp.CLOUD\nAZURE = azure.CLOUD\nAWS = aws.CLOUD\nALICLOUD = 'AliCloud'\nKUBERNETES = 'Kubernetes'\nDIGITALOCEAN = 'DigitalOcean'\nOPENSTACK = 'OpenStack'\nCLOUDSTACK = 'CloudStack'\nRACKSPACE = 'Rackspace'\nMESOS = 'Mesos'\nPROFITBRICKS = 'ProfitBricks'\n# Though Docker is not a cloud provider, it's inclusion is useful\n# for performing on premise to cloud benchmarks\nDOCKER = 'Docker'\n\nVALID_CLOUDS = (GCP, AZURE, AWS, DIGITALOCEAN, KUBERNETES, OPENSTACK,\n RACKSPACE, CLOUDSTACK, ALICLOUD, MESOS, PROFITBRICKS, DOCKER)\n\n\n_imported_providers = set()\n\n\ndef LoadProviderFlags(providers):\n \"\"\"Imports just the flags module for each provider.\n\n This allows PKB to load flag definitions from each provider to include in the\n help text without actually loading any other provider-specific modules.\n\n Args:\n providers: series of strings. 
Each element is a value from VALID_CLOUDS\n indicating a cloud provider for which to import the flags module.\n \"\"\"\n for provider_name in providers:\n normalized_name = provider_name.lower()\n flags_module_name = '.'.join((__name__, normalized_name, 'flags'))\n importlib.import_module(flags_module_name)\n\n\n# Import flag definitions for all cloud providers.\nLoadProviderFlags(VALID_CLOUDS)\n\n\ndef LoadProvider(provider_name, ignore_package_requirements=True):\n \"\"\"Loads the all modules in the 'provider_name' package.\n\n This function first checks the specified provider's Python package\n requirements file, if one exists, and verifies that all requirements are met.\n Next, it loads all modules in the specified provider's package. By loading\n these modules, relevant classes (e.g. VMs) will register themselves.\n\n Args:\n provider_name: string chosen from VALID_CLOUDS. The name of the provider\n whose modules should be loaded.\n ignore_package_requirements: boolean. If True, the provider's Python package\n requirements file is ignored.\n \"\"\"\n if provider_name in _imported_providers:\n return\n\n # Check package requirements from the provider's pip requirements file.\n normalized_name = provider_name.lower()\n if not ignore_package_requirements:\n requirements.CheckProviderRequirements(normalized_name)\n\n # Load all modules in the provider's directory. Simply loading those modules\n # will cause relevant classes (e.g. VM and disk classes) to register\n # themselves so that they can be instantiated during resource provisioning.\n provider_package_path = os.path.join(__path__[0], normalized_name)\n try:\n modules = tuple(import_util.LoadModulesForPath(\n [provider_package_path], __name__ + '.' + normalized_name))\n if not modules:\n raise ImportError('No modules found for provider %s.' 
% provider_name)\n except Exception:\n logging.error('Unable to load provider %s.', provider_name)\n raise\n\n # Signal that the provider's modules have been imported.\n _imported_providers.add(provider_name)\n events.provider_imported.send(provider_name)\n\n"}}},{"rowIdx":327,"cells":{"text":{"kind":"string","value":"\nfrom weblate.checks.angularjs import AngularJSInterpolationCheck\nfrom weblate.checks.tests.test_checks import CheckTestCase, MockUnit\n\n\nclass AngularJSInterpolationCheckTest(CheckTestCase):\n check = AngularJSInterpolationCheck()\n\n def test_no_format(self):\n self.assertFalse(self.check.check_format(\"strins\", \"string\", False))\n\n def test_format(self):\n self.assertFalse(\n self.check.check_format(\n \"{{name}} string {{other}}\", \"{{name}} {{other}} string\", False\n )\n )\n\n def test_format_ignore_position(self):\n self.assertFalse(\n self.check.check_format(\n \"{{name}} string {{other}}\", \"{{other}} string {{name}}\", False\n )\n )\n\n def test_different_whitespace(self):\n self.assertFalse(\n self.check.check_format(\"{{ name }} string\", \"{{name}} string\", False)\n )\n\n def test_missing_format(self):\n self.assertTrue(self.check.check_format(\"{{name}} string\", \"string\", False))\n\n def test_wrong_value(self):\n self.assertTrue(\n self.check.check_format(\"{{name}} string\", \"{{nameerror}} string\", False)\n )\n\n def test_extended_formatting(self):\n self.assertFalse(\n self.check.check_format(\n \"Value: {{ something.value | currency }}\",\n \"Wert: {{ something.value | currency }}\",\n False,\n )\n )\n self.assertTrue(\n self.check.check_format(\n \"Value: {{ something.value | currency }}\",\n \"Value: {{ something.value }}\",\n False,\n )\n )\n\n def test_check_highlight(self):\n highlights = self.check.check_highlight(\n \"{{name}} {{ something.value | currency }} string\",\n MockUnit(\"angularjs_format\", flags=\"angularjs-format\"),\n )\n self.assertEqual(2, len(highlights))\n self.assertEqual(0, highlights[0][0])\n self.assertEqual(8, highlights[0][1])\n self.assertEqual(9, highlights[1][0])\n self.assertEqual(41, highlights[1][1])\n\n def test_check_highlight_ignored(self):\n highlights = self.check.check_highlight(\n \"{{name}} {{other}} string\",\n MockUnit(\"angularjs_format\", flags=\"ignore-angularjs-format\"),\n )\n self.assertEqual([], highlights)\n\n"}}},{"rowIdx":328,"cells":{"text":{"kind":"string","value":"\nimport mock\nfrom zake.fake_client import FakeClient\n\nfrom paasta_tools.api import settings\nfrom paasta_tools.api.views import deploy_queue\nfrom paasta_tools.deployd.common import ServiceInstance\nfrom paasta_tools.utils import SystemPaastaConfig\n\n\n@mock.patch(\"paasta_tools.api.views.deploy_queue.KazooClient\", autospec=True)\n@mock.patch(\"paasta_tools.api.views.deploy_queue.ZKDelayDeadlineQueue\", autospec=True)\ndef test_list_deploy_queue(mock_delay_deadline_queue_class, mock_kazoo_client):\n mock_request = mock.Mock()\n settings.system_paasta_config = mock.create_autospec(SystemPaastaConfig)\n mock_kazoo_client.return_value = FakeClient()\n\n available_service_instance = ServiceInstance(\n service=\"fake_service1\",\n instance=\"fake_instance1\",\n watcher=\"worker0\",\n bounce_by=1577952000,\n wait_until=1577952000,\n enqueue_time=1577952000,\n bounce_start_time=1577952000,\n failures=1,\n processed_count=2,\n )\n unavailable_service_instance = ServiceInstance(\n service=\"fake_service2\",\n instance=\"fake_instance2\",\n watcher=\"worker1\",\n bounce_by=1577952100,\n wait_until=1577952200,\n 
enqueue_time=1577952100,\n bounce_start_time=1577952100,\n failures=2,\n processed_count=3,\n )\n\n mock_delay_deadline_queue = mock_delay_deadline_queue_class.return_value\n mock_delay_deadline_queue.get_available_service_instances.return_value = [\n (mock.Mock(), available_service_instance)\n ]\n mock_delay_deadline_queue.get_unavailable_service_instances.return_value = [\n (mock.Mock(), mock.Mock(), unavailable_service_instance)\n ]\n\n output = deploy_queue.list_deploy_queue(mock_request)\n assert output == {\n \"available_service_instances\": [\n {\n \"service\": \"fake_service1\",\n \"instance\": \"fake_instance1\",\n \"watcher\": \"worker0\",\n \"bounce_by\": 1577952000,\n \"wait_until\": 1577952000,\n \"enqueue_time\": 1577952000,\n \"bounce_start_time\": 1577952000,\n \"failures\": 1,\n \"processed_count\": 2,\n }\n ],\n \"unavailable_service_instances\": [\n {\n \"service\": \"fake_service2\",\n \"instance\": \"fake_instance2\",\n \"watcher\": \"worker1\",\n \"bounce_by\": 1577952100,\n \"wait_until\": 1577952200,\n \"enqueue_time\": 1577952100,\n \"bounce_start_time\": 1577952100,\n \"failures\": 2,\n \"processed_count\": 3,\n }\n ],\n }\n\n"}}},{"rowIdx":329,"cells":{"text":{"kind":"string","value":"\nimport logging\nimport unittest\nimport numpy as np\n\nfrom gensim.models import LdaModel\nfrom gensim.test.utils import common_dictionary, common_corpus\n\n\nclass TestLdaDiff(unittest.TestCase):\n def setUp(self):\n self.dictionary = common_dictionary\n self.corpus = common_corpus\n self.num_topics = 5\n self.n_ann_terms = 10\n self.model = LdaModel(corpus=self.corpus, id2word=self.dictionary, num_topics=self.num_topics, passes=10)\n\n def testBasic(self):\n # test for matrix case\n mdiff, annotation = self.model.diff(self.model, n_ann_terms=self.n_ann_terms)\n\n self.assertEqual(mdiff.shape, (self.num_topics, self.num_topics))\n self.assertEqual(len(annotation), self.num_topics)\n self.assertEqual(len(annotation[0]), self.num_topics)\n\n # test for diagonal case\n mdiff, annotation = self.model.diff(self.model, n_ann_terms=self.n_ann_terms, diagonal=True)\n\n self.assertEqual(mdiff.shape, (self.num_topics,))\n self.assertEqual(len(annotation), self.num_topics)\n\n def testIdentity(self):\n for dist_name in [\"hellinger\", \"kullback_leibler\", \"jaccard\"]:\n # test for matrix case\n mdiff, annotation = self.model.diff(self.model, n_ann_terms=self.n_ann_terms, distance=dist_name)\n\n for row in annotation:\n for (int_tokens, diff_tokens) in row:\n self.assertEqual(diff_tokens, [])\n self.assertEqual(len(int_tokens), self.n_ann_terms)\n\n self.assertTrue(np.allclose(np.diag(mdiff), np.zeros(mdiff.shape[0], dtype=mdiff.dtype)))\n\n if dist_name == \"jaccard\":\n self.assertTrue(np.allclose(mdiff, np.zeros(mdiff.shape, dtype=mdiff.dtype)))\n\n # test for diagonal case\n mdiff, annotation = \\\n self.model.diff(self.model, n_ann_terms=self.n_ann_terms, distance=dist_name, diagonal=True)\n\n for (int_tokens, diff_tokens) in annotation:\n self.assertEqual(diff_tokens, [])\n self.assertEqual(len(int_tokens), self.n_ann_terms)\n\n self.assertTrue(np.allclose(mdiff, np.zeros(mdiff.shape, dtype=mdiff.dtype)))\n\n if dist_name == \"jaccard\":\n self.assertTrue(np.allclose(mdiff, np.zeros(mdiff.shape, dtype=mdiff.dtype)))\n\n def testInput(self):\n self.assertRaises(ValueError, self.model.diff, self.model, n_ann_terms=self.n_ann_terms, distance='something')\n self.assertRaises(ValueError, self.model.diff, [], n_ann_terms=self.n_ann_terms, distance='something')\n\n\nif __name__ == 
'__main__':\n logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)\n unittest.main()\n\n"}}},{"rowIdx":330,"cells":{"text":{"kind":"string","value":"\nimport asyncio\nimport logging.handlers\nfrom timeit import default_timer as timer\nfrom types import ModuleType\nfrom typing import Awaitable, Callable, Optional, Set\n\nfrom homeassistant import config as conf_util, core, loader, requirements\nfrom homeassistant.config import async_notify_setup_error\nfrom homeassistant.const import EVENT_COMPONENT_LOADED, PLATFORM_FORMAT\nfrom homeassistant.exceptions import HomeAssistantError\nfrom homeassistant.helpers.typing import ConfigType\nfrom homeassistant.util import dt as dt_util\n\n_LOGGER = logging.getLogger(__name__)\n\nATTR_COMPONENT = \"component\"\n\nDATA_SETUP_DONE = \"setup_done\"\nDATA_SETUP_STARTED = \"setup_started\"\nDATA_SETUP = \"setup_tasks\"\nDATA_DEPS_REQS = \"deps_reqs_processed\"\n\nSLOW_SETUP_WARNING = 10\nSLOW_SETUP_MAX_WAIT = 300\n\n\n@core.callback\ndef async_set_domains_to_be_loaded(hass: core.HomeAssistant, domains: Set[str]) -> None:\n \"\"\"Set domains that are going to be loaded from the config.\n\n This will allow us to properly handle after_dependencies.\n \"\"\"\n hass.data[DATA_SETUP_DONE] = {domain: asyncio.Event() for domain in domains}\n\n\ndef setup_component(hass: core.HomeAssistant, domain: str, config: ConfigType) -> bool:\n \"\"\"Set up a component and all its dependencies.\"\"\"\n return asyncio.run_coroutine_threadsafe(\n async_setup_component(hass, domain, config), hass.loop\n ).result()\n\n\nasync def async_setup_component(\n hass: core.HomeAssistant, domain: str, config: ConfigType\n) -> bool:\n \"\"\"Set up a component and all its dependencies.\n\n This method is a coroutine.\n \"\"\"\n if domain in hass.config.components:\n return True\n\n setup_tasks = hass.data.setdefault(DATA_SETUP, {})\n\n if domain in setup_tasks:\n return await setup_tasks[domain] # type: ignore\n\n task = setup_tasks[domain] = hass.async_create_task(\n _async_setup_component(hass, domain, config)\n )\n\n try:\n return await task # type: ignore\n finally:\n if domain in hass.data.get(DATA_SETUP_DONE, {}):\n hass.data[DATA_SETUP_DONE].pop(domain).set()\n\n\nasync def _async_process_dependencies(\n hass: core.HomeAssistant, config: ConfigType, integration: loader.Integration\n) -> bool:\n \"\"\"Ensure all dependencies are set up.\"\"\"\n dependencies_tasks = {\n dep: hass.loop.create_task(async_setup_component(hass, dep, config))\n for dep in integration.dependencies\n if dep not in hass.config.components\n }\n\n after_dependencies_tasks = {}\n to_be_loaded = hass.data.get(DATA_SETUP_DONE, {})\n for dep in integration.after_dependencies:\n if (\n dep not in dependencies_tasks\n and dep in to_be_loaded\n and dep not in hass.config.components\n ):\n after_dependencies_tasks[dep] = hass.loop.create_task(\n to_be_loaded[dep].wait()\n )\n\n if not dependencies_tasks and not after_dependencies_tasks:\n return True\n\n if dependencies_tasks:\n _LOGGER.debug(\n \"Dependency %s will wait for dependencies %s\",\n integration.domain,\n list(dependencies_tasks),\n )\n if after_dependencies_tasks:\n _LOGGER.debug(\n \"Dependency %s will wait for after dependencies %s\",\n integration.domain,\n list(after_dependencies_tasks),\n )\n\n async with hass.timeout.async_freeze(integration.domain):\n results = await asyncio.gather(\n *dependencies_tasks.values(), *after_dependencies_tasks.values()\n )\n\n failed = [\n domain for idx, domain in 
enumerate(dependencies_tasks) if not results[idx]\n ]\n\n if failed:\n _LOGGER.error(\n \"Unable to set up dependencies of %s. Setup failed for dependencies: %s\",\n integration.domain,\n \", \".join(failed),\n )\n\n return False\n return True\n\n\nasync def _async_setup_component(\n hass: core.HomeAssistant, domain: str, config: ConfigType\n) -> bool:\n \"\"\"Set up a component for Home Assistant.\n\n This method is a coroutine.\n \"\"\"\n\n def log_error(msg: str, link: Optional[str] = None) -> None:\n \"\"\"Log helper.\"\"\"\n _LOGGER.error(\"Setup failed for %s: %s\", domain, msg)\n async_notify_setup_error(hass, domain, link)\n\n try:\n integration = await loader.async_get_integration(hass, domain)\n except loader.IntegrationNotFound:\n log_error(\"Integration not found.\")\n return False\n\n if integration.disabled:\n log_error(f\"dependency is disabled - {integration.disabled}\")\n return False\n\n # Validate all dependencies exist and there are no circular dependencies\n if not await integration.resolve_dependencies():\n return False\n\n # Process requirements as soon as possible, so we can import the component\n # without requiring imports to be in functions.\n try:\n await async_process_deps_reqs(hass, config, integration)\n except HomeAssistantError as err:\n log_error(str(err), integration.documentation)\n return False\n\n # Some integrations fail on import because they call functions incorrectly.\n # So we do it before validating config to catch these errors.\n try:\n component = integration.get_component()\n except ImportError as err:\n log_error(f\"Unable to import component: {err}\", integration.documentation)\n return False\n except Exception: # pylint: disable=broad-except\n _LOGGER.exception(\"Setup failed for %s: unknown error\", domain)\n return False\n\n processed_config = await conf_util.async_process_component_config(\n hass, config, integration\n )\n\n if processed_config is None:\n log_error(\"Invalid config.\", integration.documentation)\n return False\n\n start = timer()\n _LOGGER.info(\"Setting up %s\", domain)\n hass.data.setdefault(DATA_SETUP_STARTED, {})[domain] = dt_util.utcnow()\n\n if hasattr(component, \"PLATFORM_SCHEMA\"):\n # Entity components have their own warning\n warn_task = None\n else:\n warn_task = hass.loop.call_later(\n SLOW_SETUP_WARNING,\n _LOGGER.warning,\n \"Setup of %s is taking over %s seconds.\",\n domain,\n SLOW_SETUP_WARNING,\n )\n\n try:\n if hasattr(component, \"async_setup\"):\n task = component.async_setup(hass, processed_config) # type: ignore\n elif hasattr(component, \"setup\"):\n # This should not be replaced with hass.async_add_executor_job because\n # we don't want to track this task in case it blocks startup.\n task = hass.loop.run_in_executor(\n None, component.setup, hass, processed_config # type: ignore\n )\n else:\n log_error(\"No setup function defined.\")\n hass.data[DATA_SETUP_STARTED].pop(domain)\n return False\n\n async with hass.timeout.async_timeout(SLOW_SETUP_MAX_WAIT, domain):\n result = await task\n except asyncio.TimeoutError:\n _LOGGER.error(\n \"Setup of %s is taking longer than %s seconds.\"\n \" Startup will proceed without waiting any longer\",\n domain,\n SLOW_SETUP_MAX_WAIT,\n )\n hass.data[DATA_SETUP_STARTED].pop(domain)\n return False\n except Exception: # pylint: disable=broad-except\n _LOGGER.exception(\"Error during setup of component %s\", domain)\n async_notify_setup_error(hass, domain, integration.documentation)\n hass.data[DATA_SETUP_STARTED].pop(domain)\n return False\n finally:\n end = 
timer()\n if warn_task:\n warn_task.cancel()\n _LOGGER.info(\"Setup of domain %s took %.1f seconds\", domain, end - start)\n\n if result is False:\n log_error(\"Integration failed to initialize.\")\n hass.data[DATA_SETUP_STARTED].pop(domain)\n return False\n if result is not True:\n log_error(\n f\"Integration {domain!r} did not return boolean if setup was \"\n \"successful. Disabling component.\"\n )\n hass.data[DATA_SETUP_STARTED].pop(domain)\n return False\n\n # Flush out async_setup calling create_task. Fragile but covered by test.\n await asyncio.sleep(0)\n await hass.config_entries.flow.async_wait_init_flow_finish(domain)\n\n await asyncio.gather(\n *[\n entry.async_setup(hass, integration=integration)\n for entry in hass.config_entries.async_entries(domain)\n ]\n )\n\n hass.config.components.add(domain)\n hass.data[DATA_SETUP_STARTED].pop(domain)\n\n # Cleanup\n if domain in hass.data[DATA_SETUP]:\n hass.data[DATA_SETUP].pop(domain)\n\n hass.bus.async_fire(EVENT_COMPONENT_LOADED, {ATTR_COMPONENT: domain})\n\n return True\n\n\nasync def async_prepare_setup_platform(\n hass: core.HomeAssistant, hass_config: ConfigType, domain: str, platform_name: str\n) -> Optional[ModuleType]:\n \"\"\"Load a platform and makes sure dependencies are setup.\n\n This method is a coroutine.\n \"\"\"\n platform_path = PLATFORM_FORMAT.format(domain=domain, platform=platform_name)\n\n def log_error(msg: str) -> None:\n \"\"\"Log helper.\"\"\"\n _LOGGER.error(\"Unable to prepare setup for platform %s: %s\", platform_path, msg)\n async_notify_setup_error(hass, platform_path)\n\n try:\n integration = await loader.async_get_integration(hass, platform_name)\n except loader.IntegrationNotFound:\n log_error(\"Integration not found\")\n return None\n\n # Process deps and reqs as soon as possible, so that requirements are\n # available when we import the platform.\n try:\n await async_process_deps_reqs(hass, hass_config, integration)\n except HomeAssistantError as err:\n log_error(str(err))\n return None\n\n try:\n platform = integration.get_platform(domain)\n except ImportError as exc:\n log_error(f\"Platform not found ({exc}).\")\n return None\n\n # Already loaded\n if platform_path in hass.config.components:\n return platform\n\n # Platforms cannot exist on their own, they are part of their integration.\n # If the integration is not set up yet, and can be set up, set it up.\n if integration.domain not in hass.config.components:\n try:\n component = integration.get_component()\n except ImportError as exc:\n log_error(f\"Unable to import the component ({exc}).\")\n return None\n\n if hasattr(component, \"setup\") or hasattr(component, \"async_setup\"):\n if not await async_setup_component(hass, integration.domain, hass_config):\n log_error(\"Unable to set up component.\")\n return None\n\n return platform\n\n\nasync def async_process_deps_reqs(\n hass: core.HomeAssistant, config: ConfigType, integration: loader.Integration\n) -> None:\n \"\"\"Process all dependencies and requirements for a module.\n\n Module is a Python module of either a component or platform.\n \"\"\"\n processed = hass.data.get(DATA_DEPS_REQS)\n\n if processed is None:\n processed = hass.data[DATA_DEPS_REQS] = set()\n elif integration.domain in processed:\n return\n\n if not await _async_process_dependencies(hass, config, integration):\n raise HomeAssistantError(\"Could not set up all dependencies.\")\n\n if not hass.config.skip_pip and integration.requirements:\n async with hass.timeout.async_freeze(integration.domain):\n await 
requirements.async_get_integration_with_requirements(\n hass, integration.domain\n )\n\n processed.add(integration.domain)\n\n\n@core.callback\ndef async_when_setup(\n hass: core.HomeAssistant,\n component: str,\n when_setup_cb: Callable[[core.HomeAssistant, str], Awaitable[None]],\n) -> None:\n \"\"\"Call a method when a component is setup.\"\"\"\n\n async def when_setup() -> None:\n \"\"\"Call the callback.\"\"\"\n try:\n await when_setup_cb(hass, component)\n except Exception: # pylint: disable=broad-except\n _LOGGER.exception(\"Error handling when_setup callback for %s\", component)\n\n # Running it in a new task so that it always runs after\n if component in hass.config.components:\n hass.async_create_task(when_setup())\n return\n\n unsub = None\n\n async def loaded_event(event: core.Event) -> None:\n \"\"\"Call the callback.\"\"\"\n if event.data[ATTR_COMPONENT] != component:\n return\n\n unsub() # type: ignore\n await when_setup()\n\n unsub = hass.bus.async_listen(EVENT_COMPONENT_LOADED, loaded_event)\n\n"}}},{"rowIdx":331,"cells":{"text":{"kind":"string","value":"\nfrom typing import Type\n\nfrom homeassistant.config_entries import ConfigEntry\n\nfrom .board import FirmataPinType\nfrom .const import DOMAIN, FIRMATA_MANUFACTURER\nfrom .pin import FirmataBoardPin\n\n\nclass FirmataEntity:\n \"\"\"Representation of a Firmata entity.\"\"\"\n\n def __init__(self, api):\n \"\"\"Initialize the entity.\"\"\"\n self._api = api\n\n @property\n def device_info(self) -> dict:\n \"\"\"Return device info.\"\"\"\n return {\n \"connections\": {},\n \"identifiers\": {(DOMAIN, self._api.board.name)},\n \"manufacturer\": FIRMATA_MANUFACTURER,\n \"name\": self._api.board.name,\n \"sw_version\": self._api.board.firmware_version,\n }\n\n\nclass FirmataPinEntity(FirmataEntity):\n \"\"\"Representation of a Firmata pin entity.\"\"\"\n\n def __init__(\n self,\n api: Type[FirmataBoardPin],\n config_entry: ConfigEntry,\n name: str,\n pin: FirmataPinType,\n ):\n \"\"\"Initialize the pin entity.\"\"\"\n super().__init__(api)\n self._name = name\n\n location = (config_entry.entry_id, \"pin\", pin)\n self._unique_id = \"_\".join(str(i) for i in location)\n\n @property\n def name(self) -> str:\n \"\"\"Get the name of the pin.\"\"\"\n return self._name\n\n @property\n def should_poll(self) -> bool:\n \"\"\"No polling needed.\"\"\"\n return False\n\n @property\n def unique_id(self) -> str:\n \"\"\"Return a unique identifier for this device.\"\"\"\n return self._unique_id\n\n"}}},{"rowIdx":332,"cells":{"text":{"kind":"string","value":"\nfrom datetime import timedelta\nimport logging\n\nfrom pyblockchain import get_balance, validate_address\nimport voluptuous as vol\n\nfrom homeassistant.components.sensor import PLATFORM_SCHEMA\nfrom homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME\nimport homeassistant.helpers.config_validation as cv\nfrom homeassistant.helpers.entity import Entity\n\n_LOGGER = logging.getLogger(__name__)\n\nATTRIBUTION = \"Data provided by blockchain.com\"\n\nCONF_ADDRESSES = \"addresses\"\n\nDEFAULT_NAME = \"Bitcoin Balance\"\n\nICON = \"mdi:currency-btc\"\n\nSCAN_INTERVAL = timedelta(minutes=5)\n\nPLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(\n {\n vol.Required(CONF_ADDRESSES): [cv.string],\n vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,\n }\n)\n\n\ndef setup_platform(hass, config, add_entities, discovery_info=None):\n \"\"\"Set up the Blockchain.com sensors.\"\"\"\n\n addresses = config[CONF_ADDRESSES]\n name = config[CONF_NAME]\n\n for address in addresses:\n if not 
validate_address(address):\n _LOGGER.error(\"Bitcoin address is not valid: %s\", address)\n return False\n\n add_entities([BlockchainSensor(name, addresses)], True)\n\n\nclass BlockchainSensor(Entity):\n \"\"\"Representation of a Blockchain.com sensor.\"\"\"\n\n def __init__(self, name, addresses):\n \"\"\"Initialize the sensor.\"\"\"\n self._name = name\n self.addresses = addresses\n self._state = None\n self._unit_of_measurement = \"BTC\"\n\n @property\n def name(self):\n \"\"\"Return the name of the sensor.\"\"\"\n return self._name\n\n @property\n def state(self):\n \"\"\"Return the state of the sensor.\"\"\"\n return self._state\n\n @property\n def unit_of_measurement(self):\n \"\"\"Return the unit of measurement this sensor expresses itself in.\"\"\"\n return self._unit_of_measurement\n\n @property\n def icon(self):\n \"\"\"Return the icon to use in the frontend, if any.\"\"\"\n return ICON\n\n @property\n def device_state_attributes(self):\n \"\"\"Return the state attributes of the sensor.\"\"\"\n return {ATTR_ATTRIBUTION: ATTRIBUTION}\n\n def update(self):\n \"\"\"Get the latest state of the sensor.\"\"\"\n\n self._state = get_balance(self.addresses)\n\n"}}},{"rowIdx":333,"cells":{"text":{"kind":"string","value":"\nimport os\nimport json\nfrom app.utils.HookDataParse import (\n get_repo_name,\n get_repo_branch,\n get_push_name,\n get_push_email\n)\n\nWEBHOOKDATA_DIR = os.path.join(os.path.dirname(__file__), 'webhookdata')\nWEBHOOKDATA = {}\nfor filename in os.listdir(WEBHOOKDATA_DIR):\n name = os.path.splitext(filename)[0]\n with open(os.path.join(WEBHOOKDATA_DIR, filename)) as f:\n data = json.load(f)\n WEBHOOKDATA[name] = data\n\n\ndef test():\n for name, data in WEBHOOKDATA.items():\n print('\\n' + name.center(60, '-'))\n print(get_repo_name(data))\n print(get_repo_branch(data))\n print(get_push_name(data))\n print(get_push_email(data))\n\n"}}},{"rowIdx":334,"cells":{"text":{"kind":"string","value":"\nimport os\n\nfrom babelfish import Language, language_converters\nimport pytest\nfrom vcr import VCR\n\nfrom subliminal.providers.thesubdb import TheSubDBProvider, TheSubDBSubtitle\n\n\nvcr = VCR(path_transformer=lambda path: path + '.yaml',\n record_mode=os.environ.get('VCR_RECORD_MODE', 'once'),\n cassette_library_dir=os.path.realpath(os.path.join('tests', 'cassettes', 'thesubdb')))\n\n\n@pytest.mark.converter\ndef test_converter_convert_alpha3_country():\n assert language_converters['thesubdb'].convert('por', 'BR') == 'pt'\n\n\n@pytest.mark.converter\ndef test_converter_convert_alpha3():\n assert language_converters['thesubdb'].convert('eng') == 'en'\n\n\n@pytest.mark.converter\ndef test_converter_convert_alpha3_alpha2_converter():\n assert language_converters['thesubdb'].convert('fra') == 'fr'\n\n\n@pytest.mark.converter\ndef test_converter_reverse():\n assert language_converters['thesubdb'].reverse('en') == ('eng', )\n\n\n@pytest.mark.converter\ndef test_converter_reverse_alpha3_country():\n assert language_converters['thesubdb'].reverse('pt') == ('por', 'BR')\n\n\ndef test_get_matches(movies):\n subtitle = TheSubDBSubtitle(Language('eng'), 'ad32876133355929d814457537e12dc2')\n matches = subtitle.get_matches(movies['man_of_steel'])\n assert matches == {'hash'}\n\n\ndef test_get_matches_no_match(episodes):\n subtitle = TheSubDBSubtitle(Language('eng'), 'ad32876133355929d814457537e12dc2')\n matches = subtitle.get_matches(episodes['got_s03e10'])\n assert matches == set()\n\n\n@pytest.mark.integration\n@vcr.use_cassette\ndef test_query(movies):\n video = movies['man_of_steel']\n 
expected_languages = {Language('eng'), Language('por', 'BR')}\n with TheSubDBProvider() as provider:\n subtitles = provider.query(video.hashes['thesubdb'])\n assert len(subtitles) == 2\n assert {subtitle.language for subtitle in subtitles} == expected_languages\n\n\n@pytest.mark.integration\n@vcr.use_cassette\ndef test_query_wrong_hash():\n with TheSubDBProvider() as provider:\n subtitles = provider.query('11223344556677899877665544332211')\n assert len(subtitles) == 0\n\n\n@pytest.mark.integration\n@vcr.use_cassette\ndef test_list_subtitles(episodes):\n video = episodes['bbt_s07e05']\n languages = {Language('eng'), Language('fra')}\n with TheSubDBProvider() as provider:\n subtitles = provider.list_subtitles(video, languages)\n assert len(subtitles) == 2\n assert {subtitle.language for subtitle in subtitles} == languages\n\n\n@pytest.mark.integration\n@vcr.use_cassette\ndef test_download_subtitle(episodes):\n video = episodes['bbt_s07e05']\n languages = {Language('eng'), Language('fra')}\n with TheSubDBProvider() as provider:\n subtitles = provider.list_subtitles(video, languages)\n provider.download_subtitle(subtitles[0])\n assert subtitles[0].content is not None\n assert subtitles[0].is_valid() is True\n\n"}}},{"rowIdx":335,"cells":{"text":{"kind":"string","value":"\nimport logging\n\nfrom tuyaha import TuyaApi\nfrom tuyaha.tuyaapi import TuyaAPIException, TuyaNetException, TuyaServerException\nimport voluptuous as vol\n\nfrom homeassistant import config_entries\nfrom homeassistant.const import (\n CONF_PASSWORD,\n CONF_PLATFORM,\n CONF_UNIT_OF_MEASUREMENT,\n CONF_USERNAME,\n ENTITY_MATCH_NONE,\n TEMP_CELSIUS,\n TEMP_FAHRENHEIT,\n)\nfrom homeassistant.core import callback\nimport homeassistant.helpers.config_validation as cv\n\n# pylint:disable=unused-import\nfrom .const import (\n CONF_BRIGHTNESS_RANGE_MODE,\n CONF_COUNTRYCODE,\n CONF_CURR_TEMP_DIVIDER,\n CONF_DISCOVERY_INTERVAL,\n CONF_EXT_TEMP_SENSOR,\n CONF_MAX_KELVIN,\n CONF_MAX_TEMP,\n CONF_MIN_KELVIN,\n CONF_MIN_TEMP,\n CONF_QUERY_DEVICE,\n CONF_QUERY_INTERVAL,\n CONF_SUPPORT_COLOR,\n CONF_TEMP_DIVIDER,\n CONF_TUYA_MAX_COLTEMP,\n DEFAULT_DISCOVERY_INTERVAL,\n DEFAULT_QUERY_INTERVAL,\n DEFAULT_TUYA_MAX_COLTEMP,\n DOMAIN,\n TUYA_DATA,\n TUYA_PLATFORMS,\n TUYA_TYPE_NOT_QUERY,\n)\n\n_LOGGER = logging.getLogger(__name__)\n\nCONF_LIST_DEVICES = \"list_devices\"\n\nDATA_SCHEMA_USER = vol.Schema(\n {\n vol.Required(CONF_USERNAME): str,\n vol.Required(CONF_PASSWORD): str,\n vol.Required(CONF_COUNTRYCODE): vol.Coerce(int),\n vol.Required(CONF_PLATFORM): vol.In(TUYA_PLATFORMS),\n }\n)\n\nERROR_DEV_MULTI_TYPE = \"dev_multi_type\"\nERROR_DEV_NOT_CONFIG = \"dev_not_config\"\nERROR_DEV_NOT_FOUND = \"dev_not_found\"\n\nRESULT_AUTH_FAILED = \"invalid_auth\"\nRESULT_CONN_ERROR = \"cannot_connect\"\nRESULT_SUCCESS = \"success\"\n\nRESULT_LOG_MESSAGE = {\n RESULT_AUTH_FAILED: \"Invalid credential\",\n RESULT_CONN_ERROR: \"Connection error\",\n}\n\nTUYA_TYPE_CONFIG = [\"climate\", \"light\"]\n\n\nclass TuyaConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):\n \"\"\"Handle a tuya config flow.\"\"\"\n\n VERSION = 1\n CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL\n\n def __init__(self):\n \"\"\"Initialize flow.\"\"\"\n self._country_code = None\n self._password = None\n self._platform = None\n self._username = None\n self._is_import = False\n\n def _save_entry(self):\n return self.async_create_entry(\n title=self._username,\n data={\n CONF_COUNTRYCODE: self._country_code,\n CONF_PASSWORD: self._password,\n CONF_PLATFORM: self._platform,\n 
CONF_USERNAME: self._username,\n },\n )\n\n def _try_connect(self):\n \"\"\"Try to connect and check auth.\"\"\"\n tuya = TuyaApi()\n try:\n tuya.init(\n self._username, self._password, self._country_code, self._platform\n )\n except (TuyaNetException, TuyaServerException):\n return RESULT_CONN_ERROR\n except TuyaAPIException:\n return RESULT_AUTH_FAILED\n\n return RESULT_SUCCESS\n\n async def async_step_import(self, user_input=None):\n \"\"\"Handle configuration by yaml file.\"\"\"\n self._is_import = True\n return await self.async_step_user(user_input)\n\n async def async_step_user(self, user_input=None):\n \"\"\"Handle a flow initialized by the user.\"\"\"\n if self._async_current_entries():\n return self.async_abort(reason=\"single_instance_allowed\")\n\n errors = {}\n\n if user_input is not None:\n\n self._country_code = str(user_input[CONF_COUNTRYCODE])\n self._password = user_input[CONF_PASSWORD]\n self._platform = user_input[CONF_PLATFORM]\n self._username = user_input[CONF_USERNAME]\n\n result = await self.hass.async_add_executor_job(self._try_connect)\n\n if result == RESULT_SUCCESS:\n return self._save_entry()\n if result != RESULT_AUTH_FAILED or self._is_import:\n if self._is_import:\n _LOGGER.error(\n \"Error importing from configuration.yaml: %s\",\n RESULT_LOG_MESSAGE.get(result, \"Generic Error\"),\n )\n return self.async_abort(reason=result)\n errors[\"base\"] = result\n\n return self.async_show_form(\n step_id=\"user\", data_schema=DATA_SCHEMA_USER, errors=errors\n )\n\n @staticmethod\n @callback\n def async_get_options_flow(config_entry):\n \"\"\"Get the options flow for this handler.\"\"\"\n return OptionsFlowHandler(config_entry)\n\n\nclass OptionsFlowHandler(config_entries.OptionsFlow):\n \"\"\"Handle a option flow for Tuya.\"\"\"\n\n def __init__(self, config_entry: config_entries.ConfigEntry):\n \"\"\"Initialize options flow.\"\"\"\n self.config_entry = config_entry\n self._conf_devs_id = None\n self._conf_devs_option = {}\n self._form_error = None\n\n def _get_form_error(self):\n \"\"\"Set the error to be shown in the options form.\"\"\"\n errors = {}\n if self._form_error:\n errors[\"base\"] = self._form_error\n self._form_error = None\n return errors\n\n def _get_tuya_devices_filtered(self, types, exclude_mode=False, type_prefix=True):\n \"\"\"Get the list of Tuya device to filtered by types.\"\"\"\n config_list = {}\n types_filter = set(types)\n tuya = self.hass.data[DOMAIN][TUYA_DATA]\n devices_list = tuya.get_all_devices()\n for device in devices_list:\n dev_type = device.device_type()\n exclude = (\n dev_type in types_filter\n if exclude_mode\n else dev_type not in types_filter\n )\n if exclude:\n continue\n dev_id = device.object_id()\n if type_prefix:\n dev_id = f\"{dev_type}-{dev_id}\"\n config_list[dev_id] = f\"{device.name()} ({dev_type})\"\n\n return config_list\n\n def _get_device(self, dev_id):\n \"\"\"Get specific device from tuya library.\"\"\"\n tuya = self.hass.data[DOMAIN][TUYA_DATA]\n return tuya.get_device_by_id(dev_id)\n\n def _save_config(self, data):\n \"\"\"Save the updated options.\"\"\"\n curr_conf = self.config_entry.options.copy()\n curr_conf.update(data)\n curr_conf.update(self._conf_devs_option)\n\n return self.async_create_entry(title=\"\", data=curr_conf)\n\n async def _async_device_form(self, devs_id):\n \"\"\"Return configuration form for devices.\"\"\"\n conf_devs_id = []\n for count, dev_id in enumerate(devs_id):\n device_info = dev_id.split(\"-\")\n if count == 0:\n device_type = device_info[0]\n device_id = device_info[1]\n 
elif device_type != device_info[0]:\n self._form_error = ERROR_DEV_MULTI_TYPE\n return await self.async_step_init()\n conf_devs_id.append(device_info[1])\n\n device = self._get_device(device_id)\n if not device:\n self._form_error = ERROR_DEV_NOT_FOUND\n return await self.async_step_init()\n\n curr_conf = self._conf_devs_option.get(\n device_id, self.config_entry.options.get(device_id, {})\n )\n\n config_schema = await self._get_device_schema(device_type, curr_conf, device)\n if not config_schema:\n self._form_error = ERROR_DEV_NOT_CONFIG\n return await self.async_step_init()\n\n self._conf_devs_id = conf_devs_id\n device_name = (\n \"(multiple devices selected)\" if len(conf_devs_id) > 1 else device.name()\n )\n\n return self.async_show_form(\n step_id=\"device\",\n data_schema=config_schema,\n description_placeholders={\n \"device_type\": device_type,\n \"device_name\": device_name,\n },\n )\n\n async def async_step_init(self, user_input=None):\n \"\"\"Handle options flow.\"\"\"\n if user_input is not None:\n dev_ids = user_input.get(CONF_LIST_DEVICES)\n if dev_ids:\n return await self.async_step_device(None, dev_ids)\n\n user_input.pop(CONF_LIST_DEVICES, [])\n return self._save_config(data=user_input)\n\n data_schema = vol.Schema(\n {\n vol.Optional(\n CONF_DISCOVERY_INTERVAL,\n default=self.config_entry.options.get(\n CONF_DISCOVERY_INTERVAL, DEFAULT_DISCOVERY_INTERVAL\n ),\n ): vol.All(vol.Coerce(int), vol.Clamp(min=30, max=900)),\n }\n )\n\n query_devices = self._get_tuya_devices_filtered(\n TUYA_TYPE_NOT_QUERY, True, False\n )\n if query_devices:\n devices = {ENTITY_MATCH_NONE: \"Default\"}\n devices.update(query_devices)\n def_val = self.config_entry.options.get(CONF_QUERY_DEVICE)\n if not def_val or not query_devices.get(def_val):\n def_val = ENTITY_MATCH_NONE\n data_schema = data_schema.extend(\n {\n vol.Optional(\n CONF_QUERY_INTERVAL,\n default=self.config_entry.options.get(\n CONF_QUERY_INTERVAL, DEFAULT_QUERY_INTERVAL\n ),\n ): vol.All(vol.Coerce(int), vol.Clamp(min=30, max=240)),\n vol.Optional(CONF_QUERY_DEVICE, default=def_val): vol.In(devices),\n }\n )\n\n config_devices = self._get_tuya_devices_filtered(TUYA_TYPE_CONFIG, False, True)\n if config_devices:\n data_schema = data_schema.extend(\n {vol.Optional(CONF_LIST_DEVICES): cv.multi_select(config_devices)}\n )\n\n return self.async_show_form(\n step_id=\"init\",\n data_schema=data_schema,\n errors=self._get_form_error(),\n )\n\n async def async_step_device(self, user_input=None, dev_ids=None):\n \"\"\"Handle options flow for device.\"\"\"\n if dev_ids is not None:\n return await self._async_device_form(dev_ids)\n if user_input is not None:\n for device_id in self._conf_devs_id:\n self._conf_devs_option[device_id] = user_input\n\n return await self.async_step_init()\n\n async def _get_device_schema(self, device_type, curr_conf, device):\n \"\"\"Return option schema for device.\"\"\"\n if device_type == \"light\":\n return self._get_light_schema(curr_conf, device)\n if device_type == \"climate\":\n entities_list = await _get_entities_matching_domains(self.hass, [\"sensor\"])\n return self._get_climate_schema(curr_conf, device, entities_list)\n return None\n\n @staticmethod\n def _get_light_schema(curr_conf, device):\n \"\"\"Create option schema for light device.\"\"\"\n min_kelvin = device.max_color_temp()\n max_kelvin = device.min_color_temp()\n\n config_schema = vol.Schema(\n {\n vol.Optional(\n CONF_SUPPORT_COLOR,\n default=curr_conf.get(CONF_SUPPORT_COLOR, False),\n ): bool,\n vol.Optional(\n 
CONF_BRIGHTNESS_RANGE_MODE,\n default=curr_conf.get(CONF_BRIGHTNESS_RANGE_MODE, 0),\n ): vol.In({0: \"Range 1-255\", 1: \"Range 10-1000\"}),\n vol.Optional(\n CONF_MIN_KELVIN,\n default=curr_conf.get(CONF_MIN_KELVIN, min_kelvin),\n ): vol.All(vol.Coerce(int), vol.Clamp(min=min_kelvin, max=max_kelvin)),\n vol.Optional(\n CONF_MAX_KELVIN,\n default=curr_conf.get(CONF_MAX_KELVIN, max_kelvin),\n ): vol.All(vol.Coerce(int), vol.Clamp(min=min_kelvin, max=max_kelvin)),\n vol.Optional(\n CONF_TUYA_MAX_COLTEMP,\n default=curr_conf.get(\n CONF_TUYA_MAX_COLTEMP, DEFAULT_TUYA_MAX_COLTEMP\n ),\n ): vol.All(\n vol.Coerce(int),\n vol.Clamp(\n min=DEFAULT_TUYA_MAX_COLTEMP, max=DEFAULT_TUYA_MAX_COLTEMP * 10\n ),\n ),\n }\n )\n\n return config_schema\n\n @staticmethod\n def _get_climate_schema(curr_conf, device, entities_list):\n \"\"\"Create option schema for climate device.\"\"\"\n unit = device.temperature_unit()\n def_unit = TEMP_FAHRENHEIT if unit == \"FAHRENHEIT\" else TEMP_CELSIUS\n entities_list.insert(0, ENTITY_MATCH_NONE)\n\n config_schema = vol.Schema(\n {\n vol.Optional(\n CONF_UNIT_OF_MEASUREMENT,\n default=curr_conf.get(CONF_UNIT_OF_MEASUREMENT, def_unit),\n ): vol.In({TEMP_CELSIUS: \"Celsius\", TEMP_FAHRENHEIT: \"Fahrenheit\"}),\n vol.Optional(\n CONF_TEMP_DIVIDER,\n default=curr_conf.get(CONF_TEMP_DIVIDER, 0),\n ): vol.All(vol.Coerce(int), vol.Clamp(min=0)),\n vol.Optional(\n CONF_CURR_TEMP_DIVIDER,\n default=curr_conf.get(CONF_CURR_TEMP_DIVIDER, 0),\n ): vol.All(vol.Coerce(int), vol.Clamp(min=0)),\n vol.Optional(\n CONF_MIN_TEMP,\n default=curr_conf.get(CONF_MIN_TEMP, 0),\n ): int,\n vol.Optional(\n CONF_MAX_TEMP,\n default=curr_conf.get(CONF_MAX_TEMP, 0),\n ): int,\n vol.Optional(\n CONF_EXT_TEMP_SENSOR,\n default=curr_conf.get(CONF_EXT_TEMP_SENSOR, ENTITY_MATCH_NONE),\n ): vol.In(entities_list),\n }\n )\n\n return config_schema\n\n\nasync def _get_entities_matching_domains(hass, domains):\n \"\"\"List entities in the given domains.\"\"\"\n included_domains = set(domains)\n entity_ids = hass.states.async_entity_ids(included_domains)\n entity_ids.sort()\n return entity_ids\n\n"}}},{"rowIdx":336,"cells":{"text":{"kind":"string","value":"\nfrom Handler import Handler\nimport MySQLdb\n\n\nclass MySQLHandler(Handler):\n \"\"\"\n Implements the abstract Handler class, sending data to a mysql table\n \"\"\"\n conn = None\n\n def __init__(self, config=None):\n \"\"\"\n Create a new instance of the MySQLHandler class\n \"\"\"\n # Initialize Handler\n Handler.__init__(self, config)\n\n # Initialize Options\n self.hostname = self.config['hostname']\n self.port = int(self.config['port'])\n self.username = self.config['username']\n self.password = self.config['password']\n self.database = self.config['database']\n self.table = self.config['table']\n self.col_time = self.config['col_time']\n self.col_metric = self.config['col_metric']\n self.col_value = self.config['col_value']\n\n # Connect\n self._connect()\n\n def get_default_config_help(self):\n \"\"\"\n Returns the help text for the configuration options for this handler\n \"\"\"\n config = super(MySQLHandler, self).get_default_config_help()\n\n config.update({\n })\n\n return config\n\n def get_default_config(self):\n \"\"\"\n Return the default config for the handler\n \"\"\"\n config = super(MySQLHandler, self).get_default_config()\n\n config.update({\n })\n\n return config\n\n def __del__(self):\n \"\"\"\n Destroy instance of the MySQLHandler class\n \"\"\"\n self._close()\n\n def process(self, metric):\n \"\"\"\n Process a metric\n \"\"\"\n # 
Just send the data\n self._send(str(metric))\n\n def _send(self, data):\n \"\"\"\n Insert the data\n \"\"\"\n data = data.strip().split(' ')\n try:\n cursor = self.conn.cursor()\n cursor.execute(\"INSERT INTO %s (%s, %s, %s) VALUES(%%s, %%s, %%s)\"\n % (self.table, self.col_metric,\n self.col_time, self.col_value),\n (data[0], data[2], data[1]))\n cursor.close()\n self.conn.commit()\n except BaseException as e:\n # Log Error\n self.log.error(\"MySQLHandler: Failed sending data. %s.\", e)\n # Attempt to restablish connection\n self._connect()\n\n def _connect(self):\n \"\"\"\n Connect to the MySQL server\n \"\"\"\n self._close()\n self.conn = MySQLdb.Connect(host=self.hostname,\n port=self.port,\n user=self.username,\n passwd=self.password,\n db=self.database)\n\n def _close(self):\n \"\"\"\n Close the connection\n \"\"\"\n if self.conn:\n self.conn.commit()\n self.conn.close()\n\n"}}},{"rowIdx":337,"cells":{"text":{"kind":"string","value":"\nimport requests\n\nfrom homeassistant.components.switch import SwitchEntity\nfrom homeassistant.const import (\n ATTR_TEMPERATURE,\n CONF_DEVICES,\n ENERGY_KILO_WATT_HOUR,\n TEMP_CELSIUS,\n)\n\nfrom .const import (\n ATTR_STATE_DEVICE_LOCKED,\n ATTR_STATE_LOCKED,\n ATTR_TEMPERATURE_UNIT,\n ATTR_TOTAL_CONSUMPTION,\n ATTR_TOTAL_CONSUMPTION_UNIT,\n CONF_CONNECTIONS,\n DOMAIN as FRITZBOX_DOMAIN,\n LOGGER,\n)\n\nATTR_TOTAL_CONSUMPTION_UNIT_VALUE = ENERGY_KILO_WATT_HOUR\n\n\nasync def async_setup_entry(hass, config_entry, async_add_entities):\n \"\"\"Set up the Fritzbox smarthome switch from config_entry.\"\"\"\n entities = []\n devices = hass.data[FRITZBOX_DOMAIN][CONF_DEVICES]\n fritz = hass.data[FRITZBOX_DOMAIN][CONF_CONNECTIONS][config_entry.entry_id]\n\n for device in await hass.async_add_executor_job(fritz.get_devices):\n if device.has_switch and device.ain not in devices:\n entities.append(FritzboxSwitch(device, fritz))\n devices.add(device.ain)\n\n async_add_entities(entities)\n\n\nclass FritzboxSwitch(SwitchEntity):\n \"\"\"The switch class for Fritzbox switches.\"\"\"\n\n def __init__(self, device, fritz):\n \"\"\"Initialize the switch.\"\"\"\n self._device = device\n self._fritz = fritz\n\n @property\n def device_info(self):\n \"\"\"Return device specific attributes.\"\"\"\n return {\n \"name\": self.name,\n \"identifiers\": {(FRITZBOX_DOMAIN, self._device.ain)},\n \"manufacturer\": self._device.manufacturer,\n \"model\": self._device.productname,\n \"sw_version\": self._device.fw_version,\n }\n\n @property\n def unique_id(self):\n \"\"\"Return the unique ID of the device.\"\"\"\n return self._device.ain\n\n @property\n def available(self):\n \"\"\"Return if switch is available.\"\"\"\n return self._device.present\n\n @property\n def name(self):\n \"\"\"Return the name of the device.\"\"\"\n return self._device.name\n\n @property\n def is_on(self):\n \"\"\"Return true if the switch is on.\"\"\"\n return self._device.switch_state\n\n def turn_on(self, **kwargs):\n \"\"\"Turn the switch on.\"\"\"\n self._device.set_switch_state_on()\n\n def turn_off(self, **kwargs):\n \"\"\"Turn the switch off.\"\"\"\n self._device.set_switch_state_off()\n\n def update(self):\n \"\"\"Get latest data and states from the device.\"\"\"\n try:\n self._device.update()\n except requests.exceptions.HTTPError as ex:\n LOGGER.warning(\"Fritzhome connection error: %s\", ex)\n self._fritz.login()\n\n @property\n def device_state_attributes(self):\n \"\"\"Return the state attributes of the device.\"\"\"\n attrs = {}\n attrs[ATTR_STATE_DEVICE_LOCKED] = self._device.device_lock\n 
attrs[ATTR_STATE_LOCKED] = self._device.lock\n\n if self._device.has_powermeter:\n attrs[\n ATTR_TOTAL_CONSUMPTION\n ] = f\"{((self._device.energy or 0.0) / 1000):.3f}\"\n attrs[ATTR_TOTAL_CONSUMPTION_UNIT] = ATTR_TOTAL_CONSUMPTION_UNIT_VALUE\n if self._device.has_temperature_sensor:\n attrs[ATTR_TEMPERATURE] = str(\n self.hass.config.units.temperature(\n self._device.temperature, TEMP_CELSIUS\n )\n )\n attrs[ATTR_TEMPERATURE_UNIT] = self.hass.config.units.temperature_unit\n return attrs\n\n @property\n def current_power_w(self):\n \"\"\"Return the current power usage in W.\"\"\"\n return self._device.power / 1000\n\n"}}},{"rowIdx":338,"cells":{"text":{"kind":"string","value":"\nimport base64\nimport io\n\nimport aiohttp\nimport pytest\nfrom voluptuous.error import MultipleInvalid\n\nfrom homeassistant.components.color_extractor import (\n ATTR_PATH,\n ATTR_URL,\n DOMAIN,\n SERVICE_TURN_ON,\n)\nfrom homeassistant.components.light import (\n ATTR_BRIGHTNESS,\n ATTR_BRIGHTNESS_PCT,\n ATTR_RGB_COLOR,\n DOMAIN as LIGHT_DOMAIN,\n SERVICE_TURN_OFF as LIGHT_SERVICE_TURN_OFF,\n)\nfrom homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON\nfrom homeassistant.setup import async_setup_component\nimport homeassistant.util.color as color_util\n\nfrom tests.async_mock import Mock, mock_open, patch\nfrom tests.common import load_fixture\n\nLIGHT_ENTITY = \"light.kitchen_lights\"\nCLOSE_THRESHOLD = 10\n\n\ndef _close_enough(actual_rgb, testing_rgb):\n \"\"\"Validate the given RGB value is in acceptable tolerance.\"\"\"\n # Convert the given RGB values to hue / saturation and then back again\n # as it wasn't reading the same RGB value set against it.\n actual_hs = color_util.color_RGB_to_hs(*actual_rgb)\n actual_rgb = color_util.color_hs_to_RGB(*actual_hs)\n\n testing_hs = color_util.color_RGB_to_hs(*testing_rgb)\n testing_rgb = color_util.color_hs_to_RGB(*testing_hs)\n\n actual_red, actual_green, actual_blue = actual_rgb\n testing_red, testing_green, testing_blue = testing_rgb\n\n r_diff = abs(actual_red - testing_red)\n g_diff = abs(actual_green - testing_green)\n b_diff = abs(actual_blue - testing_blue)\n\n return (\n r_diff <= CLOSE_THRESHOLD\n and g_diff <= CLOSE_THRESHOLD\n and b_diff <= CLOSE_THRESHOLD\n )\n\n\n@pytest.fixture(autouse=True)\nasync def setup_light(hass):\n \"\"\"Configure our light component to work against for testing.\"\"\"\n assert await async_setup_component(\n hass, LIGHT_DOMAIN, {LIGHT_DOMAIN: {\"platform\": \"demo\"}}\n )\n await hass.async_block_till_done()\n\n state = hass.states.get(LIGHT_ENTITY)\n assert state\n\n # Validate starting values\n assert state.state == STATE_ON\n assert state.attributes.get(ATTR_BRIGHTNESS) == 180\n assert state.attributes.get(ATTR_RGB_COLOR) == (255, 63, 111)\n\n await hass.services.async_call(\n LIGHT_DOMAIN,\n LIGHT_SERVICE_TURN_OFF,\n {ATTR_ENTITY_ID: LIGHT_ENTITY},\n blocking=True,\n )\n await hass.async_block_till_done()\n\n state = hass.states.get(LIGHT_ENTITY)\n\n assert state\n assert state.state == STATE_OFF\n\n\nasync def test_missing_url_and_path(hass):\n \"\"\"Test that nothing happens when url and path are missing.\"\"\"\n # Load our color_extractor component\n await async_setup_component(\n hass,\n DOMAIN,\n {},\n )\n await hass.async_block_till_done()\n\n # Validate pre service call\n state = hass.states.get(LIGHT_ENTITY)\n assert state\n assert state.state == STATE_OFF\n\n # Missing url and path attributes, should cause error log\n service_data = {\n ATTR_ENTITY_ID: LIGHT_ENTITY,\n }\n\n with 
pytest.raises(MultipleInvalid):\n await hass.services.async_call(\n DOMAIN, SERVICE_TURN_ON, service_data, blocking=True\n )\n await hass.async_block_till_done()\n\n # check light is still off, unchanged due to bad parameters on service call\n state = hass.states.get(LIGHT_ENTITY)\n assert state\n assert state.state == STATE_OFF\n\n\nasync def _async_load_color_extractor_url(hass, service_data):\n # Load our color_extractor component\n await async_setup_component(\n hass,\n DOMAIN,\n {},\n )\n await hass.async_block_till_done()\n\n # Validate pre service call\n state = hass.states.get(LIGHT_ENTITY)\n assert state\n assert state.state == STATE_OFF\n\n # Call the shared service, our above mock should return the base64 decoded fixture 1x1 pixel\n assert await hass.services.async_call(\n DOMAIN, SERVICE_TURN_ON, service_data, blocking=True\n )\n\n await hass.async_block_till_done()\n\n\nasync def test_url_success(hass, aioclient_mock):\n \"\"\"Test that a successful image GET translate to light RGB.\"\"\"\n service_data = {\n ATTR_URL: \"http://example.com/images/logo.png\",\n ATTR_ENTITY_ID: LIGHT_ENTITY,\n # Standard light service data which we pass\n ATTR_BRIGHTNESS_PCT: 50,\n }\n\n # Mock the HTTP Response with a base64 encoded 1x1 pixel\n aioclient_mock.get(\n url=service_data[ATTR_URL],\n content=base64.b64decode(load_fixture(\"color_extractor_url.txt\")),\n )\n\n # Allow access to this URL using the proper mechanism\n hass.config.allowlist_external_urls.add(\"http://example.com/images/\")\n\n await _async_load_color_extractor_url(hass, service_data)\n\n state = hass.states.get(LIGHT_ENTITY)\n assert state\n\n # Ensure we turned it on\n assert state.state == STATE_ON\n\n # Brightness has changed, optional service call field\n assert state.attributes[ATTR_BRIGHTNESS] == 128\n\n # Ensure the RGB values are correct\n assert _close_enough(state.attributes[ATTR_RGB_COLOR], (50, 100, 150))\n\n\nasync def test_url_not_allowed(hass, aioclient_mock):\n \"\"\"Test that a not allowed external URL fails to turn light on.\"\"\"\n service_data = {\n ATTR_URL: \"http://denied.com/images/logo.png\",\n ATTR_ENTITY_ID: LIGHT_ENTITY,\n }\n\n await _async_load_color_extractor_url(hass, service_data)\n\n # Light has not been modified due to failure\n state = hass.states.get(LIGHT_ENTITY)\n assert state\n assert state.state == STATE_OFF\n\n\nasync def test_url_exception(hass, aioclient_mock):\n \"\"\"Test that a HTTPError fails to turn light on.\"\"\"\n service_data = {\n ATTR_URL: \"http://example.com/images/logo.png\",\n ATTR_ENTITY_ID: LIGHT_ENTITY,\n }\n\n # Don't let the URL not being allowed sway our exception test\n hass.config.allowlist_external_urls.add(\"http://example.com/images/\")\n\n # Mock the HTTP Response with an HTTPError\n aioclient_mock.get(url=service_data[ATTR_URL], exc=aiohttp.ClientError)\n\n await _async_load_color_extractor_url(hass, service_data)\n\n # Light has not been modified due to failure\n state = hass.states.get(LIGHT_ENTITY)\n assert state\n assert state.state == STATE_OFF\n\n\nasync def test_url_error(hass, aioclient_mock):\n \"\"\"Test that a HTTP Error (non 200) doesn't turn light on.\"\"\"\n service_data = {\n ATTR_URL: \"http://example.com/images/logo.png\",\n ATTR_ENTITY_ID: LIGHT_ENTITY,\n }\n\n # Don't let the URL not being allowed sway our exception test\n hass.config.allowlist_external_urls.add(\"http://example.com/images/\")\n\n # Mock the HTTP Response with a 400 Bad Request error\n aioclient_mock.get(url=service_data[ATTR_URL], status=400)\n\n await 
_async_load_color_extractor_url(hass, service_data)\n\n # Light has not been modified due to failure\n state = hass.states.get(LIGHT_ENTITY)\n assert state\n assert state.state == STATE_OFF\n\n\n@patch(\n \"builtins.open\",\n mock_open(read_data=base64.b64decode(load_fixture(\"color_extractor_file.txt\"))),\n create=True,\n)\ndef _get_file_mock(file_path):\n \"\"\"Convert file to BytesIO for testing due to PIL UnidentifiedImageError.\"\"\"\n _file = None\n\n with open(file_path) as file_handler:\n _file = io.BytesIO(file_handler.read())\n\n _file.name = \"color_extractor.jpg\"\n _file.seek(0)\n\n return _file\n\n\n@patch(\"os.path.isfile\", Mock(return_value=True))\n@patch(\"os.access\", Mock(return_value=True))\nasync def test_file(hass):\n \"\"\"Test that the file only service reads a file and translates to light RGB.\"\"\"\n service_data = {\n ATTR_PATH: \"/opt/image.png\",\n ATTR_ENTITY_ID: LIGHT_ENTITY,\n # Standard light service data which we pass\n ATTR_BRIGHTNESS_PCT: 100,\n }\n\n # Add our /opt/ path to the allowed list of paths\n hass.config.allowlist_external_dirs.add(\"/opt/\")\n\n await async_setup_component(hass, DOMAIN, {})\n await hass.async_block_till_done()\n\n # Verify pre service check\n state = hass.states.get(LIGHT_ENTITY)\n assert state\n assert state.state == STATE_OFF\n\n # Mock the file handler read with our 1x1 base64 encoded fixture image\n with patch(\"homeassistant.components.color_extractor._get_file\", _get_file_mock):\n await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, service_data)\n await hass.async_block_till_done()\n\n state = hass.states.get(LIGHT_ENTITY)\n\n assert state\n\n # Ensure we turned it on\n assert state.state == STATE_ON\n\n # And set the brightness\n assert state.attributes[ATTR_BRIGHTNESS] == 255\n\n # Ensure the RGB values are correct\n assert _close_enough(state.attributes[ATTR_RGB_COLOR], (25, 75, 125))\n\n\n@patch(\"os.path.isfile\", Mock(return_value=True))\n@patch(\"os.access\", Mock(return_value=True))\nasync def test_file_denied_dir(hass):\n \"\"\"Test that the file only service fails to read an image in a dir not explicitly allowed.\"\"\"\n service_data = {\n ATTR_PATH: \"/path/to/a/dir/not/allowed/image.png\",\n ATTR_ENTITY_ID: LIGHT_ENTITY,\n # Standard light service data which we pass\n ATTR_BRIGHTNESS_PCT: 100,\n }\n\n await async_setup_component(hass, DOMAIN, {})\n await hass.async_block_till_done()\n\n # Verify pre service check\n state = hass.states.get(LIGHT_ENTITY)\n assert state\n assert state.state == STATE_OFF\n\n # Mock the file handler read with our 1x1 base64 encoded fixture image\n with patch(\"homeassistant.components.color_extractor._get_file\", _get_file_mock):\n await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, service_data)\n await hass.async_block_till_done()\n\n state = hass.states.get(LIGHT_ENTITY)\n\n assert state\n\n # Ensure it's still off due to access error (dir not explicitly allowed)\n assert state.state == STATE_OFF\n\n"}}},{"rowIdx":339,"cells":{"text":{"kind":"string","value":"\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport inspect\nimport json\nimport os\nimport sys\n\nimport tensorflow as tf\nfrom tensorflow import errors\nfrom tensorflow.python.framework import dtypes\nfrom tensorflow.python.framework import ops\nfrom tensorflow.python.ops import array_ops\nfrom tensorflow.python.training import training\n\ndef _load_library(filename, lib=\"op\"):\n \"\"\"_load_library\"\"\"\n f = 
inspect.getfile(sys._getframe(1))  # pylint: disable=protected-access

  # Construct filename
  f = os.path.join(os.path.dirname(f), filename)
  filenames = [f]

  # Function to load the library, return True if file system library is loaded
  load_fn = tf.load_op_library if lib == "op" \
      else lambda f: tf.compat.v1.load_file_system_library(f) is None

  # Try to load all paths for file, fail if none succeed
  errs = []
  for f in filenames:
    try:
      l = load_fn(f)
      if l is not None:
        return l
    except errors.NotFoundError as e:
      errs.append(str(e))
  raise NotImplementedError(
      "unable to open file: " +
      "{}, from paths: {}\ncaused by: {}".format(filename, filenames, errs))

_gcs_config_so = _load_library("_gcs_config_ops.so")
gcs_configure_credentials = _gcs_config_so.gcs_configure_credentials
gcs_configure_block_cache = _gcs_config_so.gcs_configure_block_cache

class BlockCacheParams(object):  # pylint: disable=useless-object-inheritance
  """BlockCacheParams is a struct used for configuring the GCS Block Cache."""

  def __init__(self, block_size=None, max_bytes=None, max_staleness=None):
    self._block_size = block_size or 128 * 1024 * 1024
    self._max_bytes = max_bytes or 2 * self._block_size
    self._max_staleness = max_staleness or 0

  @property
  def block_size(self):
    return self._block_size

  @property
  def max_bytes(self):
    return self._max_bytes

  @property
  def max_staleness(self):
    return self._max_staleness

def configure_gcs(credentials=None, block_cache=None, device=None):
  """Configures the GCS file system for a given session.

  Warning: GCS `credentials` may be transmitted over the network unencrypted.
  Please ensure that the network is trusted before using this function. For
  users running code entirely within Google Cloud, your data is protected by
  encryption in between data centers. For more information, please take a look
  at https://cloud.google.com/security/encryption-in-transit/.

  Args:
    credentials: [Optional.] A JSON string.
    block_cache: [Optional.] A BlockCacheParams to configure the block cache.
    device: [Optional.] The device to place the configure ops.
  """
  def configure(credentials, block_cache):
    """Helper function to actually configure GCS."""
    if credentials:
      if isinstance(credentials, dict):
        credentials = json.dumps(credentials)
      creds = gcs_configure_credentials(credentials)
    else:
      creds = tf.constant(0)

    if block_cache:
      cache = gcs_configure_block_cache(
          max_cache_size=block_cache.max_bytes,
          block_size=block_cache.block_size,
          max_staleness=block_cache.max_staleness)
    else:
      cache = tf.constant(0)

    return tf.tuple([creds, cache])

  if device:
    with ops.device(device):
      return configure(credentials, block_cache)
  return configure(credentials, block_cache)

def configure_gcs_from_colab_auth(device='/job:worker/replica:0/task:0/device:CPU:0'):
  """ConfigureColabSession configures the GCS file system in Colab.

  Args:
    device: [Optional.] The device to place the configure ops.
  """
  # Read from the application default credentials (adc).
  adc_filename = os.environ.get(
      'GOOGLE_APPLICATION_CREDENTIALS', '/content/adc.json')
  with open(adc_filename) as f:
    data = json.load(f)
  return configure_gcs(credentials=data, device=device)
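# A hedged usage sketch for the helpers above; it is not part of the original
# module. The credentials path is hypothetical, and whether the returned ops run
# immediately depends on eager vs. graph execution. The device string reuses the
# default from configure_gcs_from_colab_auth().
import json

with open('/path/to/service-account.json') as f:  # hypothetical path
  creds = json.load(f)

cache = BlockCacheParams(block_size=16 * 1024 * 1024, max_bytes=256 * 1024 * 1024)
configure_gcs(credentials=creds, block_cache=cache,
              device='/job:worker/replica:0/task:0/device:CPU:0')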
import logging

from homeassistant.components.media_player import BrowseMedia
from homeassistant.components.media_player.const import (
    MEDIA_CLASS_ALBUM,
    MEDIA_CLASS_ARTIST,
    MEDIA_CLASS_DIRECTORY,
    MEDIA_CLASS_EPISODE,
    MEDIA_CLASS_MOVIE,
    MEDIA_CLASS_PLAYLIST,
    MEDIA_CLASS_SEASON,
    MEDIA_CLASS_TRACK,
    MEDIA_CLASS_TV_SHOW,
    MEDIA_CLASS_VIDEO,
)
from homeassistant.components.media_player.errors import BrowseError

from .const import DOMAIN


class UnknownMediaType(BrowseError):
    """Unknown media type."""


EXPANDABLES = ["album", "artist", "playlist", "season", "show"]
PLAYLISTS_BROWSE_PAYLOAD = {
    "title": "Playlists",
    "media_class": MEDIA_CLASS_DIRECTORY,
    "media_content_id": "all",
    "media_content_type": "playlists",
    "can_play": False,
    "can_expand": True,
}
SPECIAL_METHODS = {
    "On Deck": "onDeck",
    "Recently Added": "recentlyAdded",
}

ITEM_TYPE_MEDIA_CLASS = {
    "album": MEDIA_CLASS_ALBUM,
    "artist": MEDIA_CLASS_ARTIST,
    "episode": MEDIA_CLASS_EPISODE,
    "movie": MEDIA_CLASS_MOVIE,
    "playlist": MEDIA_CLASS_PLAYLIST,
    "season": MEDIA_CLASS_SEASON,
    "show": MEDIA_CLASS_TV_SHOW,
    "track": MEDIA_CLASS_TRACK,
    "video": MEDIA_CLASS_VIDEO,
}

_LOGGER = logging.getLogger(__name__)


def browse_media(
    entity_id, plex_server, media_content_type=None, media_content_id=None
):
    """Implement the websocket media browsing helper."""

    def build_item_response(payload):
        """Create response payload for the provided media query."""
        media = plex_server.lookup_media(**payload)

        if media is None:
            return None

        try:
            media_info = item_payload(media)
        except UnknownMediaType:
            return None
        if media_info.can_expand:
            media_info.children = []
            for item in media:
                try:
                    media_info.children.append(item_payload(item))
                except UnknownMediaType:
                    continue
        return media_info

    if media_content_id and ":" in media_content_id:
        media_content_id, special_folder = media_content_id.split(":")
    else:
        special_folder = None

    if (
        media_content_type
        and media_content_type == "server"
        and media_content_id != plex_server.machine_identifier
    ):
        raise BrowseError(
            f"Plex server with ID '{media_content_id}' is not associated with {entity_id}"
        )
    if special_folder:
        if media_content_type == "server":
            library_or_section = plex_server.library
            children_media_class = MEDIA_CLASS_DIRECTORY
            title = plex_server.friendly_name
        elif media_content_type == "library":
            library_or_section = plex_server.library.sectionByID(media_content_id)
            title = library_or_section.title
            try:
                children_media_class = ITEM_TYPE_MEDIA_CLASS[library_or_section.TYPE]
            except KeyError as err:
                raise BrowseError(
                    f"Unknown type received: {library_or_section.TYPE}"
                ) from err
        else:
            raise BrowseError(
                f"Media not found: {media_content_type} / {media_content_id}"
            )

        payload = {
            "title": title,
            "media_class": MEDIA_CLASS_DIRECTORY,
            "media_content_id": f"{media_content_id}:{special_folder}",
            "media_content_type": media_content_type,
            "can_play": False,
            "can_expand": True,
            "children": [],
            "children_media_class": children_media_class,
        }

        method = SPECIAL_METHODS[special_folder]
        items = getattr(library_or_section, method)()
        for item in items:
            try:
                payload["children"].append(item_payload(item))
            except UnknownMediaType:
                continue

        return BrowseMedia(**payload)

    try:
        if media_content_type in ["server", None]:
            return server_payload(plex_server)

        if media_content_type == "library":
            return library_payload(plex_server, media_content_id)

    except UnknownMediaType as err:
        raise BrowseError(
            f"Media not found: {media_content_type} / {media_content_id}"
        ) from err

    if media_content_type == "playlists":
        return playlists_payload(plex_server)

    payload = {
        "media_type": DOMAIN,
        "plex_key": int(media_content_id),
    }
    response = build_item_response(payload)
    if response is None:
        raise BrowseError(f"Media not found: {media_content_type} / {media_content_id}")
    return response


def item_payload(item):
    """Create response payload for a single media item."""
    try:
        media_class = ITEM_TYPE_MEDIA_CLASS[item.type]
    except KeyError as err:
        _LOGGER.debug("Unknown type received: %s", item.type)
        raise UnknownMediaType from err
    payload = {
        "title": item.title,
        "media_class": media_class,
        "media_content_id": str(item.ratingKey),
        "media_content_type": item.type,
        "can_play": True,
        "can_expand": item.type in EXPANDABLES,
    }
    if hasattr(item, "thumbUrl"):
        payload["thumbnail"] = item.thumbUrl

    return BrowseMedia(**payload)


def library_section_payload(section):
    """Create response payload for a single library section."""
    try:
        children_media_class = ITEM_TYPE_MEDIA_CLASS[section.TYPE]
    except KeyError as err:
        _LOGGER.debug("Unknown type received: %s", section.TYPE)
        raise UnknownMediaType from err
    return BrowseMedia(
        title=section.title,
        media_class=MEDIA_CLASS_DIRECTORY,
        media_content_id=section.key,
        media_content_type="library",
        can_play=False,
        can_expand=True,
        children_media_class=children_media_class,
    )


def special_library_payload(parent_payload, special_type):
    """Create response payload for special library folders."""
    title = f"{special_type} ({parent_payload.title})"
    return BrowseMedia(
        title=title,
        media_class=parent_payload.media_class,
        media_content_id=f"{parent_payload.media_content_id}:{special_type}",
        media_content_type=parent_payload.media_content_type,
        can_play=False,
        can_expand=True,
        children_media_class=parent_payload.children_media_class,
    )


def server_payload(plex_server):
    """Create response payload to describe libraries of the Plex server."""
    server_info = BrowseMedia(
        title=plex_server.friendly_name,
        media_class=MEDIA_CLASS_DIRECTORY,
        media_content_id=plex_server.machine_identifier,
        media_content_type="server",
        can_play=False,
        can_expand=True,
        children_media_class=MEDIA_CLASS_DIRECTORY,
    )
    server_info.children = []
    server_info.children.append(special_library_payload(server_info, "On Deck"))
    server_info.children.append(special_library_payload(server_info, "Recently Added"))
    for library in plex_server.library.sections():
        if library.type == "photo":
            continue
        server_info.children.append(library_section_payload(library))
    server_info.children.append(BrowseMedia(**PLAYLISTS_BROWSE_PAYLOAD))
    return server_info


def library_payload(plex_server, library_id):
    """Create response payload to describe contents of a specific library."""
    library = plex_server.library.sectionByID(library_id)
    library_info = library_section_payload(library)
    library_info.children = []
    library_info.children.append(special_library_payload(library_info, "On Deck"))
    library_info.children.append(
        special_library_payload(library_info, "Recently Added")
    )
    for item in library.all():
        try:
            library_info.children.append(item_payload(item))
        except UnknownMediaType:
            continue
    return library_info


def playlists_payload(plex_server):
    """Create response payload for all available playlists."""
    playlists_info = {**PLAYLISTS_BROWSE_PAYLOAD, "children": []}
    for playlist in plex_server.playlists():
        try:
            playlists_info["children"].append(item_payload(playlist))
        except UnknownMediaType:
            continue
    response = BrowseMedia(**playlists_info)
    response.children_media_class = MEDIA_CLASS_PLAYLIST
    return response
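# Illustration only: how the helpers above encode "special" folders in
# media_content_id. A client asking for the "On Deck" view of a library section
# sends an id like the one below; browse_media() splits it on ":" and maps the
# suffix through SPECIAL_METHODS. The section id "3" is a made-up example value.
media_content_id = "3:On Deck"
section_id, special_folder = media_content_id.split(":")
method_name = SPECIAL_METHODS[special_folder]  # -> "onDeck"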
import numpy as np
import unittest

from chainer import testing

from chainercv.visualizations import vis_semantic_segmentation

try:
    import matplotlib  # NOQA
    _available = True
except ImportError:
    _available = False


@testing.parameterize(*testing.product({
    'label_names': [None, ('class0', 'class1', 'class2')],
    'label_colors': [None, ((255, 0, 0), (0, 255, 0), (0, 0, 255))],
    'all_label_names_in_legend': [False, True],
    'no_img': [False, True],
}))
@unittest.skipUnless(_available, 'Matplotlib is not installed')
class TestVisSemanticSegmentation(unittest.TestCase):

    def setUp(self):
        if self.no_img:
            self.img = None
        else:
            self.img = np.random.randint(0, 255, size=(3, 32, 48))
        self.label = np.random.randint(
            -1, 3, size=(48, 64)).astype(np.int32)

    def test_vis_semantic_segmentation(self):
        ax, legend_handles = vis_semantic_segmentation(
            self.img, self.label,
            label_names=self.label_names, label_colors=self.label_colors,
            all_label_names_in_legend=self.all_label_names_in_legend)

        self.assertIsInstance(ax, matplotlib.axes.Axes)
        for handle in legend_handles:
            self.assertIsInstance(handle, matplotlib.patches.Patch)


@unittest.skipUnless(_available, 'Matplotlib is not installed')
class TestVisSemanticSegmentationInvalidArguments(unittest.TestCase):

    def test_vis_semantic_segmentation_mismatch_names_and_colors(self):
        label = np.random.randint(-1, 2, size=(48, 64)).astype(np.int32)
        with self.assertRaises(ValueError):
            vis_semantic_segmentation(
                None, label,
                label_names=('class0', 'class1', 'class2'),
                label_colors=((255, 0, 0), (0, 255, 0)))

    def test_vis_semantic_segmentation_exceed_value(self):
        label = np.random.randint(10, 20, size=(48, 64)).astype(np.int32)
        with self.assertRaises(ValueError):
            vis_semantic_segmentation(
                None, label,
                label_names=('class0', 'class1', 'class2'))


testing.run_module(__name__, __file__)


from homeassistant.components.group import GroupIntegrationRegistry
from homeassistant.const import STATE_OFF
from homeassistant.core import callback
from homeassistant.helpers.typing import HomeAssistantType

from . import (
    STATE_ECO,
    STATE_ELECTRIC,
    STATE_GAS,
    STATE_HEAT_PUMP,
    STATE_HIGH_DEMAND,
    STATE_PERFORMANCE,
)


@callback
def async_describe_on_off_states(
    hass: HomeAssistantType, registry: GroupIntegrationRegistry
) -> None:
    """Describe group on off states."""
    registry.on_off_states(
        {
            STATE_ECO,
            STATE_ELECTRIC,
            STATE_PERFORMANCE,
            STATE_HIGH_DEMAND,
            STATE_HEAT_PUMP,
            STATE_GAS,
        },
        STATE_OFF,
    )


import os.path
import subprocess

GIT_PATHS = [
    "/usr/lib/git",
    "/usr/lib/git-core",
    "/usr/libexec/git",
    "/usr/libexec/git-core",
]


def find_git_http_backend():
    """Find Git HTTP back-end."""
    if hasattr(find_git_http_backend, "result"):
        return find_git_http_backend.result

    try:
        path = subprocess.run(
            ["git", "--exec-path"],
            universal_newlines=True,
            check=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        ).stdout.strip()
        if path:
            GIT_PATHS.insert(0, path)
    except OSError:
        pass

    for path in GIT_PATHS:
        name = os.path.join(path, "git-http-backend")
        if os.path.exists(name):
            find_git_http_backend.result = name
            return name
    return None
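# A minimal, hedged example of calling the helper above; the printed messages
# are illustrative and not part of the original module.
if __name__ == "__main__":
    backend = find_git_http_backend()
    if backend:
        print(f"git-http-backend found at: {backend}")
    else:
        print("git-http-backend not found; install git or extend GIT_PATHS")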
from app.wraps.login_wrap import login_required
from app import app, v
from app.utils import ResponseUtil, RequestUtil, AuthUtil
from app.database.model import Collaborator, User


# get collaborator list
@app.route('/api/collaborator/list', methods=['GET'])
@login_required()
@v.param({'webhook_id': v.int()})
def api_collaborator_list(webhook_id):
    # login user
    user_id = RequestUtil.get_login_user().get('id', '')

    if not AuthUtil.has_readonly_auth(user_id, webhook_id):
        return ResponseUtil.standard_response(0, 'Permission deny!')

    collaborators = Collaborator.query.filter_by(webhook_id=webhook_id).all()
    collaborators = [collaborator.dict() for collaborator in collaborators]

    return ResponseUtil.standard_response(1, collaborators)


# add a new collaborator
@app.route('/api/collaborator/new', methods=['POST'])
@login_required()
@v.param({'webhook_id': v.int(), 'user_id': v.str()})
def api_collaborator_new(webhook_id, user_id):
    # login user
    login_user_id = RequestUtil.get_login_user().get('id', '')

    if login_user_id == user_id:
        return ResponseUtil.standard_response(0, '`%s` is Creator!' % user_id)

    if not AuthUtil.has_admin_auth(login_user_id, webhook_id):
        return ResponseUtil.standard_response(0, 'Permission deny!')

    collaborator = Collaborator.query.filter_by(webhook_id=webhook_id,
                                                user_id=user_id).first()

    # already exists
    if collaborator:
        return ResponseUtil.standard_response(0, 'Collaborator exist!')

    # Start adding: create the user if needed, then the collaborator
    user = User.query.get(user_id)
    if not user:
        user = User(id=user_id, name=user_id)
        user.save()
    collaborator = Collaborator(webhook_id=webhook_id, user=user)

    collaborator.save()

    return ResponseUtil.standard_response(1, collaborator.dict())


@app.route('/api/collaborator/delete', methods=['POST'])
@login_required()
@v.param({'collaborator_id': v.int()})
def api_collaborator_delete(collaborator_id):
    # login user
    user_id = RequestUtil.get_login_user().get('id', '')

    collaborator = Collaborator.query.get(collaborator_id)
    if not collaborator:
        return ResponseUtil.standard_response(0, 'Permission deny!')

    webhook_id = collaborator.webhook_id

    if not AuthUtil.has_admin_auth(user_id, webhook_id):
        return ResponseUtil.standard_response(0, 'Permission deny!')

    collaborator.delete()

    return ResponseUtil.standard_response(1, 'Success')


from cerberus.base import normalize_rulesset, UnconcernedValidator
from cerberus.schema import ValidatedSchema


class Validator(UnconcernedValidator):
    @property
    def allow_unknown(self):
        """
        If ``True`` unknown fields that are not defined in the schema will be ignored.
        If a mapping with a validation schema is given, any undefined field will be
        validated against its rules. Also see :ref:`allowing-the-unknown`.

        Type: :class:`bool` or any :term:`mapping`
        """
        return self._config.get('allow_unknown', False)

    @allow_unknown.setter
    def allow_unknown(self, value):
        if not (self.is_child or isinstance(value, (bool, ValidatedSchema))):
            value = normalize_rulesset(value)
            ValidatedSchema(self, {'allow_unknown': value})
        self._config['allow_unknown'] = value

    @property  # type: ignore
    def schema(self):
        """
        The validation schema of a validator. When a schema is passed to a validator
        method (e.g.
``validate``), it replaces this attribute.\n\n Type: any :term:`mapping` or :obj:`None`\n \"\"\"\n return self._schema\n\n @schema.setter\n def schema(self, schema):\n if schema is None:\n self._schema = None\n elif self.is_child or isinstance(schema, ValidatedSchema):\n self._schema = schema\n else:\n self._schema = ValidatedSchema(self, schema)\n\n"}}},{"rowIdx":346,"cells":{"text":{"kind":"string","value":"\nfrom pyflunearyou import Client\nfrom pyflunearyou.errors import FluNearYouError\nimport voluptuous as vol\n\nfrom homeassistant import config_entries\nfrom homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE\nfrom homeassistant.helpers import aiohttp_client, config_validation as cv\n\nfrom .const import DOMAIN, LOGGER # pylint: disable=unused-import\n\n\nclass FluNearYouFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):\n \"\"\"Handle an FluNearYou config flow.\"\"\"\n\n VERSION = 1\n CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL\n\n @property\n def data_schema(self):\n \"\"\"Return the data schema for integration.\"\"\"\n return vol.Schema(\n {\n vol.Required(\n CONF_LATITUDE, default=self.hass.config.latitude\n ): cv.latitude,\n vol.Required(\n CONF_LONGITUDE, default=self.hass.config.longitude\n ): cv.longitude,\n }\n )\n\n async def async_step_user(self, user_input=None):\n \"\"\"Handle the start of the config flow.\"\"\"\n if not user_input:\n return self.async_show_form(step_id=\"user\", data_schema=self.data_schema)\n\n unique_id = f\"{user_input[CONF_LATITUDE]}, {user_input[CONF_LONGITUDE]}\"\n\n await self.async_set_unique_id(unique_id)\n self._abort_if_unique_id_configured()\n\n websession = aiohttp_client.async_get_clientsession(self.hass)\n client = Client(websession)\n\n try:\n await client.cdc_reports.status_by_coordinates(\n user_input[CONF_LATITUDE], user_input[CONF_LONGITUDE]\n )\n except FluNearYouError as err:\n LOGGER.error(\"Error while configuring integration: %s\", err)\n return self.async_show_form(step_id=\"user\", errors={\"base\": \"unknown\"})\n\n return self.async_create_entry(title=unique_id, data=user_input)\n\n"}}},{"rowIdx":347,"cells":{"text":{"kind":"string","value":"\nimport numpy as np\nfrom jax import config\nimport pytest\nimport tensornetwork\nimport tensornetwork.linalg.operations\nfrom tensornetwork.linalg.operations import kron\nfrom tensornetwork.linalg.initialization import random_uniform\nfrom tensornetwork.tensor import Tensor\nfrom tensornetwork.ncon_interface import ncon\nfrom tensornetwork import backends\nfrom tensornetwork.tests import testing_utils\n\n# pylint: disable=no-member\nconfig.update(\"jax_enable_x64\", True)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_not_bool)\ndef test_tensordot_invalid_backend_raises_value_error(backend, dtype):\n \"\"\"\n Tests that tensordot raises ValueError when fed Tensors with different\n backends. 
Other failure modes are tested at the backend level.\n \"\"\"\n backend_names = set([\"jax\", \"numpy\", \"tensorflow\", \"pytorch\"])\n this_name = set([backend])\n other_backend_names = list(backend_names - this_name)\n shape = (4, 4, 4)\n dtype1 = testing_utils.np_dtype_to_backend(backend, dtype)\n testing_utils.check_contraction_dtype(backend, dtype1)\n tensor1 = tensornetwork.ones(shape, backend=backend, dtype=dtype1)\n for other_backend in other_backend_names:\n dtype2 = testing_utils.np_dtype_to_backend(other_backend, dtype)\n testing_utils.check_contraction_dtype(other_backend, dtype2)\n tensor2 = tensornetwork.ones(shape, backend=other_backend, dtype=dtype2)\n with pytest.raises(ValueError):\n _ = tensornetwork.tensordot(tensor1, tensor2, [[2, 0, 1], [1, 2, 0]])\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_not_bool)\ndef test_tensordot_vs_backend(backend, dtype):\n \"\"\"\n Tests that tensordot yields the same result as the backend equivalent.\n \"\"\"\n shape = (4, 4, 4)\n dtype = testing_utils.np_dtype_to_backend(backend, dtype)\n testing_utils.check_contraction_dtype(backend, dtype)\n tensor1 = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n tensor2 = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n tensors = [tensor1, tensor2]\n dims = [[2, 0, 1], [1, 2, 0]]\n result = tensornetwork.tensordot(*tensors, dims)\n backend_obj = backends.backend_factory.get_backend(backend)\n arrays = [t.array for t in tensors]\n backend_result = backend_obj.tensordot(*arrays, axes=dims)\n np.testing.assert_allclose(backend_result, result.array)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_not_bool)\ndef test_tensordot_int_vs_backend(backend, dtype):\n \"\"\"\n Tests that tensordot yields the same result as the backend equivalent.\n \"\"\"\n shape = (4, 4, 4)\n dtype = testing_utils.np_dtype_to_backend(backend, dtype)\n testing_utils.check_contraction_dtype(backend, dtype)\n tensor1 = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n tensor2 = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n tensors = [tensor1, tensor2]\n dim = 1\n result = tensornetwork.tensordot(*tensors, dim)\n backend_obj = backends.backend_factory.get_backend(backend)\n arrays = [t.array for t in tensors]\n backend_result = backend_obj.tensordot(*arrays, axes=dim)\n np.testing.assert_allclose(backend_result, result.array)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_all_dtypes)\ndef test_reshape_vs_backend(backend, dtype):\n \"\"\"\n Tests that reshape yields the same result as the backend equivalent.\n \"\"\"\n shape = (3, 2, 4)\n dtype = testing_utils.np_dtype_to_backend(backend, dtype)\n tensor = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n result = tensornetwork.reshape(tensor, (6, 4))\n backend_obj = backends.backend_factory.get_backend(backend)\n backend_result = backend_obj.reshape(tensor.array, (6, 4))\n assert result.shape == backend_result.shape\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_all_dtypes)\ndef test_transpose_vs_backend(backend, dtype):\n \"\"\"\n Tests that transpose yields the same result as the backend equivalent.\n \"\"\"\n shape = (3, 2, 4)\n permutation = (1, 2, 0)\n tensor, array = testing_utils.safe_randn(shape, backend, dtype)\n\n if tensor is not None:\n backend_obj = backends.backend_factory.get_backend(backend)\n test = backend_obj.convert_to_tensor(array)\n test = backend_obj.transpose(test, perm=permutation)\n tensor_test = tensornetwork.transpose(tensor, perm=permutation)\n 
np.testing.assert_allclose(test, tensor_test.array)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_float_dtypes)\ndef test_hconj_vs_backend(backend, dtype):\n \"\"\"\n Tests that hconj yields the same result as the equivalent backend sequence.\n \"\"\"\n shape = (3, 2, 4)\n permutation = (1, 2, 0)\n tensor, array = testing_utils.safe_randn(shape, backend, dtype)\n\n if tensor is not None:\n backend_obj = backends.backend_factory.get_backend(backend)\n test = backend_obj.convert_to_tensor(array)\n test = backend_obj.transpose(test, perm=permutation)\n test = backend_obj.conj(test)\n tensor_test = tensornetwork.hconj(tensor, perm=permutation)\n np.testing.assert_allclose(test, tensor_test.array)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_all_dtypes)\ndef test_take_slice_vs_backend(backend, dtype):\n \"\"\"\n Tests that take_slice yields the same result as the backend equivalent.\n \"\"\"\n shape = (5, 6, 7)\n dtype = testing_utils.np_dtype_to_backend(backend, dtype)\n tensor = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n start_indices = (1, 2, 3)\n slice_sizes = (2, 3, 3)\n result = tensornetwork.take_slice(tensor, start_indices, slice_sizes)\n backend_obj = backends.backend_factory.get_backend(backend)\n backend_result = backend_obj.slice(tensor.array, start_indices, slice_sizes)\n assert result.shape == backend_result.shape\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_float_dtypes)\n@pytest.mark.parametrize(\"fname\", [\"sin\", \"cos\", \"exp\", \"log\", \"conj\", \"sign\"])\ndef test_unary_ops_vs_backend(backend, dtype, fname):\n shape = (4, 5, 6)\n dtype_b = testing_utils.np_dtype_to_backend(backend, dtype)\n backend_obj = backends.backend_factory.get_backend(backend)\n backend_func = getattr(backend_obj, fname)\n tn_func = getattr(tensornetwork.linalg.operations, fname)\n tensor = tensornetwork.ones(shape, backend=backend, dtype=dtype_b)\n if backend == \"pytorch\" and fname in [\"sin\", \"log\", \"exp\", \"cos\"]:\n with pytest.raises(NotImplementedError):\n backend_result = backend_func(tensor.array)\n with pytest.raises(NotImplementedError):\n tn_result = tn_func(tensor).array\n else:\n backend_result = backend_func(tensor.array)\n tn_result = tn_func(tensor).array\n np.testing.assert_allclose(backend_result, tn_result)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_not_half)\ndef test_abs_vs_backend(backend, dtype):\n shape = (4, 5, 6)\n dtype_b = testing_utils.np_dtype_to_backend(backend, dtype)\n backend_obj = backends.backend_factory.get_backend(backend)\n tensor = tensornetwork.ones(shape, backend=backend, dtype=dtype_b)\n if (backend == \"pytorch\" and dtype == np.float16):\n pytest.skip(\"Prod not supported with this dtype and backend.\")\n else:\n backend_result = backend_obj.sqrt(tensor.array)\n tn_result = tensornetwork.sqrt(tensor).array\n np.testing.assert_allclose(backend_result, tn_result)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_float_dtypes)\ndef test_sqrt_vs_backend(backend, dtype):\n shape = (4, 5, 6)\n dtype_b = testing_utils.np_dtype_to_backend(backend, dtype)\n backend_obj = backends.backend_factory.get_backend(backend)\n tensor = tensornetwork.ones(shape, backend=backend, dtype=dtype_b)\n if (backend == \"pytorch\" and dtype == np.float16):\n pytest.skip(\"Prod not supported with this dtype and backend.\")\n else:\n backend_result = backend_obj.sqrt(tensor.array)\n tn_result = tensornetwork.sqrt(tensor).array\n np.testing.assert_allclose(backend_result, 
tn_result)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_all_dtypes)\ndef test_shape(backend, dtype):\n shape = (4, 5, 6)\n dtype_b = testing_utils.np_dtype_to_backend(backend, dtype)\n tensor = tensornetwork.ones(shape, backend=backend, dtype=dtype_b)\n tn_result = tensornetwork.shape(tensor)\n assert tensor.shape == tn_result\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_all_dtypes)\ndef test_einsum_invalid_backends(dtype, backend):\n backend_names = set([\"jax\", \"numpy\", \"tensorflow\", \"pytorch\"])\n this_name = set([backend])\n other_backend_names = list(backend_names - this_name)\n shape = (4, 3)\n dtype1 = testing_utils.np_dtype_to_backend(backend, dtype)\n tensor1 = tensornetwork.ones(shape, backend=backend, dtype=dtype1)\n for other_backend in other_backend_names:\n dtype2 = testing_utils.np_dtype_to_backend(other_backend, dtype)\n tensor2 = tensornetwork.ones(shape, backend=other_backend, dtype=dtype2)\n for other_other_backend in backend_names:\n dtype3 = testing_utils.np_dtype_to_backend(other_other_backend, dtype)\n tensor3 = tensornetwork.zeros(shape, backend=other_other_backend,\n dtype=dtype3)\n with pytest.raises(ValueError):\n _ = tensornetwork.einsum(\"ba, bc, dc\", tensor1, tensor2, tensor3,\n optimize=True)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_not_bool)\ndef test_einsum_vs_backend(dtype, backend):\n shape = (4, 3)\n dtype = testing_utils.np_dtype_to_backend(backend, dtype)\n testing_utils.check_contraction_dtype(backend, dtype)\n tensor1 = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n tensor2 = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n tensor3 = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n result = tensornetwork.einsum(\"ba, bc, dc\", tensor1, tensor2, tensor3,\n optimize=True)\n backend_obj = backends.backend_factory.get_backend(backend)\n arrays = [t.array for t in [tensor1, tensor2, tensor3]]\n backend_result = backend_obj.einsum(\"ba, bc, dc\", *arrays, optimize=True)\n np.testing.assert_allclose(backend_result, result.array)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_all_dtypes)\ndef test_outer_invalid_backends(dtype, backend):\n backend_names = set([\"jax\", \"numpy\", \"tensorflow\", \"pytorch\"])\n this_name = set([backend])\n other_backend_names = list(backend_names - this_name)\n shape = (4, 3)\n dtype1 = testing_utils.np_dtype_to_backend(backend, dtype)\n tensor1 = tensornetwork.ones(shape, backend=backend, dtype=dtype1)\n for other_backend in other_backend_names:\n dtype2 = testing_utils.np_dtype_to_backend(other_backend, dtype)\n tensor2 = tensornetwork.ones(shape, backend=other_backend, dtype=dtype2)\n with pytest.raises(ValueError):\n _ = tensornetwork.outer(tensor1, tensor2)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_not_bool)\ndef test_outer_vs_backend(dtype, backend):\n shape = (4, 3)\n dtype = testing_utils.np_dtype_to_backend(backend, dtype)\n testing_utils.check_contraction_dtype(backend, dtype)\n tensor1 = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n tensor2 = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n result = tensornetwork.outer(tensor1, tensor2)\n backend_obj = backends.backend_factory.get_backend(backend)\n arrays = [t.array for t in [tensor1, tensor2]]\n backend_result = backend_obj.outer_product(*arrays)\n np.testing.assert_allclose(backend_result, result.array)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_all_dtypes)\ndef test_ncon_invalid_backends(dtype, backend):\n backend_names = 
set([\"jax\", \"numpy\", \"tensorflow\", \"pytorch\"])\n this_name = set([backend])\n other_backend_names = list(backend_names - this_name)\n shape = (4, 3)\n dtype1 = testing_utils.np_dtype_to_backend(backend, dtype)\n tensor1 = tensornetwork.ones(shape, backend=backend, dtype=dtype1)\n for other_backend in other_backend_names:\n dtype2 = testing_utils.np_dtype_to_backend(other_backend, dtype)\n tensor2 = tensornetwork.ones(shape, backend=other_backend, dtype=dtype2)\n for other_other_backend in backend_names:\n dtype3 = testing_utils.np_dtype_to_backend(other_other_backend, dtype)\n tensor3 = tensornetwork.zeros(shape, backend=other_other_backend,\n dtype=dtype3)\n tensors = [tensor1, tensor2, tensor3]\n idxs = [[1, -1], [1, 2], [-2, 2]]\n with pytest.raises(ValueError):\n _ = ncon(tensors, idxs)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_not_bool)\ndef test_ncon_vs_backend(dtype, backend):\n shape = (4, 3)\n dtype = testing_utils.np_dtype_to_backend(backend, dtype)\n testing_utils.check_contraction_dtype(backend, dtype)\n tensor1 = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n tensor2 = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n tensor3 = tensornetwork.ones(shape, backend=backend, dtype=dtype)\n tensors = [tensor1, tensor2, tensor3]\n arrays = [tensor1.array, tensor2.array, tensor3.array]\n idxs = [[1, -1], [1, 2], [-2, 2]]\n result = ncon(tensors, idxs, backend=backend)\n old_result = tensornetwork.ncon(arrays, idxs, backend=backend)\n np.testing.assert_allclose(old_result, result.array)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_float_dtypes)\ndef test_diagonal(backend, dtype):\n \"\"\" Checks that Tensor.diagonal() works.\n \"\"\"\n shape = (2, 3, 3)\n A, _ = testing_utils.safe_randn(shape, backend, dtype)\n if A is not None:\n np.testing.assert_allclose(tensornetwork.diagonal(A).array,\n A.backend.diagonal(A.array))\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_float_dtypes)\ndef test_diagflat(backend, dtype):\n \"\"\" Checks that Tensor.diagflat() works.\n \"\"\"\n shape = (2, 3, 3)\n A, _ = testing_utils.safe_randn(shape, backend, dtype)\n if A is not None:\n np.testing.assert_allclose(tensornetwork.diagflat(A).array,\n A.backend.diagflat(A.array))\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_not_half)\ndef test_trace(backend, dtype):\n \"\"\" Checks that Tensor.trace() works.\n \"\"\"\n shape = (2, 3, 3)\n A, _ = testing_utils.safe_randn(shape, backend, dtype)\n if A is not None:\n np.testing.assert_allclose(tensornetwork.trace(A).array,\n A.backend.trace(A.array))\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_float_dtypes)\n@pytest.mark.parametrize(\"pivotA\", [None, 1, 2, 0, -1])\ndef test_pivot(backend, dtype, pivotA):\n \"\"\" Checks that Tensor.pivot() works.\n \"\"\"\n shapeA = (2, 3, 4, 2)\n A, _ = testing_utils.safe_randn(shapeA, backend, dtype)\n if A is not None:\n if pivotA is None:\n matrixA = tensornetwork.pivot(A)\n tA = A.backend.pivot(A.array, pivot_axis=-1)\n else:\n matrixA = tensornetwork.pivot(A, pivot_axis=pivotA)\n tA = A.backend.pivot(A.array, pivot_axis=pivotA)\n np.testing.assert_allclose(matrixA.array, tA)\n\n\n@pytest.mark.parametrize(\"dtype\", testing_utils.np_float_dtypes)\ndef test_kron(backend, dtype):\n \"\"\" Checks that Tensor.kron() works.\n \"\"\"\n if (backend == \"pytorch\" and dtype in (np.complex64, np.complex128)):\n pytest.skip(\"pytorch support for complex dtypes is currently poor.\")\n\n np.random.seed(10)\n t1 = Tensor(np.random.rand(2, 
2).astype(dtype), backend=backend)\n t2 = Tensor(np.random.rand(3, 3).astype(dtype), backend=backend)\n\n res_kron = kron(t1, t2)\n res_ncon = ncon([t1.array, t2.array], [[-1, -3], [-2, -4]], backend=backend)\n np.testing.assert_allclose(res_kron.array, res_ncon)\n mat1 = res_kron.reshape((6, 6))\n mat2 = np.kron(t1.array, t2.array)\n np.testing.assert_allclose(mat1.array, mat2)\n\n t1 = Tensor(np.random.rand(2, 2, 2, 2).astype(dtype), backend=backend)\n t2 = Tensor(np.random.rand(3, 3, 3, 3).astype(dtype), backend=backend)\n res_kron = kron(t1, t2)\n res_ncon = ncon([t1.array, t2.array], [[-1, -2, -5, -6], [-3, -4, -7, -8]],\n backend=backend)\n np.testing.assert_allclose(res_kron.array, res_ncon)\n mat1 = res_kron.reshape((36, 36))\n mat2 = np.kron(\n np.array(t1.array).reshape(4, 4),\n np.array(t2.array).reshape(9, 9))\n np.testing.assert_allclose(mat1.array, mat2)\n\n\ndef test_kron_raises(backend):\n np.random.seed(10)\n t1 = Tensor(np.random.rand(2, 2, 2), backend=backend)\n t2 = Tensor(np.random.rand(3, 3), backend=backend)\n with pytest.raises(ValueError, match=\"tensorA.ndim\"):\n kron(t1, t2)\n with pytest.raises(ValueError, match=\"tensorB.ndim\"):\n kron(t2, t1)\n\n t1 = Tensor(np.random.rand(2, 2, 2), backend='numpy')\n t2 = Tensor(np.random.rand(3, 3), backend='tensorflow')\n with pytest.raises(ValueError, match=\"kron\"):\n kron(t1, t2)\n\n"}}},{"rowIdx":348,"cells":{"text":{"kind":"string","value":"\nfrom collections import deque\nfrom copy import deepcopy\nfrom datetime import timedelta\n\nimport aiounifi\nimport pytest\n\nfrom homeassistant.components.device_tracker import DOMAIN as TRACKER_DOMAIN\nfrom homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN\nfrom homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN\nfrom homeassistant.components.unifi.const import (\n CONF_CONTROLLER,\n CONF_SITE_ID,\n DEFAULT_ALLOW_BANDWIDTH_SENSORS,\n DEFAULT_ALLOW_UPTIME_SENSORS,\n DEFAULT_DETECTION_TIME,\n DEFAULT_TRACK_CLIENTS,\n DEFAULT_TRACK_DEVICES,\n DEFAULT_TRACK_WIRED_CLIENTS,\n DOMAIN as UNIFI_DOMAIN,\n UNIFI_WIRELESS_CLIENTS,\n)\nfrom homeassistant.components.unifi.controller import (\n SUPPORTED_PLATFORMS,\n get_controller,\n)\nfrom homeassistant.components.unifi.errors import AuthenticationRequired, CannotConnect\nfrom homeassistant.const import (\n CONF_HOST,\n CONF_PASSWORD,\n CONF_PORT,\n CONF_USERNAME,\n CONF_VERIFY_SSL,\n)\nfrom homeassistant.setup import async_setup_component\n\nfrom tests.async_mock import patch\nfrom tests.common import MockConfigEntry\n\nCONTROLLER_HOST = {\n \"hostname\": \"controller_host\",\n \"ip\": \"1.2.3.4\",\n \"is_wired\": True,\n \"last_seen\": 1562600145,\n \"mac\": \"10:00:00:00:00:01\",\n \"name\": \"Controller host\",\n \"oui\": \"Producer\",\n \"sw_mac\": \"00:00:00:00:01:01\",\n \"sw_port\": 1,\n \"wired-rx_bytes\": 1234000000,\n \"wired-tx_bytes\": 5678000000,\n \"uptime\": 1562600160,\n}\n\nCONTROLLER_DATA = {\n CONF_HOST: \"1.2.3.4\",\n CONF_USERNAME: \"username\",\n CONF_PASSWORD: \"password\",\n CONF_PORT: 1234,\n CONF_SITE_ID: \"site_id\",\n CONF_VERIFY_SSL: False,\n}\n\nENTRY_CONFIG = {CONF_CONTROLLER: CONTROLLER_DATA}\nENTRY_OPTIONS = {}\n\nCONFIGURATION = []\n\nSITES = {\"Site name\": {\"desc\": \"Site name\", \"name\": \"site_id\", \"role\": \"admin\"}}\nDESCRIPTION = [{\"name\": \"username\", \"site_name\": \"site_id\", \"site_role\": \"admin\"}]\n\n\nasync def setup_unifi_integration(\n hass,\n config=ENTRY_CONFIG,\n options=ENTRY_OPTIONS,\n sites=SITES,\n site_description=DESCRIPTION,\n 
clients_response=None,\n devices_response=None,\n clients_all_response=None,\n wlans_response=None,\n known_wireless_clients=None,\n controllers=None,\n):\n \"\"\"Create the UniFi controller.\"\"\"\n assert await async_setup_component(hass, UNIFI_DOMAIN, {})\n\n config_entry = MockConfigEntry(\n domain=UNIFI_DOMAIN,\n data=deepcopy(config),\n options=deepcopy(options),\n entry_id=1,\n )\n config_entry.add_to_hass(hass)\n\n if known_wireless_clients:\n hass.data[UNIFI_WIRELESS_CLIENTS].update_data(\n known_wireless_clients, config_entry\n )\n\n mock_client_responses = deque()\n if clients_response:\n mock_client_responses.append(clients_response)\n\n mock_device_responses = deque()\n if devices_response:\n mock_device_responses.append(devices_response)\n\n mock_client_all_responses = deque()\n if clients_all_response:\n mock_client_all_responses.append(clients_all_response)\n\n mock_wlans_responses = deque()\n if wlans_response:\n mock_wlans_responses.append(wlans_response)\n\n mock_requests = []\n\n async def mock_request(self, method, path, json=None):\n mock_requests.append({\"method\": method, \"path\": path, \"json\": json})\n\n if path == \"/stat/sta\" and mock_client_responses:\n return mock_client_responses.popleft()\n if path == \"/stat/device\" and mock_device_responses:\n return mock_device_responses.popleft()\n if path == \"/rest/user\" and mock_client_all_responses:\n return mock_client_all_responses.popleft()\n if path == \"/rest/wlanconf\" and mock_wlans_responses:\n return mock_wlans_responses.popleft()\n return {}\n\n with patch(\"aiounifi.Controller.check_unifi_os\", return_value=True), patch(\n \"aiounifi.Controller.login\",\n return_value=True,\n ), patch(\"aiounifi.Controller.sites\", return_value=sites), patch(\n \"aiounifi.Controller.site_description\", return_value=site_description\n ), patch(\n \"aiounifi.Controller.request\", new=mock_request\n ), patch.object(\n aiounifi.websocket.WSClient, \"start\", return_value=True\n ):\n await hass.config_entries.async_setup(config_entry.entry_id)\n await hass.async_block_till_done()\n\n if config_entry.entry_id not in hass.data[UNIFI_DOMAIN]:\n return None\n controller = hass.data[UNIFI_DOMAIN][config_entry.entry_id]\n\n controller.mock_client_responses = mock_client_responses\n controller.mock_device_responses = mock_device_responses\n controller.mock_client_all_responses = mock_client_all_responses\n controller.mock_wlans_responses = mock_wlans_responses\n controller.mock_requests = mock_requests\n\n return controller\n\n\nasync def test_controller_setup(hass):\n \"\"\"Successful setup.\"\"\"\n with patch(\n \"homeassistant.config_entries.ConfigEntries.async_forward_entry_setup\",\n return_value=True,\n ) as forward_entry_setup:\n controller = await setup_unifi_integration(hass)\n\n entry = controller.config_entry\n assert len(forward_entry_setup.mock_calls) == len(SUPPORTED_PLATFORMS)\n assert forward_entry_setup.mock_calls[0][1] == (entry, TRACKER_DOMAIN)\n assert forward_entry_setup.mock_calls[1][1] == (entry, SENSOR_DOMAIN)\n assert forward_entry_setup.mock_calls[2][1] == (entry, SWITCH_DOMAIN)\n\n assert controller.host == CONTROLLER_DATA[CONF_HOST]\n assert controller.site == CONTROLLER_DATA[CONF_SITE_ID]\n assert controller.site_name in SITES\n assert controller.site_role == SITES[controller.site_name][\"role\"]\n\n assert controller.option_allow_bandwidth_sensors == DEFAULT_ALLOW_BANDWIDTH_SENSORS\n assert controller.option_allow_uptime_sensors == DEFAULT_ALLOW_UPTIME_SENSORS\n assert 
isinstance(controller.option_block_clients, list)\n assert controller.option_track_clients == DEFAULT_TRACK_CLIENTS\n assert controller.option_track_devices == DEFAULT_TRACK_DEVICES\n assert controller.option_track_wired_clients == DEFAULT_TRACK_WIRED_CLIENTS\n assert controller.option_detection_time == timedelta(seconds=DEFAULT_DETECTION_TIME)\n assert isinstance(controller.option_ssid_filter, list)\n\n assert controller.mac is None\n\n assert controller.signal_update == \"unifi-update-1.2.3.4-site_id\"\n assert controller.signal_remove == \"unifi-remove-1.2.3.4-site_id\"\n assert controller.signal_options_update == \"unifi-options-1.2.3.4-site_id\"\n\n\nasync def test_controller_mac(hass):\n \"\"\"Test that it is possible to identify controller mac.\"\"\"\n controller = await setup_unifi_integration(hass, clients_response=[CONTROLLER_HOST])\n assert controller.mac == CONTROLLER_HOST[\"mac\"]\n\n\nasync def test_controller_not_accessible(hass):\n \"\"\"Retry to login gets scheduled when connection fails.\"\"\"\n with patch(\n \"homeassistant.components.unifi.controller.get_controller\",\n side_effect=CannotConnect,\n ):\n await setup_unifi_integration(hass)\n assert hass.data[UNIFI_DOMAIN] == {}\n\n\nasync def test_controller_unknown_error(hass):\n \"\"\"Unknown errors are handled.\"\"\"\n with patch(\n \"homeassistant.components.unifi.controller.get_controller\",\n side_effect=Exception,\n ):\n await setup_unifi_integration(hass)\n assert hass.data[UNIFI_DOMAIN] == {}\n\n\nasync def test_reset_after_successful_setup(hass):\n \"\"\"Calling reset when the entry has been setup.\"\"\"\n controller = await setup_unifi_integration(hass)\n\n assert len(controller.listeners) == 6\n\n result = await controller.async_reset()\n await hass.async_block_till_done()\n\n assert result is True\n assert len(controller.listeners) == 0\n\n\nasync def test_wireless_client_event_calls_update_wireless_devices(hass):\n \"\"\"Call update_wireless_devices method when receiving wireless client event.\"\"\"\n controller = await setup_unifi_integration(hass)\n\n with patch(\n \"homeassistant.components.unifi.controller.UniFiController.update_wireless_clients\",\n return_value=None,\n ) as wireless_clients_mock:\n controller.api.websocket._data = {\n \"meta\": {\"rc\": \"ok\", \"message\": \"events\"},\n \"data\": [\n {\n \"datetime\": \"2020-01-20T19:37:04Z\",\n \"key\": aiounifi.events.WIRELESS_CLIENT_CONNECTED,\n \"msg\": \"User[11:22:33:44:55:66] has connected to WLAN\",\n \"time\": 1579549024893,\n }\n ],\n }\n controller.api.session_handler(\"data\")\n\n assert wireless_clients_mock.assert_called_once\n\n\nasync def test_get_controller(hass):\n \"\"\"Successful call.\"\"\"\n with patch(\"aiounifi.Controller.check_unifi_os\", return_value=True), patch(\n \"aiounifi.Controller.login\", return_value=True\n ):\n assert await get_controller(hass, **CONTROLLER_DATA)\n\n\nasync def test_get_controller_verify_ssl_false(hass):\n \"\"\"Successful call with verify ssl set to false.\"\"\"\n controller_data = dict(CONTROLLER_DATA)\n controller_data[CONF_VERIFY_SSL] = False\n with patch(\"aiounifi.Controller.check_unifi_os\", return_value=True), patch(\n \"aiounifi.Controller.login\", return_value=True\n ):\n assert await get_controller(hass, **controller_data)\n\n\nasync def test_get_controller_login_failed(hass):\n \"\"\"Check that get_controller can handle a failed login.\"\"\"\n with patch(\"aiounifi.Controller.check_unifi_os\", return_value=True), patch(\n \"aiounifi.Controller.login\", 
side_effect=aiounifi.Unauthorized\n ), pytest.raises(AuthenticationRequired):\n await get_controller(hass, **CONTROLLER_DATA)\n\n\nasync def test_get_controller_controller_unavailable(hass):\n \"\"\"Check that get_controller can handle controller being unavailable.\"\"\"\n with patch(\"aiounifi.Controller.check_unifi_os\", return_value=True), patch(\n \"aiounifi.Controller.login\", side_effect=aiounifi.RequestError\n ), pytest.raises(CannotConnect):\n await get_controller(hass, **CONTROLLER_DATA)\n\n\nasync def test_get_controller_unknown_error(hass):\n \"\"\"Check that get_controller can handle unknown errors.\"\"\"\n with patch(\"aiounifi.Controller.check_unifi_os\", return_value=True), patch(\n \"aiounifi.Controller.login\", side_effect=aiounifi.AiounifiException\n ), pytest.raises(AuthenticationRequired):\n await get_controller(hass, **CONTROLLER_DATA)\n\n"}}},{"rowIdx":349,"cells":{"text":{"kind":"string","value":"\nimport diamond.collector\n\n\nclass XFSCollector(diamond.collector.Collector):\n\n PROC = '/proc/fs/xfs/stat'\n\n def get_default_config_help(self):\n config_help = super(XFSCollector, self).get_default_config_help()\n config_help.update({\n })\n return config_help\n\n def get_default_config(self):\n \"\"\"\n Returns the xfs collector settings\n \"\"\"\n config = super(XFSCollector, self).get_default_config()\n config.update({\n 'path': 'xfs'\n })\n return config\n\n def collect(self):\n \"\"\"\n Collect xfs stats.\n\n For an explanation of the following metrics visit\n http://xfs.org/index.php/Runtime_Stats\n https://github.com/torvalds/linux/blob/master/fs/xfs/xfs_stats.h\n \"\"\"\n data_structure = {\n 'extent_alloc': (\n 'alloc_extent',\n 'alloc_block',\n 'free_extent',\n 'free_block'\n ),\n 'abt': (\n 'lookup',\n 'compare',\n 'insrec',\n 'delrec'\n ),\n 'blk_map': (\n 'read_ops',\n 'write_ops',\n 'unmap',\n 'add_exlist',\n 'del_exlist',\n 'look_exlist',\n 'cmp_exlist'\n ),\n 'bmbt': (\n 'lookup',\n 'compare',\n 'insrec',\n 'delrec'\n ),\n 'dir': (\n 'lookup',\n 'create',\n 'remove',\n 'getdents'\n ),\n 'trans': (\n 'sync',\n 'async',\n 'empty'\n ),\n 'ig': (\n 'ig_attempts',\n 'ig_found',\n 'ig_frecycle',\n 'ig_missed',\n 'ig_dup',\n 'ig_reclaims',\n 'ig_attrchg'\n ),\n 'log': (\n 'writes',\n 'blocks',\n 'noiclogs',\n 'force',\n 'force_sleep'\n ),\n 'push_ail': (\n 'try_logspace',\n 'sleep_logspace',\n 'pushes',\n 'success',\n 'pushbuf',\n 'pinned',\n 'locked',\n 'flushing',\n 'restarts',\n 'flush'\n ),\n 'xstrat': (\n 'quick',\n 'split'\n ),\n 'rw': (\n 'write_calls',\n 'read_calls'\n ),\n 'attr': (\n 'get',\n 'set',\n 'remove',\n 'list'\n ),\n 'icluster': (\n 'iflush_count',\n 'icluster_flushcnt',\n 'icluster_flushinode'\n ),\n 'vnodes': (\n 'vn_active',\n 'vn_alloc',\n 'vn_get',\n 'vn_hold',\n 'vn_rele',\n 'vn_reclaim',\n 'vn_remove',\n 'vn_free'\n ),\n 'buf': (\n 'xb_get',\n 'xb_create',\n 'xb_get_locked',\n 'xb_get_locked_waited',\n 'xb_busy_locked',\n 'xb_miss_locked',\n 'xb_page_retries',\n 'xb_page_found',\n 'xb_get_read'\n ),\n 'abtb2': (\n 'xs_abtb_2_lookup',\n 'xs_abtb_2_compare',\n 'xs_abtb_2_insrec',\n 'xs_abtb_2_delrec',\n 'xs_abtb_2_newroot',\n 'xs_abtb_2_killroot',\n 'xs_abtb_2_increment',\n 'xs_abtb_2_decrement',\n 'xs_abtb_2_lshift',\n 'xs_abtb_2_rshift',\n 'xs_abtb_2_split',\n 'xs_abtb_2_join',\n 'xs_abtb_2_alloc',\n 'xs_abtb_2_free',\n 'xs_abtb_2_moves'\n ),\n 'abtc2': (\n 'xs_abtc_2_lookup',\n 'xs_abtc_2_compare',\n 'xs_abtc_2_insrec',\n 'xs_abtc_2_delrec',\n 'xs_abtc_2_newroot',\n 'xs_abtc_2_killroot',\n 'xs_abtc_2_increment',\n 
'xs_abtc_2_decrement',\n 'xs_abtc_2_lshift',\n 'xs_abtc_2_rshift',\n 'xs_abtc_2_split',\n 'xs_abtc_2_join',\n 'xs_abtc_2_alloc',\n 'xs_abtc_2_free',\n 'xs_abtc_2_moves'\n ),\n 'bmbt2': (\n 'xs_bmbt_2_lookup',\n 'xs_bmbt_2_compare',\n 'xs_bmbt_2_insrec',\n 'xs_bmbt_2_delrec',\n 'xs_bmbt_2_newroot',\n 'xs_bmbt_2_killroot',\n 'xs_bmbt_2_increment',\n 'xs_bmbt_2_decrement',\n 'xs_bmbt_2_lshift',\n 'xs_bmbt_2_rshift',\n 'xs_bmbt_2_split',\n 'xs_bmbt_2_join',\n 'xs_bmbt_2_alloc',\n 'xs_bmbt_2_free',\n 'xs_bmbt_2_moves'\n ),\n 'ibt2': (\n 'lookup',\n 'compare',\n 'insrec',\n 'delrec',\n 'newroot',\n 'killroot',\n 'increment',\n 'decrement',\n 'lshift',\n 'rshift',\n 'split',\n 'join',\n 'alloc',\n 'free',\n 'moves'\n ),\n 'fibt2': (\n 'lookup',\n 'compare',\n 'insrec',\n 'delrec',\n 'newroot',\n 'killroot',\n 'increment',\n 'decrement',\n 'lshift',\n 'rshift',\n 'split',\n 'join',\n 'alloc',\n 'free',\n 'moves'\n ),\n 'qm': (\n 'xs_qm_dquot',\n 'xs_qm_dquot_unused'\n ),\n\n 'xpc': (\n 'xs_xstrat_bytes',\n 'xs_write_bytes',\n 'xs_read_bytes'\n ),\n 'debug': (\n 'debug',\n )\n }\n\n f = open(self.PROC)\n new_stats = f.readlines()\n f.close()\n\n stats = {}\n for line in new_stats:\n items = line.rstrip().split()\n stats[items[0]] = [int(a) for a in items[1:]]\n\n for key in stats.keys():\n for item in enumerate(data_structure[key]):\n metric_name = '.'.join([key, item[1]])\n value = stats[key][item[0]]\n self.publish_counter(metric_name, value)\n\n"}}},{"rowIdx":350,"cells":{"text":{"kind":"string","value":"\nfrom __future__ import division\n\nimport numpy as np\n\nimport chainer\nfrom chainer.backends import cuda\nimport chainer.functions as F\nfrom chainer import initializers\nimport chainer.links as L\n\nfrom chainercv.links.model.fpn.misc import argsort\nfrom chainercv.links.model.fpn.misc import choice\nfrom chainercv.links.model.fpn.misc import exp_clip\nfrom chainercv.links.model.fpn.misc import smooth_l1\nfrom chainercv import utils\n\n\nclass BboxHead(chainer.Chain):\n \"\"\"Bounding box head network of Feature Pyramid Networks.\n\n Args:\n n_class (int): The number of classes including background.\n scales (tuple of floats): The scales of feature maps.\n\n \"\"\"\n _canonical_level = 2\n _canonical_scale = 224\n _roi_size = 7\n _roi_sample_ratio = 2\n std = (0.1, 0.2)\n\n def __init__(self, n_class, scales):\n super(BboxHead, self).__init__()\n\n fc_init = {\n 'initialW': Caffe2FCUniform(),\n 'initial_bias': Caffe2FCUniform(),\n }\n with self.init_scope():\n self.fc1 = L.Linear(1024, **fc_init)\n self.fc2 = L.Linear(1024, **fc_init)\n self.loc = L.Linear(\n n_class * 4, initialW=initializers.Normal(0.001))\n self.conf = L.Linear(n_class, initialW=initializers.Normal(0.01))\n\n self._n_class = n_class\n self._scales = scales\n\n def forward(self, hs, rois, roi_indices):\n \"\"\"Calculates RoIs.\n\n Args:\n hs (iterable of array): An iterable of feature maps.\n rois (list of arrays): A list of arrays of shape: math: `(R_l, 4)`,\n where: math: `R_l` is the number of RoIs in the: math: `l`- th\n feature map.\n roi_indices (list of arrays): A list of arrays of\n shape :math:`(R_l,)`.\n\n Returns:\n tuple of two arrays:\n :obj:`locs` and :obj:`confs`.\n\n * **locs**: An arrays whose shape is \\\n :math:`(R, n\\_class, 4)`, where :math:`R` is the total number \\\n of RoIs in the batch.\n * **confs**: A list of array whose shape is :math:`(R, n\\_class)`.\n \"\"\"\n\n hs_ = []\n for l, h in enumerate(hs):\n if len(rois[l]) == 0:\n continue\n h = F.roi_average_align_2d(\n h, rois[l], 
roi_indices[l], self._roi_size,\n self._scales[l], self._roi_sample_ratio)\n hs_.append(h)\n hs = hs_\n\n if len(hs) == 0:\n locs = chainer.Variable(\n self.xp.empty((0, self._n_class, 4), dtype=np.float32))\n confs = chainer.Variable(\n self.xp.empty((0, self._n_class), dtype=np.float32))\n return locs, confs\n\n h = F.concat(hs, axis=0)\n h = F.reshape(h, (h.shape[0], -1))\n h = F.relu(self.fc1(h))\n h = F.relu(self.fc2(h))\n\n locs = self.loc(h)\n locs = F.reshape(locs, (locs.shape[0], -1, 4))\n confs = self.conf(h)\n return locs, confs\n\n def distribute(self, rois, roi_indices):\n \"\"\"Assigns Rois to feature maps according to their size.\n\n Args:\n rois (array): An array of shape :math:`(R, 4)`, \\\n where :math:`R` is the total number of RoIs in the given batch.\n roi_indices (array): An array of shape :math:`(R,)`.\n\n Returns:\n tuple of two lists:\n :obj:`rois` and :obj:`roi_indices`.\n\n * **rois**: A list of arrays of shape :math:`(R_l, 4)`, \\\n where :math:`R_l` is the number of RoIs in the :math:`l`-th \\\n feature map.\n * **roi_indices** : A list of arrays of shape :math:`(R_l,)`.\n \"\"\"\n\n size = self.xp.sqrt(self.xp.prod(rois[:, 2:] - rois[:, :2], axis=1))\n level = self.xp.floor(self.xp.log2(\n size / self._canonical_scale + 1e-6)).astype(np.int32)\n # skip last level\n level = self.xp.clip(\n level + self._canonical_level, 0, len(self._scales) - 2)\n\n masks = [level == l for l in range(len(self._scales))]\n rois = [rois[mask] for mask in masks]\n roi_indices = [roi_indices[mask] for mask in masks]\n\n return rois, roi_indices\n\n def decode(self, rois, roi_indices, locs, confs,\n scales, sizes, nms_thresh, score_thresh):\n \"\"\"Decodes back to coordinates of RoIs.\n\n This method decodes :obj:`locs` and :obj:`confs` returned\n by a FPN network back to :obj:`bboxes`,\n :obj:`labels` and :obj:`scores`.\n\n Args:\n rois (iterable of arrays): An iterable of arrays of\n shape :math:`(R_l, 4)`, where :math:`R_l` is the number\n of RoIs in the :math:`l`-th feature map.\n roi_indices (iterable of arrays): An iterable of arrays of\n shape :math:`(R_l,)`.\n locs (array): An array whose shape is :math:`(R, n\\_class, 4)`,\n where :math:`R` is the total number of RoIs in the given batch.\n confs (array): An array whose shape is :math:`(R, n\\_class)`.\n scales (list of floats): A list of floats returned\n by :meth:`~chainercv.links.model.fpn.faster_rcnn.prepare`\n sizes (list of tuples of two ints): A list of\n :math:`(H_n, W_n)`, where :math:`H_n` and :math:`W_n`\n are height and width of the :math:`n`-th image.\n nms_thresh (float): The threshold value\n for :func:`~chainercv.utils.non_maximum_suppression`.\n score_thresh (float): The threshold value for confidence score.\n\n Returns:\n tuple of three list of arrays:\n :obj:`bboxes`, :obj:`labels` and :obj:`scores`.\n\n * **bboxes**: A list of float arrays of shape :math:`(R'_n, 4)`, \\\n where :math:`R'_n` is the number of bounding boxes in \\\n the :math:`n`-th image. \\\n Each bounding box is organized by \\\n :math:`(y_{min}, x_{min}, y_{max}, x_{max})` \\\n in the second axis.\n * **labels** : A list of integer arrays of shape :math:`(R'_n,)`. \\\n Each value indicates the class of the bounding box. \\\n Values are in range :math:`[0, L - 1]`, where :math:`L` is the \\\n number of the foreground classes.\n * **scores** : A list of float arrays of shape :math:`(R'_n,)`. 
\\\n Each value indicates how confident the prediction is.\n \"\"\"\n\n rois = self.xp.vstack(rois)\n roi_indices = self.xp.hstack(roi_indices)\n locs = locs.array\n confs = confs.array\n\n bboxes = []\n labels = []\n scores = []\n for i in range(len(scales)):\n mask = roi_indices == i\n roi = rois[mask]\n loc = locs[mask]\n conf = confs[mask]\n\n bbox = self.xp.broadcast_to(roi[:, None], loc.shape) / scales[i]\n # tlbr -> yxhw\n bbox[:, :, 2:] -= bbox[:, :, :2]\n bbox[:, :, :2] += bbox[:, :, 2:] / 2\n # offset\n bbox[:, :, :2] += loc[:, :, :2] * bbox[:, :, 2:] * self.std[0]\n bbox[:, :, 2:] *= self.xp.exp(\n self.xp.minimum(loc[:, :, 2:] * self.std[1], exp_clip))\n # yxhw -> tlbr\n bbox[:, :, :2] -= bbox[:, :, 2:] / 2\n bbox[:, :, 2:] += bbox[:, :, :2]\n # clip\n bbox[:, :, :2] = self.xp.maximum(bbox[:, :, :2], 0)\n bbox[:, :, 2:] = self.xp.minimum(\n bbox[:, :, 2:], self.xp.array(sizes[i]))\n\n conf = self.xp.exp(conf)\n score = conf / self.xp.sum(conf, axis=1, keepdims=True)\n\n bbox, label, score = _suppress(\n bbox, score, nms_thresh, score_thresh)\n\n bboxes.append(bbox)\n labels.append(label)\n scores.append(score)\n\n return bboxes, labels, scores\n\n\ndef bbox_head_loss_pre(rois, roi_indices, std, bboxes, labels):\n \"\"\"Loss function for Head (pre).\n\n This function processes RoIs for :func:`bbox_head_loss_post`.\n\n Args:\n rois (iterable of arrays): An iterable of arrays of\n shape :math:`(R_l, 4)`, where :math:`R_l` is the number\n of RoIs in the :math:`l`-th feature map.\n roi_indices (iterable of arrays): An iterable of arrays of\n shape :math:`(R_l,)`.\n std (tuple of floats): Two coefficients used for encoding\n bounding boxes.\n bboxes (list of arrays): A list of arrays whose shape is\n :math:`(R_n, 4)`, where :math:`R_n` is the number of\n ground truth bounding boxes.\n labels (list of arrays): A list of arrays whose shape is\n :math:`(R_n,)`.\n\n Returns:\n tuple of four lists:\n :obj:`rois`, :obj:`roi_indices`, :obj:`gt_locs`, and :obj:`gt_labels`.\n\n * **rois**: A list of arrays of shape :math:`(R'_l, 4)`, \\\n where :math:`R'_l` is the number of RoIs in the :math:`l`-th \\\n feature map.\n * **roi_indices**: A list of arrays of shape :math:`(R'_l,)`.\n * **gt_locs**: A list of arrays of shape :math:`(R'_l, 4) \\\n indicating the bounding boxes of ground truth.\n * **roi_indices**: A list of arrays of shape :math:`(R'_l,)` \\\n indicating the classes of ground truth.\n \"\"\"\n\n thresh = 0.5\n batchsize_per_image = 512\n fg_ratio = 0.25\n\n xp = cuda.get_array_module(*rois)\n\n n_level = len(rois)\n roi_levels = xp.hstack(\n xp.array((l,) * len(rois[l])) for l in range(n_level)).astype(np.int32)\n rois = xp.vstack(rois).astype(np.float32)\n roi_indices = xp.hstack(roi_indices).astype(np.int32)\n\n rois_yx = (rois[:, 2:] + rois[:, :2]) / 2\n rois_hw = rois[:, 2:] - rois[:, :2]\n indices = np.unique(cuda.to_cpu(roi_indices))\n\n gt_locs = xp.empty_like(rois)\n gt_labels = xp.empty_like(roi_indices)\n for i in indices:\n mask = roi_indices == i\n\n if len(bboxes[i]) > 0:\n iou = utils.bbox_iou(rois[mask], bboxes[i])\n gt_index = iou.argmax(axis=1)\n\n gt_loc = bboxes[i][gt_index].copy()\n else:\n gt_loc = xp.empty_like(rois[mask])\n # tlbr -> yxhw\n gt_loc[:, 2:] -= gt_loc[:, :2]\n gt_loc[:, :2] += gt_loc[:, 2:] / 2\n # offset\n gt_loc[:, :2] = (gt_loc[:, :2] - rois_yx[mask]) / \\\n rois_hw[mask] / std[0]\n gt_loc[:, 2:] = xp.log(gt_loc[:, 2:] / rois_hw[mask]) / std[1]\n\n if len(bboxes[i]) > 0:\n gt_label = labels[i][gt_index] + 1\n gt_label[iou.max(axis=1) < 
thresh] = 0\n else:\n gt_label = xp.zeros(int(mask.sum()), dtype=np.int32)\n\n fg_index = xp.where(gt_label > 0)[0]\n n_fg = int(batchsize_per_image * fg_ratio)\n if len(fg_index) > n_fg:\n gt_label[choice(fg_index, size=len(fg_index) - n_fg)] = -1\n\n bg_index = xp.where(gt_label == 0)[0]\n n_bg = batchsize_per_image - int((gt_label > 0).sum())\n if len(bg_index) > n_bg:\n gt_label[choice(bg_index, size=len(bg_index) - n_bg)] = -1\n\n gt_locs[mask] = gt_loc\n gt_labels[mask] = gt_label\n\n mask = gt_labels >= 0\n rois = rois[mask]\n roi_indices = roi_indices[mask]\n roi_levels = roi_levels[mask]\n gt_locs = gt_locs[mask]\n gt_labels = gt_labels[mask]\n\n masks = [roi_levels == l for l in range(n_level)]\n rois = [rois[m] for m in masks]\n roi_indices = [roi_indices[m] for m in masks]\n gt_locs = [gt_locs[m] for m in masks]\n gt_labels = [gt_labels[m] for m in masks]\n\n return rois, roi_indices, gt_locs, gt_labels\n\n\ndef bbox_head_loss_post(\n locs, confs, roi_indices, gt_locs, gt_labels, batchsize):\n \"\"\"Loss function for Head (post).\n\n Args:\n locs (array): An array whose shape is :math:`(R, n\\_class, 4)`,\n where :math:`R` is the total number of RoIs in the given batch.\n confs (array): An iterable of arrays whose shape is\n :math:`(R, n\\_class)`.\n roi_indices (list of arrays): A list of arrays returned by\n :func:`bbox_head_locs_pre`.\n gt_locs (list of arrays): A list of arrays returned by\n :func:`bbox_head_locs_pre`.\n gt_labels (list of arrays): A list of arrays returned by\n :func:`bbox_head_locs_pre`.\n batchsize (int): The size of batch.\n\n Returns:\n tuple of two variables:\n :obj:`loc_loss` and :obj:`conf_loss`.\n \"\"\"\n\n xp = cuda.get_array_module(locs.array, confs.array)\n\n roi_indices = xp.hstack(roi_indices).astype(np.int32)\n gt_locs = xp.vstack(gt_locs).astype(np.float32)\n gt_labels = xp.hstack(gt_labels).astype(np.int32)\n\n loc_loss = 0\n conf_loss = 0\n for i in np.unique(cuda.to_cpu(roi_indices)):\n mask = roi_indices == i\n gt_loc = gt_locs[mask]\n gt_label = gt_labels[mask]\n\n n_sample = mask.sum()\n loc_loss += F.sum(smooth_l1(\n locs[mask][xp.where(gt_label > 0)[0], gt_label[gt_label > 0]],\n gt_loc[gt_label > 0], 1)) / n_sample\n conf_loss += F.softmax_cross_entropy(confs[mask], gt_label)\n\n loc_loss /= batchsize\n conf_loss /= batchsize\n\n return loc_loss, conf_loss\n\n\nclass Caffe2FCUniform(chainer.initializer.Initializer):\n \"\"\"Initializer used in Caffe2.\n\n \"\"\"\n\n def __call__(self, array):\n scale = 1 / np.sqrt(array.shape[-1])\n initializers.Uniform(scale)(array)\n\n\ndef _suppress(raw_bbox, raw_score, nms_thresh, score_thresh):\n xp = cuda.get_array_module(raw_bbox, raw_score)\n\n bbox = []\n label = []\n score = []\n for l in range(raw_score.shape[1] - 1):\n bbox_l = raw_bbox[:, l + 1]\n score_l = raw_score[:, l + 1]\n\n mask = score_l >= score_thresh\n bbox_l = bbox_l[mask]\n score_l = score_l[mask]\n\n order = argsort(-score_l)\n bbox_l = bbox_l[order]\n score_l = score_l[order]\n indices = utils.non_maximum_suppression(bbox_l, nms_thresh)\n bbox_l = bbox_l[indices]\n score_l = score_l[indices]\n\n bbox.append(bbox_l)\n label.append(xp.array((l,) * len(bbox_l)))\n score.append(score_l)\n\n bbox = xp.vstack(bbox).astype(np.float32)\n label = xp.hstack(label).astype(np.int32)\n score = xp.hstack(score).astype(np.float32)\n return bbox, label, score\n\n"}}},{"rowIdx":351,"cells":{"text":{"kind":"string","value":"\nimport logging\n\nfrom homeassistant.components.binary_sensor import BinarySensorEntity\n\nfrom .const import 
BSH_DOOR_STATE, DOMAIN
from .entity import HomeConnectEntity

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Home Connect binary sensor."""

    def get_entities():
        entities = []
        hc_api = hass.data[DOMAIN][config_entry.entry_id]
        for device_dict in hc_api.devices:
            entity_dicts = device_dict.get("entities", {}).get("binary_sensor", [])
            entities += [HomeConnectBinarySensor(**d) for d in entity_dicts]
        return entities

    async_add_entities(await hass.async_add_executor_job(get_entities), True)


class HomeConnectBinarySensor(HomeConnectEntity, BinarySensorEntity):
    """Binary sensor for Home Connect."""

    def __init__(self, device, desc, device_class):
        """Initialize the entity."""
        super().__init__(device, desc)
        self._device_class = device_class
        self._state = None

    @property
    def is_on(self):
        """Return true if the binary sensor is on."""
        return bool(self._state)

    @property
    def available(self):
        """Return true if the binary sensor is available."""
        return self._state is not None

    async def async_update(self):
        """Update the binary sensor's status."""
        state = self.device.appliance.status.get(BSH_DOOR_STATE, {})
        if not state:
            self._state = None
        elif state.get("value") in [
            "BSH.Common.EnumType.DoorState.Closed",
            "BSH.Common.EnumType.DoorState.Locked",
        ]:
            self._state = False
        elif state.get("value") == "BSH.Common.EnumType.DoorState.Open":
            self._state = True
        else:
            _LOGGER.warning("Unexpected value for HomeConnect door state: %s", state)
            self._state = None
        _LOGGER.debug("Updated, new state: %s", self._state)

    @property
    def device_class(self):
        """Return the device class."""
        return self._device_class


# --- row 352 ---
from unittest import expectedFailure

from stash.tests.stashtest import StashTestCase


class EchoTests(StashTestCase):
    """tests for the 'echo' command."""

    def do_echo(self, s):
        """echo a string and return the echoed output."""
        return self.run_command("echo " + s, exitcode=0)

    def test_simple(self):
        """test 'echo test'"""
        o = self.do_echo("test")
        self.assertEqual(o, "test\n")

    def test_multi(self):
        """test 'echo test1 test2 test3'"""
        o = self.do_echo("test1 test2 test3")
        self.assertEqual(o, "test1 test2 test3\n")

    def test_help_ignore(self):
        """test that -h and --help will be ignored by echo."""
        ho = self.do_echo("-h")
        self.assertEqual(ho, "-h\n")
        helpo = self.do_echo("--help")
        self.assertEqual(helpo, "--help\n")

    def test_empty(self):
        """test the behavior without arguments."""
        output = self.run_command("echo", exitcode=0)
        self.assertEqual(output, "\n")

    @expectedFailure
    def test_non_ascii(self):
        """test echo with non-ascii characters."""
        output = self.do_echo(u"Non-Ascii: äöüß end")
        self.assertEqual(output, u"Non-Ascii: äöüß end\n")


# --- row 353 ---
import os.path

import coverage


class Plugin(coverage.CoveragePlugin):
    """A file tracer plugin for testing."""
    def file_tracer(self, filename):
        if "render.py" in filename:
            return RenderFileTracer()
        return None

    def file_reporter(self, filename):
        return FileReporter(filename)


class RenderFileTracer(coverage.FileTracer):
    """A FileTracer using 
information from the caller.\"\"\"\n\n def has_dynamic_source_filename(self):\n return True\n\n def dynamic_source_filename(self, filename, frame):\n if frame.f_code.co_name != \"render\":\n return None\n source_filename = os.path.abspath(frame.f_locals['filename'])\n return source_filename\n\n def line_number_range(self, frame):\n lineno = frame.f_locals['linenum']\n return lineno, lineno+1\n\n\nclass FileReporter(coverage.FileReporter):\n \"\"\"A goofy file reporter.\"\"\"\n def lines(self):\n # Goofy test arrangement: claim that the file has as many lines as the\n # number in its name.\n num = os.path.basename(self.filename).split(\".\")[0].split(\"_\")[1]\n return set(range(1, int(num)+1))\n\n\ndef coverage_init(reg, options): # pylint: disable=unused-argument\n \"\"\"Called by coverage to initialize the plugins here.\"\"\"\n reg.add_file_tracer(Plugin())\n\n"}}},{"rowIdx":354,"cells":{"text":{"kind":"string","value":"\nimport os\nimport operator\nfrom typing import cast, Any, List, Optional, Tuple, Union\n\nfrom PyQt5.QtGui import QFont\nfrom PyQt5.QtWebEngineWidgets import QWebEngineSettings, QWebEngineProfile\n\nfrom qutebrowser.browser.webengine import spell, webenginequtescheme\nfrom qutebrowser.config import config, websettings\nfrom qutebrowser.config.websettings import AttributeInfo as Attr\nfrom qutebrowser.utils import standarddir, qtutils, message, log, urlmatch, usertypes\n\n# The default QWebEngineProfile\ndefault_profile = cast(QWebEngineProfile, None)\n# The QWebEngineProfile used for private (off-the-record) windows\nprivate_profile: Optional[QWebEngineProfile] = None\n# The global WebEngineSettings object\nglobal_settings = cast('WebEngineSettings', None)\n\nparsed_user_agent = None\n\n\nclass _SettingsWrapper:\n\n \"\"\"Expose a QWebEngineSettings interface which acts on all profiles.\n\n For read operations, the default profile value is always used.\n \"\"\"\n\n def _settings(self):\n yield default_profile.settings()\n if private_profile:\n yield private_profile.settings()\n\n def setAttribute(self, attribute, on):\n for settings in self._settings():\n settings.setAttribute(attribute, on)\n\n def setFontFamily(self, which, family):\n for settings in self._settings():\n settings.setFontFamily(which, family)\n\n def setFontSize(self, fonttype, size):\n for settings in self._settings():\n settings.setFontSize(fonttype, size)\n\n def setDefaultTextEncoding(self, encoding):\n for settings in self._settings():\n settings.setDefaultTextEncoding(encoding)\n\n def setUnknownUrlSchemePolicy(self, policy):\n for settings in self._settings():\n settings.setUnknownUrlSchemePolicy(policy)\n\n def testAttribute(self, attribute):\n return default_profile.settings().testAttribute(attribute)\n\n def fontSize(self, fonttype):\n return default_profile.settings().fontSize(fonttype)\n\n def fontFamily(self, which):\n return default_profile.settings().fontFamily(which)\n\n def defaultTextEncoding(self):\n return default_profile.settings().defaultTextEncoding()\n\n def unknownUrlSchemePolicy(self):\n return default_profile.settings().unknownUrlSchemePolicy()\n\n\nclass WebEngineSettings(websettings.AbstractSettings):\n\n \"\"\"A wrapper for the config for QWebEngineSettings.\"\"\"\n\n _ATTRIBUTES = {\n 'content.xss_auditing':\n Attr(QWebEngineSettings.XSSAuditingEnabled),\n 'content.images':\n Attr(QWebEngineSettings.AutoLoadImages),\n 'content.javascript.enabled':\n Attr(QWebEngineSettings.JavascriptEnabled),\n 'content.javascript.can_open_tabs_automatically':\n 
Attr(QWebEngineSettings.JavascriptCanOpenWindows),\n 'content.javascript.can_access_clipboard':\n Attr(QWebEngineSettings.JavascriptCanAccessClipboard),\n 'content.plugins':\n Attr(QWebEngineSettings.PluginsEnabled),\n 'content.hyperlink_auditing':\n Attr(QWebEngineSettings.HyperlinkAuditingEnabled),\n 'content.local_content_can_access_remote_urls':\n Attr(QWebEngineSettings.LocalContentCanAccessRemoteUrls),\n 'content.local_content_can_access_file_urls':\n Attr(QWebEngineSettings.LocalContentCanAccessFileUrls),\n 'content.webgl':\n Attr(QWebEngineSettings.WebGLEnabled),\n 'content.local_storage':\n Attr(QWebEngineSettings.LocalStorageEnabled),\n 'content.desktop_capture':\n Attr(QWebEngineSettings.ScreenCaptureEnabled,\n converter=lambda val: True if val == 'ask' else val),\n # 'ask' is handled via the permission system\n\n 'input.spatial_navigation':\n Attr(QWebEngineSettings.SpatialNavigationEnabled),\n 'input.links_included_in_focus_chain':\n Attr(QWebEngineSettings.LinksIncludedInFocusChain),\n\n 'scrolling.smooth':\n Attr(QWebEngineSettings.ScrollAnimatorEnabled),\n\n 'content.print_element_backgrounds':\n Attr(QWebEngineSettings.PrintElementBackgrounds),\n\n 'content.autoplay':\n Attr(QWebEngineSettings.PlaybackRequiresUserGesture,\n converter=operator.not_),\n\n 'content.dns_prefetch':\n Attr(QWebEngineSettings.DnsPrefetchEnabled),\n }\n\n _FONT_SIZES = {\n 'fonts.web.size.minimum':\n QWebEngineSettings.MinimumFontSize,\n 'fonts.web.size.minimum_logical':\n QWebEngineSettings.MinimumLogicalFontSize,\n 'fonts.web.size.default':\n QWebEngineSettings.DefaultFontSize,\n 'fonts.web.size.default_fixed':\n QWebEngineSettings.DefaultFixedFontSize,\n }\n\n _FONT_FAMILIES = {\n 'fonts.web.family.standard': QWebEngineSettings.StandardFont,\n 'fonts.web.family.fixed': QWebEngineSettings.FixedFont,\n 'fonts.web.family.serif': QWebEngineSettings.SerifFont,\n 'fonts.web.family.sans_serif': QWebEngineSettings.SansSerifFont,\n 'fonts.web.family.cursive': QWebEngineSettings.CursiveFont,\n 'fonts.web.family.fantasy': QWebEngineSettings.FantasyFont,\n }\n\n _UNKNOWN_URL_SCHEME_POLICY = {\n 'disallow':\n QWebEngineSettings.DisallowUnknownUrlSchemes,\n 'allow-from-user-interaction':\n QWebEngineSettings.AllowUnknownUrlSchemesFromUserInteraction,\n 'allow-all':\n QWebEngineSettings.AllowAllUnknownUrlSchemes,\n }\n\n # Mapping from WebEngineSettings::initDefaults in\n # qtwebengine/src/core/web_engine_settings.cpp\n _FONT_TO_QFONT = {\n QWebEngineSettings.StandardFont: QFont.Serif,\n QWebEngineSettings.FixedFont: QFont.Monospace,\n QWebEngineSettings.SerifFont: QFont.Serif,\n QWebEngineSettings.SansSerifFont: QFont.SansSerif,\n QWebEngineSettings.CursiveFont: QFont.Cursive,\n QWebEngineSettings.FantasyFont: QFont.Fantasy,\n }\n\n def set_unknown_url_scheme_policy(\n self, policy: Union[str, usertypes.Unset]) -> bool:\n \"\"\"Set the UnknownUrlSchemePolicy to use.\n\n Return:\n True if there was a change, False otherwise.\n \"\"\"\n old_value = self._settings.unknownUrlSchemePolicy()\n if isinstance(policy, usertypes.Unset):\n self._settings.resetUnknownUrlSchemePolicy()\n new_value = self._settings.unknownUrlSchemePolicy()\n else:\n new_value = self._UNKNOWN_URL_SCHEME_POLICY[policy]\n self._settings.setUnknownUrlSchemePolicy(new_value)\n return old_value != new_value\n\n def _update_setting(self, setting, value):\n if setting == 'content.unknown_url_scheme_policy':\n return self.set_unknown_url_scheme_policy(value)\n return super()._update_setting(setting, value)\n\n def init_settings(self):\n 
super().init_settings()\n self.update_setting('content.unknown_url_scheme_policy')\n\n\nclass ProfileSetter:\n\n \"\"\"Helper to set various settings on a profile.\"\"\"\n\n def __init__(self, profile):\n self._profile = profile\n\n def init_profile(self):\n \"\"\"Initialize settings on the given profile.\"\"\"\n self.set_http_headers()\n self.set_http_cache_size()\n self._set_hardcoded_settings()\n self.set_dictionary_language()\n\n def _set_hardcoded_settings(self):\n \"\"\"Set up settings with a fixed value.\"\"\"\n settings = self._profile.settings()\n\n settings.setAttribute(\n QWebEngineSettings.FullScreenSupportEnabled, True)\n settings.setAttribute(\n QWebEngineSettings.FocusOnNavigationEnabled, False)\n\n try:\n settings.setAttribute(QWebEngineSettings.PdfViewerEnabled, False)\n except AttributeError:\n # Added in Qt 5.13\n pass\n\n def set_http_headers(self):\n \"\"\"Set the user agent and accept-language for the given profile.\n\n We override those per request in the URL interceptor (to allow for\n per-domain values), but this one still gets used for things like\n window.navigator.userAgent/.languages in JS.\n \"\"\"\n user_agent = websettings.user_agent()\n self._profile.setHttpUserAgent(user_agent)\n\n accept_language = config.val.content.headers.accept_language\n if accept_language is not None:\n self._profile.setHttpAcceptLanguage(accept_language)\n\n def set_http_cache_size(self):\n \"\"\"Initialize the HTTP cache size for the given profile.\"\"\"\n size = config.val.content.cache.size\n if size is None:\n size = 0\n else:\n size = qtutils.check_overflow(size, 'int', fatal=False)\n\n # 0: automatically managed by QtWebEngine\n self._profile.setHttpCacheMaximumSize(size)\n\n def set_persistent_cookie_policy(self):\n \"\"\"Set the HTTP Cookie size for the given profile.\"\"\"\n assert not self._profile.isOffTheRecord()\n if config.val.content.cookies.store:\n value = QWebEngineProfile.AllowPersistentCookies\n else:\n value = QWebEngineProfile.NoPersistentCookies\n self._profile.setPersistentCookiesPolicy(value)\n\n def set_dictionary_language(self, warn=True):\n \"\"\"Load the given dictionaries.\"\"\"\n filenames = []\n for code in config.val.spellcheck.languages or []:\n local_filename = spell.local_filename(code)\n if not local_filename:\n if warn:\n message.warning(\"Language {} is not installed - see \"\n \"scripts/dictcli.py in qutebrowser's \"\n \"sources\".format(code))\n continue\n\n filenames.append(os.path.splitext(local_filename)[0])\n\n log.config.debug(\"Found dicts: {}\".format(filenames))\n self._profile.setSpellCheckLanguages(filenames)\n self._profile.setSpellCheckEnabled(bool(filenames))\n\n\ndef _update_settings(option):\n \"\"\"Update global settings when qwebsettings changed.\"\"\"\n global_settings.update_setting(option)\n\n # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-75884\n # (note this isn't actually fixed properly before Qt 5.15)\n header_bug_fixed = qtutils.version_check('5.15', compiled=False)\n\n if option in ['content.headers.user_agent',\n 'content.headers.accept_language'] and header_bug_fixed:\n default_profile.setter.set_http_headers()\n if private_profile:\n private_profile.setter.set_http_headers()\n elif option == 'content.cache.size':\n default_profile.setter.set_http_cache_size()\n if private_profile:\n private_profile.setter.set_http_cache_size()\n elif option == 'content.cookies.store':\n default_profile.setter.set_persistent_cookie_policy()\n # We're not touching the private profile's cookie policy.\n elif option == 
'spellcheck.languages':\n default_profile.setter.set_dictionary_language()\n if private_profile:\n private_profile.setter.set_dictionary_language(warn=False)\n\n\ndef _init_user_agent_str(ua):\n global parsed_user_agent\n parsed_user_agent = websettings.UserAgent.parse(ua)\n\n\ndef init_user_agent():\n _init_user_agent_str(QWebEngineProfile.defaultProfile().httpUserAgent())\n\n\ndef _init_default_profile():\n \"\"\"Init the default QWebEngineProfile.\"\"\"\n global default_profile\n\n default_profile = QWebEngineProfile.defaultProfile()\n init_user_agent()\n\n default_profile.setter = ProfileSetter( # type: ignore[attr-defined]\n default_profile)\n default_profile.setCachePath(\n os.path.join(standarddir.cache(), 'webengine'))\n default_profile.setPersistentStoragePath(\n os.path.join(standarddir.data(), 'webengine'))\n default_profile.setter.init_profile()\n default_profile.setter.set_persistent_cookie_policy()\n\n\ndef init_private_profile():\n \"\"\"Init the private QWebEngineProfile.\"\"\"\n global private_profile\n\n if not qtutils.is_single_process():\n private_profile = QWebEngineProfile()\n private_profile.setter = ProfileSetter( # type: ignore[attr-defined]\n private_profile)\n assert private_profile.isOffTheRecord()\n private_profile.setter.init_profile()\n\n\ndef _init_site_specific_quirks():\n \"\"\"Add custom user-agent settings for problematic sites.\n\n See https://github.com/qutebrowser/qutebrowser/issues/4810\n \"\"\"\n if not config.val.content.site_specific_quirks:\n return\n\n # Please leave this here as a template for new UAs.\n # default_ua = (\"Mozilla/5.0 ({os_info}) \"\n # \"AppleWebKit/{webkit_version} (KHTML, like Gecko) \"\n # \"{qt_key}/{qt_version} \"\n # \"{upstream_browser_key}/{upstream_browser_version} \"\n # \"Safari/{webkit_version}\")\n no_qtwe_ua = (\"Mozilla/5.0 ({os_info}) \"\n \"AppleWebKit/{webkit_version} (KHTML, like Gecko) \"\n \"{upstream_browser_key}/{upstream_browser_version} \"\n \"Safari/{webkit_version}\")\n new_chrome_ua = (\"Mozilla/5.0 ({os_info}) \"\n \"AppleWebKit/537.36 (KHTML, like Gecko) \"\n \"Chrome/99 \"\n \"Safari/537.36\")\n edge_ua = (\"Mozilla/5.0 ({os_info}) \"\n \"AppleWebKit/{webkit_version} (KHTML, like Gecko) \"\n \"{upstream_browser_key}/{upstream_browser_version} \"\n \"Safari/{webkit_version} \"\n \"Edg/{upstream_browser_version}\")\n\n user_agents = {\n # Needed to avoid a \"\"WhatsApp works with Google Chrome 36+\" error\n # page which doesn't allow to use WhatsApp Web at all. Also see the\n # additional JS quirk: qutebrowser/javascript/whatsapp_web_quirk.user.js\n # https://github.com/qutebrowser/qutebrowser/issues/4445\n 'https://web.whatsapp.com/': no_qtwe_ua,\n\n # Needed to avoid a \"you're using a browser [...] 
that doesn't allow us\n # to keep your account secure\" error.\n # https://github.com/qutebrowser/qutebrowser/issues/5182\n 'https://accounts.google.com/*': edge_ua,\n\n # Needed because Slack adds an error which prevents using it relatively\n # aggressively, despite things actually working fine.\n # September 2020: Qt 5.12 works, but Qt <= 5.11 shows the error.\n # https://github.com/qutebrowser/qutebrowser/issues/4669\n 'https://*.slack.com/*': new_chrome_ua,\n }\n\n for pattern, ua in user_agents.items():\n config.instance.set_obj('content.headers.user_agent', ua,\n pattern=urlmatch.UrlPattern(pattern),\n hide_userconfig=True)\n\n\ndef _init_devtools_settings():\n \"\"\"Make sure the devtools always get images/JS permissions.\"\"\"\n settings: List[Tuple[str, Any]] = [\n ('content.javascript.enabled', True),\n ('content.images', True),\n ('content.cookies.accept', 'all'),\n ]\n\n for setting, value in settings:\n for pattern in ['chrome-devtools://*', 'devtools://*']:\n config.instance.set_obj(setting, value,\n pattern=urlmatch.UrlPattern(pattern),\n hide_userconfig=True)\n\n\ndef init():\n \"\"\"Initialize the global QWebSettings.\"\"\"\n webenginequtescheme.init()\n spell.init()\n\n _init_default_profile()\n init_private_profile()\n config.instance.changed.connect(_update_settings)\n\n global global_settings\n global_settings = WebEngineSettings(_SettingsWrapper())\n global_settings.init_settings()\n\n _init_site_specific_quirks()\n _init_devtools_settings()\n\n\ndef shutdown():\n pass\n\n"}}},{"rowIdx":355,"cells":{"text":{"kind":"string","value":"\nfrom lightwave.lightwave import LWLink\nimport voluptuous as vol\n\nfrom homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN\nfrom homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN\nfrom homeassistant.const import CONF_HOST, CONF_LIGHTS, CONF_NAME, CONF_SWITCHES\nimport homeassistant.helpers.config_validation as cv\nfrom homeassistant.helpers.discovery import async_load_platform\n\nCONF_SERIAL = \"serial\"\nCONF_PROXY_IP = \"proxy_ip\"\nCONF_PROXY_PORT = \"proxy_port\"\nCONF_TRV = \"trv\"\nCONF_TRVS = \"trvs\"\nDEFAULT_PROXY_PORT = 7878\nDEFAULT_PROXY_IP = \"127.0.0.1\"\nDOMAIN = \"lightwave\"\nLIGHTWAVE_LINK = f\"{DOMAIN}_link\"\nLIGHTWAVE_TRV_PROXY = f\"{DOMAIN}_proxy\"\nLIGHTWAVE_TRV_PROXY_PORT = f\"{DOMAIN}_proxy_port\"\n\n\nCONFIG_SCHEMA = vol.Schema(\n {\n DOMAIN: vol.Schema(\n vol.All(\n cv.has_at_least_one_key(CONF_LIGHTS, CONF_SWITCHES, CONF_TRV),\n {\n vol.Required(CONF_HOST): cv.string,\n vol.Optional(CONF_LIGHTS, default={}): {\n cv.string: vol.Schema({vol.Required(CONF_NAME): cv.string})\n },\n vol.Optional(CONF_SWITCHES, default={}): {\n cv.string: vol.Schema({vol.Required(CONF_NAME): cv.string})\n },\n vol.Optional(CONF_TRV, default={}): {\n vol.Optional(\n CONF_PROXY_PORT, default=DEFAULT_PROXY_PORT\n ): cv.port,\n vol.Optional(\n CONF_PROXY_IP, default=DEFAULT_PROXY_IP\n ): cv.string,\n vol.Required(CONF_TRVS, default={}): {\n cv.string: vol.Schema(\n {\n vol.Required(CONF_NAME): cv.string,\n vol.Required(CONF_SERIAL): cv.string,\n }\n )\n },\n },\n },\n )\n )\n },\n extra=vol.ALLOW_EXTRA,\n)\n\n\nasync def async_setup(hass, config):\n \"\"\"Try to start embedded Lightwave broker.\"\"\"\n host = config[DOMAIN][CONF_HOST]\n lwlink = LWLink(host)\n hass.data[LIGHTWAVE_LINK] = lwlink\n\n lights = config[DOMAIN][CONF_LIGHTS]\n if lights:\n hass.async_create_task(\n async_load_platform(hass, \"light\", DOMAIN, lights, config)\n )\n\n switches = config[DOMAIN][CONF_SWITCHES]\n if switches:\n 
        hass.async_create_task(
            async_load_platform(hass, "switch", DOMAIN, switches, config)
        )

    trv = config[DOMAIN][CONF_TRV]
    if trv:
        trvs = trv[CONF_TRVS]
        proxy_ip = trv[CONF_PROXY_IP]
        proxy_port = trv[CONF_PROXY_PORT]
        lwlink.set_trv_proxy(proxy_ip, proxy_port)

        platforms = [CLIMATE_DOMAIN, SENSOR_DOMAIN]
        for platform in platforms:
            hass.async_create_task(
                async_load_platform(hass, platform, DOMAIN, trvs, config)
            )

    return True


# --- row 356 ---
import asyncio

from haffmpeg.camera import CameraMjpeg
from haffmpeg.tools import IMAGE_JPEG, ImageFrame
from onvif.exceptions import ONVIFError
import voluptuous as vol

from homeassistant.components.camera import SUPPORT_STREAM, Camera
from homeassistant.components.ffmpeg import CONF_EXTRA_ARGUMENTS, DATA_FFMPEG
from homeassistant.const import HTTP_BASIC_AUTHENTICATION
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream

from .base import ONVIFBaseEntity
from .const import (
    ABSOLUTE_MOVE,
    ATTR_CONTINUOUS_DURATION,
    ATTR_DISTANCE,
    ATTR_MOVE_MODE,
    ATTR_PAN,
    ATTR_PRESET,
    ATTR_SPEED,
    ATTR_TILT,
    ATTR_ZOOM,
    CONF_RTSP_TRANSPORT,
    CONF_SNAPSHOT_AUTH,
    CONTINUOUS_MOVE,
    DIR_DOWN,
    DIR_LEFT,
    DIR_RIGHT,
    DIR_UP,
    DOMAIN,
    GOTOPRESET_MOVE,
    LOGGER,
    RELATIVE_MOVE,
    SERVICE_PTZ,
    ZOOM_IN,
    ZOOM_OUT,
)


async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the ONVIF camera video stream."""
    platform = entity_platform.current_platform.get()

    # Create PTZ service
    platform.async_register_entity_service(
        SERVICE_PTZ,
        {
            vol.Optional(ATTR_PAN): vol.In([DIR_LEFT, DIR_RIGHT]),
            vol.Optional(ATTR_TILT): vol.In([DIR_UP, DIR_DOWN]),
            vol.Optional(ATTR_ZOOM): vol.In([ZOOM_OUT, ZOOM_IN]),
            vol.Optional(ATTR_DISTANCE, default=0.1): cv.small_float,
            vol.Optional(ATTR_SPEED, default=0.5): cv.small_float,
            vol.Optional(ATTR_MOVE_MODE, default=RELATIVE_MOVE): vol.In(
                [CONTINUOUS_MOVE, RELATIVE_MOVE, ABSOLUTE_MOVE, GOTOPRESET_MOVE]
            ),
            vol.Optional(ATTR_CONTINUOUS_DURATION, default=0.5): cv.small_float,
            vol.Optional(ATTR_PRESET, default="0"): cv.string,
        },
        "async_perform_ptz",
    )

    device = hass.data[DOMAIN][config_entry.unique_id]
    async_add_entities(
        [ONVIFCameraEntity(device, profile) for profile in device.profiles]
    )

    return True


class ONVIFCameraEntity(ONVIFBaseEntity, Camera):
    """Representation of an ONVIF camera."""

    def __init__(self, device, profile):
        """Initialize ONVIF camera entity."""
        ONVIFBaseEntity.__init__(self, device, profile)
        Camera.__init__(self)
        self.stream_options[CONF_RTSP_TRANSPORT] = device.config_entry.options.get(
            CONF_RTSP_TRANSPORT
        )
        self._basic_auth = (
            device.config_entry.data.get(CONF_SNAPSHOT_AUTH)
            == HTTP_BASIC_AUTHENTICATION
        )
        self._stream_uri = None

    @property
    def supported_features(self) -> int:
        """Return supported features."""
        return SUPPORT_STREAM

    @property
    def name(self) -> str:
        """Return the name of this camera."""
        return f"{self.device.name} - {self.profile.name}"

    @property
    def unique_id(self) -> str:
        """Return a unique ID."""
        if self.profile.index:
            return f"{self.device.info.mac or self.device.info.serial_number}_{self.profile.index}"
        return self.device.info.mac or self.device.info.serial_number

    @property
    def 
entity_registry_enabled_default(self) -> bool:\n \"\"\"Return if the entity should be enabled when first added to the entity registry.\"\"\"\n return self.device.max_resolution == self.profile.video.resolution.width\n\n async def stream_source(self):\n \"\"\"Return the stream source.\"\"\"\n return self._stream_uri\n\n async def async_camera_image(self):\n \"\"\"Return a still image response from the camera.\"\"\"\n image = None\n\n if self.device.capabilities.snapshot:\n try:\n image = await self.device.device.get_snapshot(\n self.profile.token, self._basic_auth\n )\n except ONVIFError as err:\n LOGGER.error(\n \"Fetch snapshot image failed from %s, falling back to FFmpeg; %s\",\n self.device.name,\n err,\n )\n\n if image is None:\n ffmpeg = ImageFrame(self.hass.data[DATA_FFMPEG].binary, loop=self.hass.loop)\n image = await asyncio.shield(\n ffmpeg.get_image(\n self._stream_uri,\n output_format=IMAGE_JPEG,\n extra_cmd=self.device.config_entry.options.get(\n CONF_EXTRA_ARGUMENTS\n ),\n )\n )\n\n return image\n\n async def handle_async_mjpeg_stream(self, request):\n \"\"\"Generate an HTTP MJPEG stream from the camera.\"\"\"\n LOGGER.debug(\"Handling mjpeg stream from camera '%s'\", self.device.name)\n\n ffmpeg_manager = self.hass.data[DATA_FFMPEG]\n stream = CameraMjpeg(ffmpeg_manager.binary, loop=self.hass.loop)\n\n await stream.open_camera(\n self._stream_uri,\n extra_cmd=self.device.config_entry.options.get(CONF_EXTRA_ARGUMENTS),\n )\n\n try:\n stream_reader = await stream.get_reader()\n return await async_aiohttp_proxy_stream(\n self.hass,\n request,\n stream_reader,\n ffmpeg_manager.ffmpeg_stream_content_type,\n )\n finally:\n await stream.close()\n\n async def async_added_to_hass(self):\n \"\"\"Run when entity about to be added to hass.\"\"\"\n uri_no_auth = await self.device.async_get_stream_uri(self.profile)\n self._stream_uri = uri_no_auth.replace(\n \"rtsp://\", f\"rtsp://{self.device.username}:{self.device.password}@\", 1\n )\n\n async def async_perform_ptz(\n self,\n distance,\n speed,\n move_mode,\n continuous_duration,\n preset,\n pan=None,\n tilt=None,\n zoom=None,\n ) -> None:\n \"\"\"Perform a PTZ action on the camera.\"\"\"\n await self.device.async_perform_ptz(\n self.profile,\n distance,\n speed,\n move_mode,\n continuous_duration,\n preset,\n pan,\n tilt,\n zoom,\n )\n\n"}}},{"rowIdx":357,"cells":{"text":{"kind":"string","value":"\nimport asyncio\nimport io\nimport logging\n\nimport aiohttp\nimport hangups\nfrom hangups import ChatMessageEvent, ChatMessageSegment, Client, get_auth, hangouts_pb2\n\nfrom homeassistant.const import HTTP_OK\nfrom homeassistant.core import callback\nfrom homeassistant.helpers import dispatcher, intent\nfrom homeassistant.helpers.aiohttp_client import async_get_clientsession\n\nfrom .const import (\n ATTR_DATA,\n ATTR_MESSAGE,\n ATTR_TARGET,\n CONF_CONVERSATION_ID,\n CONF_CONVERSATION_NAME,\n CONF_CONVERSATIONS,\n CONF_MATCHERS,\n DOMAIN,\n EVENT_HANGOUTS_CONNECTED,\n EVENT_HANGOUTS_CONVERSATIONS_CHANGED,\n EVENT_HANGOUTS_CONVERSATIONS_RESOLVED,\n EVENT_HANGOUTS_DISCONNECTED,\n EVENT_HANGOUTS_MESSAGE_RECEIVED,\n INTENT_HELP,\n)\nfrom .hangups_utils import HangoutsCredentials, HangoutsRefreshToken\n\n_LOGGER = logging.getLogger(__name__)\n\n\nclass HangoutsBot:\n \"\"\"The Hangouts Bot.\"\"\"\n\n def __init__(\n self, hass, refresh_token, intents, default_convs, error_suppressed_convs\n ):\n \"\"\"Set up the client.\"\"\"\n self.hass = hass\n self._connected = False\n\n self._refresh_token = refresh_token\n\n self._intents = intents\n 
self._conversation_intents = None\n\n self._client = None\n self._user_list = None\n self._conversation_list = None\n self._default_convs = default_convs\n self._default_conv_ids = None\n self._error_suppressed_convs = error_suppressed_convs\n self._error_suppressed_conv_ids = None\n\n dispatcher.async_dispatcher_connect(\n self.hass,\n EVENT_HANGOUTS_MESSAGE_RECEIVED,\n self._async_handle_conversation_message,\n )\n\n def _resolve_conversation_id(self, obj):\n if CONF_CONVERSATION_ID in obj:\n return obj[CONF_CONVERSATION_ID]\n if CONF_CONVERSATION_NAME in obj:\n conv = self._resolve_conversation_name(obj[CONF_CONVERSATION_NAME])\n if conv is not None:\n return conv.id_\n return None\n\n def _resolve_conversation_name(self, name):\n for conv in self._conversation_list.get_all():\n if conv.name == name:\n return conv\n return None\n\n @callback\n def async_update_conversation_commands(self):\n \"\"\"Refresh the commands for every conversation.\"\"\"\n self._conversation_intents = {}\n\n for intent_type, data in self._intents.items():\n if data.get(CONF_CONVERSATIONS):\n conversations = []\n for conversation in data.get(CONF_CONVERSATIONS):\n conv_id = self._resolve_conversation_id(conversation)\n if conv_id is not None:\n conversations.append(conv_id)\n data[f\"_{CONF_CONVERSATIONS}\"] = conversations\n elif self._default_conv_ids:\n data[f\"_{CONF_CONVERSATIONS}\"] = self._default_conv_ids\n else:\n data[f\"_{CONF_CONVERSATIONS}\"] = [\n conv.id_ for conv in self._conversation_list.get_all()\n ]\n\n for conv_id in data[f\"_{CONF_CONVERSATIONS}\"]:\n if conv_id not in self._conversation_intents:\n self._conversation_intents[conv_id] = {}\n\n self._conversation_intents[conv_id][intent_type] = data\n\n try:\n self._conversation_list.on_event.remove_observer(\n self._async_handle_conversation_event\n )\n except ValueError:\n pass\n self._conversation_list.on_event.add_observer(\n self._async_handle_conversation_event\n )\n\n @callback\n def async_resolve_conversations(self, _):\n \"\"\"Resolve the list of default and error suppressed conversations.\"\"\"\n self._default_conv_ids = []\n self._error_suppressed_conv_ids = []\n\n for conversation in self._default_convs:\n conv_id = self._resolve_conversation_id(conversation)\n if conv_id is not None:\n self._default_conv_ids.append(conv_id)\n\n for conversation in self._error_suppressed_convs:\n conv_id = self._resolve_conversation_id(conversation)\n if conv_id is not None:\n self._error_suppressed_conv_ids.append(conv_id)\n dispatcher.async_dispatcher_send(\n self.hass, EVENT_HANGOUTS_CONVERSATIONS_RESOLVED\n )\n\n async def _async_handle_conversation_event(self, event):\n if isinstance(event, ChatMessageEvent):\n dispatcher.async_dispatcher_send(\n self.hass,\n EVENT_HANGOUTS_MESSAGE_RECEIVED,\n event.conversation_id,\n event.user_id,\n event,\n )\n\n async def _async_handle_conversation_message(self, conv_id, user_id, event):\n \"\"\"Handle a message sent to a conversation.\"\"\"\n user = self._user_list.get_user(user_id)\n if user.is_self:\n return\n message = event.text\n\n _LOGGER.debug(\"Handling message '%s' from %s\", message, user.full_name)\n\n intents = self._conversation_intents.get(conv_id)\n if intents is not None:\n is_error = False\n try:\n intent_result = await self._async_process(intents, message, conv_id)\n except (intent.UnknownIntent, intent.IntentHandleError) as err:\n is_error = True\n intent_result = intent.IntentResponse()\n intent_result.async_set_speech(str(err))\n\n if intent_result is None:\n is_error = True\n 
intent_result = intent.IntentResponse()\n intent_result.async_set_speech(\"Sorry, I didn't understand that\")\n\n message = (\n intent_result.as_dict().get(\"speech\", {}).get(\"plain\", {}).get(\"speech\")\n )\n\n if (message is not None) and not (\n is_error and conv_id in self._error_suppressed_conv_ids\n ):\n await self._async_send_message(\n [{\"text\": message, \"parse_str\": True}],\n [{CONF_CONVERSATION_ID: conv_id}],\n None,\n )\n\n async def _async_process(self, intents, text, conv_id):\n \"\"\"Detect a matching intent.\"\"\"\n for intent_type, data in intents.items():\n for matcher in data.get(CONF_MATCHERS, []):\n match = matcher.match(text)\n\n if not match:\n continue\n if intent_type == INTENT_HELP:\n return await self.hass.helpers.intent.async_handle(\n DOMAIN, intent_type, {\"conv_id\": {\"value\": conv_id}}, text\n )\n\n return await self.hass.helpers.intent.async_handle(\n DOMAIN,\n intent_type,\n {key: {\"value\": value} for key, value in match.groupdict().items()},\n text,\n )\n\n async def async_connect(self):\n \"\"\"Login to the Google Hangouts.\"\"\"\n session = await self.hass.async_add_executor_job(\n get_auth,\n HangoutsCredentials(None, None, None),\n HangoutsRefreshToken(self._refresh_token),\n )\n\n self._client = Client(session)\n self._client.on_connect.add_observer(self._on_connect)\n self._client.on_disconnect.add_observer(self._on_disconnect)\n\n self.hass.loop.create_task(self._client.connect())\n\n def _on_connect(self):\n _LOGGER.debug(\"Connected!\")\n self._connected = True\n dispatcher.async_dispatcher_send(self.hass, EVENT_HANGOUTS_CONNECTED)\n\n async def _on_disconnect(self):\n \"\"\"Handle disconnecting.\"\"\"\n if self._connected:\n _LOGGER.debug(\"Connection lost! Reconnect...\")\n await self.async_connect()\n else:\n dispatcher.async_dispatcher_send(self.hass, EVENT_HANGOUTS_DISCONNECTED)\n\n async def async_disconnect(self):\n \"\"\"Disconnect the client if it is connected.\"\"\"\n if self._connected:\n self._connected = False\n await self._client.disconnect()\n\n async def async_handle_hass_stop(self, _):\n \"\"\"Run once when Home Assistant stops.\"\"\"\n await self.async_disconnect()\n\n async def _async_send_message(self, message, targets, data):\n conversations = []\n for target in targets:\n conversation = None\n if CONF_CONVERSATION_ID in target:\n conversation = self._conversation_list.get(target[CONF_CONVERSATION_ID])\n elif CONF_CONVERSATION_NAME in target:\n conversation = self._resolve_conversation_name(\n target[CONF_CONVERSATION_NAME]\n )\n if conversation is not None:\n conversations.append(conversation)\n\n if not conversations:\n return False\n\n messages = []\n for segment in message:\n if messages:\n messages.append(\n ChatMessageSegment(\n \"\", segment_type=hangouts_pb2.SEGMENT_TYPE_LINE_BREAK\n )\n )\n if \"parse_str\" in segment and segment[\"parse_str\"]:\n messages.extend(ChatMessageSegment.from_str(segment[\"text\"]))\n else:\n if \"parse_str\" in segment:\n del segment[\"parse_str\"]\n messages.append(ChatMessageSegment(**segment))\n\n image_file = None\n if data:\n if data.get(\"image_url\"):\n uri = data.get(\"image_url\")\n try:\n websession = async_get_clientsession(self.hass)\n async with websession.get(uri, timeout=5) as response:\n if response.status != HTTP_OK:\n _LOGGER.error(\n \"Fetch image failed, %s, %s\", response.status, response\n )\n image_file = None\n else:\n image_data = await response.read()\n image_file = io.BytesIO(image_data)\n image_file.name = \"image.png\"\n except (asyncio.TimeoutError, 
aiohttp.ClientError) as error:
                    _LOGGER.error("Failed to fetch image, %s", type(error))
                    image_file = None
            elif data.get("image_file"):
                uri = data.get("image_file")
                if self.hass.config.is_allowed_path(uri):
                    try:
                        image_file = open(uri, "rb")
                    except OSError as error:
                        _LOGGER.error(
                            "Image file I/O error(%s): %s", error.errno, error.strerror
                        )
                else:
                    _LOGGER.error('Path "%s" not allowed', uri)

        if not messages:
            return False
        for conv in conversations:
            await conv.send_message(messages, image_file)

    async def _async_list_conversations(self):
        (
            self._user_list,
            self._conversation_list,
        ) = await hangups.build_user_conversation_list(self._client)
        conversations = {}
        for i, conv in enumerate(self._conversation_list.get_all()):
            users_in_conversation = []
            for user in conv.users:
                users_in_conversation.append(user.full_name)
            conversations[str(i)] = {
                CONF_CONVERSATION_ID: str(conv.id_),
                CONF_CONVERSATION_NAME: conv.name,
                "users": users_in_conversation,
            }

        self.hass.states.async_set(
            f"{DOMAIN}.conversations",
            len(self._conversation_list.get_all()),
            attributes=conversations,
        )
        dispatcher.async_dispatcher_send(
            self.hass, EVENT_HANGOUTS_CONVERSATIONS_CHANGED, conversations
        )

    async def async_handle_send_message(self, service):
        """Handle the send_message service."""
        await self._async_send_message(
            service.data[ATTR_MESSAGE],
            service.data[ATTR_TARGET],
            service.data.get(ATTR_DATA, {}),
        )

    async def async_handle_update_users_and_conversations(self, _=None):
        """Handle the update_users_and_conversations service."""
        await self._async_list_conversations()

    async def async_handle_reconnect(self, _=None):
        """Handle the reconnect service."""
        await self.async_disconnect()
        await self.async_connect()

    def get_intents(self, conv_id):
        """Return the intents for a specific conversation."""
        return self._conversation_intents.get(conv_id)


# --- row 358 ---
import pandas as pd


def get_p_vals(df, positive_category, term_significance):
    '''
    Parameters
    ----------
    df : A data frame from, e.g., get_term_freq_df : pd.DataFrame
    positive_category : str
        The positive category name.
    term_significance : TermSignificance
        A TermSignificance instance from which to extract p-values.
    '''
    df_pos = df[[positive_category]]
    df_pos.columns = ['pos']
    df_neg = pd.DataFrame(df[[c for c in df.columns if
                              c != positive_category
                              and c.endswith(' freq')]].sum(axis=1))
    df_neg.columns = ['neg']
    X = df_pos.join(df_neg)[['pos','neg']].values
    return term_significance.get_p_vals(X)


# --- row 359 ---
from unittest.mock import patch

from homeassistant import setup
from homeassistant.core import callback
from homeassistant.helpers import discovery

from tests.common import (
    MockModule,
    MockPlatform,
    get_test_home_assistant,
    mock_coro,
    mock_entity_platform,
    mock_integration,
)


class TestHelpersDiscovery:
    """Tests for discovery helper methods."""

    def setup_method(self, method):
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()

    def teardown_method(self, method):
        """Stop everything that was started."""
        self.hass.stop()

    @patch("homeassistant.setup.async_setup_component", return_value=mock_coro())
    def test_listen(self, mock_setup_component):
\"\"\"Test discovery listen/discover combo.\"\"\"\n helpers = self.hass.helpers\n calls_single = []\n calls_multi = []\n\n @callback\n def callback_single(service, info):\n \"\"\"Service discovered callback.\"\"\"\n calls_single.append((service, info))\n\n @callback\n def callback_multi(service, info):\n \"\"\"Service discovered callback.\"\"\"\n calls_multi.append((service, info))\n\n helpers.discovery.listen(\"test service\", callback_single)\n helpers.discovery.listen([\"test service\", \"another service\"], callback_multi)\n\n helpers.discovery.discover(\n \"test service\", \"discovery info\", \"test_component\", {}\n )\n self.hass.block_till_done()\n\n assert mock_setup_component.called\n assert mock_setup_component.call_args[0] == (self.hass, \"test_component\", {})\n assert len(calls_single) == 1\n assert calls_single[0] == (\"test service\", \"discovery info\")\n\n helpers.discovery.discover(\n \"another service\", \"discovery info\", \"test_component\", {}\n )\n self.hass.block_till_done()\n\n assert len(calls_single) == 1\n assert len(calls_multi) == 2\n assert [\"test service\", \"another service\"] == [info[0] for info in calls_multi]\n\n @patch(\"homeassistant.setup.async_setup_component\", return_value=mock_coro(True))\n def test_platform(self, mock_setup_component):\n \"\"\"Test discover platform method.\"\"\"\n calls = []\n\n @callback\n def platform_callback(platform, info):\n \"\"\"Platform callback method.\"\"\"\n calls.append((platform, info))\n\n discovery.listen_platform(self.hass, \"test_component\", platform_callback)\n\n discovery.load_platform(\n self.hass,\n \"test_component\",\n \"test_platform\",\n \"discovery info\",\n {\"test_component\": {}},\n )\n self.hass.block_till_done()\n assert mock_setup_component.called\n assert mock_setup_component.call_args[0] == (\n self.hass,\n \"test_component\",\n {\"test_component\": {}},\n )\n self.hass.block_till_done()\n\n discovery.load_platform(\n self.hass,\n \"test_component_2\",\n \"test_platform\",\n \"discovery info\",\n {\"test_component\": {}},\n )\n self.hass.block_till_done()\n\n assert len(calls) == 1\n assert calls[0] == (\"test_platform\", \"discovery info\")\n\n self.hass.bus.fire(\n discovery.EVENT_PLATFORM_DISCOVERED,\n {\n discovery.ATTR_SERVICE: discovery.EVENT_LOAD_PLATFORM.format(\n \"test_component\"\n )\n },\n )\n self.hass.block_till_done()\n\n assert len(calls) == 1\n\n def test_circular_import(self):\n \"\"\"Test we don't break doing circular import.\n\n This test will have test_component discover the switch.test_circular\n component while setting up.\n\n The supplied config will load test_component and will load\n switch.test_circular.\n\n That means that after startup, we will have test_component and switch\n setup. 
The test_circular platform has been loaded twice.\n \"\"\"\n component_calls = []\n platform_calls = []\n\n def component_setup(hass, config):\n \"\"\"Set up mock component.\"\"\"\n discovery.load_platform(hass, \"switch\", \"test_circular\", \"disc\", config)\n component_calls.append(1)\n return True\n\n def setup_platform(hass, config, add_entities_callback, discovery_info=None):\n \"\"\"Set up mock platform.\"\"\"\n platform_calls.append(\"disc\" if discovery_info else \"component\")\n\n mock_integration(self.hass, MockModule(\"test_component\", setup=component_setup))\n\n # dependencies are only set in component level\n # since we are using manifest to hold them\n mock_integration(\n self.hass, MockModule(\"test_circular\", dependencies=[\"test_component\"])\n )\n mock_entity_platform(\n self.hass, \"switch.test_circular\", MockPlatform(setup_platform)\n )\n\n setup.setup_component(\n self.hass,\n \"test_component\",\n {\"test_component\": None, \"switch\": [{\"platform\": \"test_circular\"}]},\n )\n\n self.hass.block_till_done()\n\n # test_component will only be setup once\n assert len(component_calls) == 1\n # The platform will be setup once via the config in `setup_component`\n # and once via the discovery inside test_component.\n assert len(platform_calls) == 2\n assert \"test_component\" in self.hass.config.components\n assert \"switch\" in self.hass.config.components\n\n @patch(\"homeassistant.helpers.signal.async_register_signal_handling\")\n def test_1st_discovers_2nd_component(self, mock_signal):\n \"\"\"Test that we don't break if one component discovers the other.\n\n If the first component fires a discovery event to set up the\n second component while the second component is about to be set up,\n it should not set up the second component twice.\n \"\"\"\n component_calls = []\n\n def component1_setup(hass, config):\n \"\"\"Set up mock component.\"\"\"\n print(\"component1 setup\")\n discovery.discover(hass, \"test_component2\", {}, \"test_component2\", {})\n return True\n\n def component2_setup(hass, config):\n \"\"\"Set up mock component.\"\"\"\n component_calls.append(1)\n return True\n\n mock_integration(\n self.hass, MockModule(\"test_component1\", setup=component1_setup)\n )\n\n mock_integration(\n self.hass, MockModule(\"test_component2\", setup=component2_setup)\n )\n\n @callback\n def do_setup():\n \"\"\"Set up 2 components.\"\"\"\n self.hass.async_add_job(\n setup.async_setup_component(self.hass, \"test_component1\", {})\n )\n self.hass.async_add_job(\n setup.async_setup_component(self.hass, \"test_component2\", {})\n )\n\n self.hass.add_job(do_setup)\n self.hass.block_till_done()\n\n # test_component will only be setup once\n assert len(component_calls) == 1\n\n"}}},{"rowIdx":360,"cells":{"text":{"kind":"string","value":"\nimport struct\nfrom plumbum.lib import six\n\nLFANEW_OFFSET = 30 * 2\nFILE_HEADER_SIZE = 5 * 4\nSUBSYSTEM_OFFSET = 17 * 4\nIMAGE_SUBSYSTEM_WINDOWS_GUI = 2\nIMAGE_SUBSYSTEM_WINDOWS_CUI = 3\n\n\ndef get_pe_subsystem(filename):\n with open(filename, \"rb\") as f:\n if f.read(2) != six.b(\"MZ\"):\n return None\n f.seek(LFANEW_OFFSET)\n lfanew = struct.unpack(\"L\", f.read(4))[0]\n f.seek(lfanew)\n if f.read(4) != six.b(\"PE\\x00\\x00\"):\n return None\n f.seek(FILE_HEADER_SIZE + SUBSYSTEM_OFFSET, 1)\n subsystem = struct.unpack(\"H\", f.read(2))[0]\n return subsystem\n\n\n# print(get_pe_subsystem(\"c:\\\\windows\\\\notepad.exe\")) == 2\n# print(get_pe_subsystem(\"c:\\\\python32\\\\python.exe\")) == 3\n# 
print(get_pe_subsystem("c:\\python32\\pythonw.exe")) == 2


# --- row 361 ---
import asyncio
from contextlib import suppress
from datetime import timedelta
import logging

from aiohttp import web
from aiohttp.web_exceptions import HTTPNotFound
import async_timeout

from homeassistant.components.http import HomeAssistantView
from homeassistant.const import HTTP_INTERNAL_SERVER_ERROR
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_per_platform, discovery
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.setup import async_prepare_setup_platform

# mypy: allow-untyped-defs, no-check-untyped-defs

_LOGGER = logging.getLogger(__name__)

DOMAIN = "mailbox"

EVENT = "mailbox_updated"
CONTENT_TYPE_MPEG = "audio/mpeg"
CONTENT_TYPE_NONE = "none"

SCAN_INTERVAL = timedelta(seconds=30)


async def async_setup(hass, config):
    """Track states and offer events for mailboxes."""
    mailboxes = []
    hass.components.frontend.async_register_built_in_panel(
        "mailbox", "mailbox", "mdi:mailbox"
    )
    hass.http.register_view(MailboxPlatformsView(mailboxes))
    hass.http.register_view(MailboxMessageView(mailboxes))
    hass.http.register_view(MailboxMediaView(mailboxes))
    hass.http.register_view(MailboxDeleteView(mailboxes))

    async def async_setup_platform(p_type, p_config=None, discovery_info=None):
        """Set up a mailbox platform."""
        if p_config is None:
            p_config = {}
        if discovery_info is None:
            discovery_info = {}

        platform = await async_prepare_setup_platform(hass, config, DOMAIN, p_type)

        if platform is None:
            _LOGGER.error("Unknown mailbox platform specified")
            return

        _LOGGER.info("Setting up %s.%s", DOMAIN, p_type)
        mailbox = None
        try:
            if hasattr(platform, "async_get_handler"):
                mailbox = await platform.async_get_handler(
                    hass, p_config, discovery_info
                )
            elif hasattr(platform, "get_handler"):
                mailbox = await hass.async_add_executor_job(
                    platform.get_handler, hass, p_config, discovery_info
                )
            else:
                raise HomeAssistantError("Invalid mailbox platform.")

            if mailbox is None:
                _LOGGER.error("Failed to initialize mailbox platform %s", p_type)
                return

        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception("Error setting up platform %s", p_type)
            return

        mailboxes.append(mailbox)
        mailbox_entity = MailboxEntity(mailbox)
        component = EntityComponent(
            logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL
        )
        await component.async_add_entities([mailbox_entity])

    setup_tasks = [
        async_setup_platform(p_type, p_config)
        for p_type, p_config in config_per_platform(config, DOMAIN)
    ]

    if setup_tasks:
        await asyncio.wait(setup_tasks)

    async def async_platform_discovered(platform, info):
        """Handle for discovered platform."""
        await async_setup_platform(platform, discovery_info=info)

    discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered)

    return True


class MailboxEntity(Entity):
    """Entity for each mailbox platform to provide a badge display."""

    def __init__(self, mailbox):
        """Initialize mailbox entity."""
        self.mailbox = mailbox
        self.message_count = 0

    async def async_added_to_hass(self):
        """Complete entity initialization."""

        @callback
        def _mailbox_updated(event):
self.async_schedule_update_ha_state(True)\n\n self.hass.bus.async_listen(EVENT, _mailbox_updated)\n self.async_schedule_update_ha_state(True)\n\n @property\n def state(self):\n \"\"\"Return the state of the binary sensor.\"\"\"\n return str(self.message_count)\n\n @property\n def name(self):\n \"\"\"Return the name of the entity.\"\"\"\n return self.mailbox.name\n\n async def async_update(self):\n \"\"\"Retrieve messages from platform.\"\"\"\n messages = await self.mailbox.async_get_messages()\n self.message_count = len(messages)\n\n\nclass Mailbox:\n \"\"\"Represent a mailbox device.\"\"\"\n\n def __init__(self, hass, name):\n \"\"\"Initialize mailbox object.\"\"\"\n self.hass = hass\n self.name = name\n\n @callback\n def async_update(self):\n \"\"\"Send event notification of updated mailbox.\"\"\"\n self.hass.bus.async_fire(EVENT)\n\n @property\n def media_type(self):\n \"\"\"Return the supported media type.\"\"\"\n raise NotImplementedError()\n\n @property\n def can_delete(self):\n \"\"\"Return if messages can be deleted.\"\"\"\n return False\n\n @property\n def has_media(self):\n \"\"\"Return if messages have attached media files.\"\"\"\n return False\n\n async def async_get_media(self, msgid):\n \"\"\"Return the media blob for the msgid.\"\"\"\n raise NotImplementedError()\n\n async def async_get_messages(self):\n \"\"\"Return a list of the current messages.\"\"\"\n raise NotImplementedError()\n\n async def async_delete(self, msgid):\n \"\"\"Delete the specified messages.\"\"\"\n raise NotImplementedError()\n\n\nclass StreamError(Exception):\n \"\"\"Media streaming exception.\"\"\"\n\n\nclass MailboxView(HomeAssistantView):\n \"\"\"Base mailbox view.\"\"\"\n\n def __init__(self, mailboxes):\n \"\"\"Initialize a basic mailbox view.\"\"\"\n self.mailboxes = mailboxes\n\n def get_mailbox(self, platform):\n \"\"\"Retrieve the specified mailbox.\"\"\"\n for mailbox in self.mailboxes:\n if mailbox.name == platform:\n return mailbox\n raise HTTPNotFound\n\n\nclass MailboxPlatformsView(MailboxView):\n \"\"\"View to return the list of mailbox platforms.\"\"\"\n\n url = \"/api/mailbox/platforms\"\n name = \"api:mailbox:platforms\"\n\n async def get(self, request: web.Request) -> web.Response:\n \"\"\"Retrieve list of platforms.\"\"\"\n platforms = []\n for mailbox in self.mailboxes:\n platforms.append(\n {\n \"name\": mailbox.name,\n \"has_media\": mailbox.has_media,\n \"can_delete\": mailbox.can_delete,\n }\n )\n return self.json(platforms)\n\n\nclass MailboxMessageView(MailboxView):\n \"\"\"View to return the list of messages.\"\"\"\n\n url = \"/api/mailbox/messages/{platform}\"\n name = \"api:mailbox:messages\"\n\n async def get(self, request, platform):\n \"\"\"Retrieve messages.\"\"\"\n mailbox = self.get_mailbox(platform)\n messages = await mailbox.async_get_messages()\n return self.json(messages)\n\n\nclass MailboxDeleteView(MailboxView):\n \"\"\"View to delete selected messages.\"\"\"\n\n url = \"/api/mailbox/delete/{platform}/{msgid}\"\n name = \"api:mailbox:delete\"\n\n async def delete(self, request, platform, msgid):\n \"\"\"Delete items.\"\"\"\n mailbox = self.get_mailbox(platform)\n await mailbox.async_delete(msgid)\n\n\nclass MailboxMediaView(MailboxView):\n \"\"\"View to return a media file.\"\"\"\n\n url = r\"/api/mailbox/media/{platform}/{msgid}\"\n name = \"api:asteriskmbox:media\"\n\n async def get(self, request, platform, msgid):\n \"\"\"Retrieve media.\"\"\"\n mailbox = self.get_mailbox(platform)\n\n with suppress(asyncio.CancelledError, asyncio.TimeoutError):\n with 
async_timeout.timeout(10):\n try:\n stream = await mailbox.async_get_media(msgid)\n except StreamError as err:\n error_msg = \"Error getting media: %s\" % (err)\n _LOGGER.error(error_msg)\n return web.Response(status=HTTP_INTERNAL_SERVER_ERROR)\n if stream:\n return web.Response(body=stream, content_type=mailbox.media_type)\n\n return web.Response(status=HTTP_INTERNAL_SERVER_ERROR)\n\n"}}},{"rowIdx":362,"cells":{"text":{"kind":"string","value":"\nimport datetime\nimport json\nimport logging\nfrom absl import flags\nfrom perfkitbenchmarker import capacity_reservation\nfrom perfkitbenchmarker import errors\nfrom perfkitbenchmarker import os_types\nfrom perfkitbenchmarker import vm_util\nfrom perfkitbenchmarker.providers import aws\nfrom perfkitbenchmarker.providers.aws import util\n\nFLAGS = flags.FLAGS\n_INSUFFICIENT_CAPACITY = 'InsufficientInstanceCapacity'\n\n\nclass InvalidVmGroupSizeError(Exception):\n pass\n\n\nclass UnsupportedOsTypeError(Exception):\n pass\n\n\nclass CreationError(Exception):\n pass\n\n\nclass AwsCapacityReservation(capacity_reservation.BaseCapacityReservation):\n \"\"\"An object representing an AWS EC2 CapacityReservation.\"\"\"\n CLOUD = aws.CLOUD\n\n def __init__(self, vm_group):\n if not vm_group:\n raise InvalidVmGroupSizeError(\n 'AwsCapacityReservation must be initialized with at least one '\n 'VM in the vm_group.')\n\n super(AwsCapacityReservation, self).__init__(vm_group)\n self.zone_or_region = vm_group[0].zone\n self.region = util.GetRegionFromZone(self.zone_or_region)\n self.machine_type = vm_group[0].machine_type\n self.os_type = vm_group[0].OS_TYPE\n self.vm_count = len(vm_group)\n\n def _Create(self):\n \"\"\"Creates the AWS CapacaityReservation.\n\n A reservation will be created given the VM shape in self.vm_groups.\n Count is determined by the number of VMs in said group. The reservation\n will have a lifetime determined by the general PKB concept of\n timeout_minutes. If the reservation exceeds this timeout, AWS will\n cancel it automatically. 
The VMs in the reservation will not be deleted.\n Note that an empty capacity reservation will encur costs for the\n VM shape / count, even if no VMs are using it.\n\n After the reservation is created, this method updates all the VMs\n in self.vm_groups by setting the capacity_reservation_id, as well\n as the zone attributes on the VM, and the VM's network instance.\n\n Raises:\n UnsupportedOsTypeError: If creating a capacity reservation for the\n given os type is not supported.\n CreationError: If a capacity reservation cannot be created in the\n region (typically indicates a stockout).\n \"\"\"\n if self.os_type in os_types.LINUX_OS_TYPES:\n instance_platform = 'Linux/UNIX'\n elif self.os_type in os_types.WINDOWS_OS_TYPES:\n instance_platform = 'Windows'\n else:\n raise UnsupportedOsTypeError(\n 'Unsupported os_type for AWS CapacityReservation: %s.'\n % self.os_type)\n\n # If the user did not specify an AZ, we need to try to create the\n # CapacityReservation in a specifc AZ until it succeeds.\n # Then update the zone attribute on all the VMs in the group,\n # as well as the zone attribute on the VMs' network instance.\n if util.IsRegion(self.zone_or_region):\n zones_to_try = util.GetZonesInRegion(self.region)\n else:\n zones_to_try = [self.zone_or_region]\n\n end_date = (\n datetime.datetime.utcnow() +\n datetime.timedelta(minutes=FLAGS.timeout_minutes))\n for zone in zones_to_try:\n cmd = util.AWS_PREFIX + [\n 'ec2',\n 'create-capacity-reservation',\n '--instance-type=%s' % self.machine_type,\n '--instance-platform=%s' % instance_platform,\n '--availability-zone=%s' % zone,\n '--instance-count=%s' % self.vm_count,\n '--instance-match-criteria=targeted',\n '--region=%s' % self.region,\n '--end-date-type=limited',\n '--end-date=%s' % end_date.isoformat(),\n ]\n stdout, stderr, retcode = vm_util.IssueCommand(cmd,\n raise_on_failure=False)\n if retcode:\n logging.info('Unable to create CapacityReservation in %s. '\n 'This may be retried. Details: %s', zone, stderr)\n if _INSUFFICIENT_CAPACITY in stderr:\n logging.error(util.STOCKOUT_MESSAGE)\n raise errors.Benchmarks.InsufficientCapacityCloudFailure(\n util.STOCKOUT_MESSAGE + ' CapacityReservation in ' + zone)\n continue\n json_output = json.loads(stdout)\n self.capacity_reservation_id = (\n json_output['CapacityReservation']['CapacityReservationId'])\n self._UpdateVmsInGroup(self.capacity_reservation_id, zone)\n return\n raise CreationError('Unable to create CapacityReservation in any of the '\n 'following zones: %s.' 
% zones_to_try)\n\n def _Delete(self):\n \"\"\"Deletes the capacity reservation.\"\"\"\n cmd = util.AWS_PREFIX + [\n 'ec2',\n 'cancel-capacity-reservation',\n '--capacity-reservation-id=%s' % self.capacity_reservation_id,\n '--region=%s' % self.region,\n ]\n vm_util.IssueCommand(cmd, raise_on_failure=False)\n\n def _Exists(self):\n \"\"\"Returns true if the underlying reservation exists and is active.\"\"\"\n cmd = util.AWS_PREFIX + [\n 'ec2',\n 'describe-capacity-reservations',\n '--capacity-reservation-id=%s' % self.capacity_reservation_id,\n '--region=%s' % self.region,\n ]\n stdout, _, retcode = vm_util.IssueCommand(cmd, raise_on_failure=False)\n if retcode != 0:\n return False\n\n json_output = json.loads(stdout)\n return json_output['CapacityReservations'][0]['State'] == 'active'\n\n def _UpdateVmsInGroup(self, capacity_reservation_id, zone):\n \"\"\"Updates the VMs in a group with necessary reservation details.\n\n AWS virtual machines need to reference the capacity reservation id\n during creation, so it is set on all VMs in the group. Additionally,\n this class may determine which zone to run in, so that needs to be\n updated too (on the VM, and the VM's network instance).\n\n Args:\n capacity_reservation_id: ID of the reservation created by this instance.\n zone: Zone chosen by this class, or if it was supplied, the zone\n provided by the user. In the latter case, setting the zone is equivalent\n to a no-op.\n \"\"\"\n for vm in self.vm_group:\n vm.capacity_reservation_id = capacity_reservation_id\n vm.zone = zone\n vm.network.zone = zone\n\n"}}},{"rowIdx":363,"cells":{"text":{"kind":"string","value":"\nfrom app import SQLAlchemyDB as db\n\n\n# 一些公共的方法,仅仅适合单独操作,对于事务操作,还是需要手工写db.session代码\nclass BaseMethod(object):\n __table_args__ = {'mysql_engine': 'MyISAM', 'mysql_charset': 'utf8'}\n\n # insert and update\n def save(self):\n db.session.add(self)\n db.session.commit()\n\n # delete\n def delete(self):\n db.session.delete(self)\n db.session.commit()\n\n"}}},{"rowIdx":364,"cells":{"text":{"kind":"string","value":"\nfrom collections import UserDict\nfrom copy import deepcopy\nfrom requests.cookies import RequestsCookieJar\n\nimport os.path\n\nfrom httpobs.scanner.utils import parse_http_equiv_headers\n\n\ndef empty_requests(http_equiv_file=None) -> dict:\n req = {\n 'hostname': 'http-observatory.security.mozilla.org',\n 'resources': {\n '__path__': None,\n '/': None,\n '/clientaccesspolicy.xml': None,\n '/contribute.json': None,\n '/crossdomain.xml': None,\n '/robots.txt': None,\n },\n 'responses': {\n 'auto': UserDict(),\n 'cors': None,\n 'http': None,\n 'https': None,\n },\n 'session': UserDict(),\n }\n\n # Parse the HTML file for its own headers, if requested\n if http_equiv_file:\n __dirname = os.path.abspath(os.path.dirname(__file__))\n\n with open(os.path.join(__dirname, 'unittests', 'files', http_equiv_file), 'r') as f:\n html = f.read()\n\n # Load the HTML file into the object for content tests.\n req['resources']['__path__'] = html\n\n req['responses']['auto'].headers = {\n 'Content-Type': 'text/html',\n }\n req['responses']['auto'].history = []\n req['responses']['auto'].request = UserDict()\n req['responses']['auto'].request.headers = UserDict()\n req['responses']['auto'].status_code = 200\n req['responses']['auto'].url = 'https://http-observatory.security.mozilla.org/'\n req['responses']['auto'].verified = True\n\n req['session'].cookies = RequestsCookieJar()\n\n req['responses']['cors'] = deepcopy(req['responses']['auto'])\n req['responses']['http'] = 
deepcopy(req['responses']['auto'])\n req['responses']['https'] = deepcopy(req['responses']['auto'])\n\n # Parse the HTML file for its own headers, if requested\n if http_equiv_file:\n req['responses']['auto'].http_equiv = parse_http_equiv_headers(req['resources']['__path__'])\n else:\n req['responses']['auto'].http_equiv = {}\n\n return req\n\n"}}},{"rowIdx":365,"cells":{"text":{"kind":"string","value":"\nimport logging\n\nfrom miio import DeviceException, WifiRepeater # pylint: disable=import-error\nimport voluptuous as vol\n\nfrom homeassistant.components.device_tracker import (\n DOMAIN,\n PLATFORM_SCHEMA,\n DeviceScanner,\n)\nfrom homeassistant.const import CONF_HOST, CONF_TOKEN\nimport homeassistant.helpers.config_validation as cv\n\n_LOGGER = logging.getLogger(__name__)\n\nPLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(\n {\n vol.Required(CONF_HOST): cv.string,\n vol.Required(CONF_TOKEN): vol.All(cv.string, vol.Length(min=32, max=32)),\n }\n)\n\n\ndef get_scanner(hass, config):\n \"\"\"Return a Xiaomi MiIO device scanner.\"\"\"\n scanner = None\n host = config[DOMAIN][CONF_HOST]\n token = config[DOMAIN][CONF_TOKEN]\n\n _LOGGER.info(\"Initializing with host %s (token %s...)\", host, token[:5])\n\n try:\n device = WifiRepeater(host, token)\n device_info = device.info()\n _LOGGER.info(\n \"%s %s %s detected\",\n device_info.model,\n device_info.firmware_version,\n device_info.hardware_version,\n )\n scanner = XiaomiMiioDeviceScanner(device)\n except DeviceException as ex:\n _LOGGER.error(\"Device unavailable or token incorrect: %s\", ex)\n\n return scanner\n\n\nclass XiaomiMiioDeviceScanner(DeviceScanner):\n \"\"\"This class queries a Xiaomi Mi WiFi Repeater.\"\"\"\n\n def __init__(self, device):\n \"\"\"Initialize the scanner.\"\"\"\n self.device = device\n\n async def async_scan_devices(self):\n \"\"\"Scan for devices and return a list containing found device IDs.\"\"\"\n devices = []\n try:\n station_info = await self.hass.async_add_executor_job(self.device.status)\n _LOGGER.debug(\"Got new station info: %s\", station_info)\n\n for device in station_info.associated_stations:\n devices.append(device[\"mac\"])\n\n except DeviceException as ex:\n _LOGGER.error(\"Unable to fetch the state: %s\", ex)\n\n return devices\n\n async def async_get_device_name(self, device):\n \"\"\"Return None.\n\n The repeater doesn't provide the name of the associated device.\n \"\"\"\n return None\n\n"}}},{"rowIdx":366,"cells":{"text":{"kind":"string","value":"\nfrom datetime import timedelta\nimport logging\n\nimport voluptuous as vol\n\nfrom homeassistant.const import (\n SERVICE_TOGGLE,\n SERVICE_TURN_OFF,\n SERVICE_TURN_ON,\n STATE_ON,\n)\nfrom homeassistant.helpers.config_validation import ( # noqa: F401\n PLATFORM_SCHEMA,\n PLATFORM_SCHEMA_BASE,\n)\nfrom homeassistant.helpers.entity import ToggleEntity\nfrom homeassistant.helpers.entity_component import EntityComponent\nfrom homeassistant.loader import bind_hass\n\n# mypy: allow-untyped-defs, no-check-untyped-defs\n\nDOMAIN = \"switch\"\nSCAN_INTERVAL = timedelta(seconds=30)\n\nENTITY_ID_FORMAT = DOMAIN + \".{}\"\n\nATTR_TODAY_ENERGY_KWH = \"today_energy_kwh\"\nATTR_CURRENT_POWER_W = \"current_power_w\"\n\nMIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)\n\nPROP_TO_ATTR = {\n \"current_power_w\": ATTR_CURRENT_POWER_W,\n \"today_energy_kwh\": ATTR_TODAY_ENERGY_KWH,\n}\n\nDEVICE_CLASS_OUTLET = \"outlet\"\nDEVICE_CLASS_SWITCH = \"switch\"\n\nDEVICE_CLASSES = [DEVICE_CLASS_OUTLET, DEVICE_CLASS_SWITCH]\n\nDEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, 
vol.In(DEVICE_CLASSES))\n\n_LOGGER = logging.getLogger(__name__)\n\n\n@bind_hass\ndef is_on(hass, entity_id):\n \"\"\"Return if the switch is on based on the statemachine.\n\n Async friendly.\n \"\"\"\n return hass.states.is_state(entity_id, STATE_ON)\n\n\nasync def async_setup(hass, config):\n \"\"\"Track states and offer events for switches.\"\"\"\n component = hass.data[DOMAIN] = EntityComponent(\n _LOGGER, DOMAIN, hass, SCAN_INTERVAL\n )\n await component.async_setup(config)\n\n component.async_register_entity_service(SERVICE_TURN_OFF, {}, \"async_turn_off\")\n component.async_register_entity_service(SERVICE_TURN_ON, {}, \"async_turn_on\")\n component.async_register_entity_service(SERVICE_TOGGLE, {}, \"async_toggle\")\n\n return True\n\n\nasync def async_setup_entry(hass, entry):\n \"\"\"Set up a config entry.\"\"\"\n return await hass.data[DOMAIN].async_setup_entry(entry)\n\n\nasync def async_unload_entry(hass, entry):\n \"\"\"Unload a config entry.\"\"\"\n return await hass.data[DOMAIN].async_unload_entry(entry)\n\n\nclass SwitchEntity(ToggleEntity):\n \"\"\"Representation of a switch.\"\"\"\n\n @property\n def current_power_w(self):\n \"\"\"Return the current power usage in W.\"\"\"\n return None\n\n @property\n def today_energy_kwh(self):\n \"\"\"Return the today total energy usage in kWh.\"\"\"\n return None\n\n @property\n def is_standby(self):\n \"\"\"Return true if device is in standby.\"\"\"\n return None\n\n @property\n def state_attributes(self):\n \"\"\"Return the optional state attributes.\"\"\"\n data = {}\n\n for prop, attr in PROP_TO_ATTR.items():\n value = getattr(self, prop)\n if value is not None:\n data[attr] = value\n\n return data\n\n @property\n def device_class(self):\n \"\"\"Return the class of this device, from component DEVICE_CLASSES.\"\"\"\n return None\n\n\nclass SwitchDevice(SwitchEntity):\n \"\"\"Representation of a switch (for backwards compatibility).\"\"\"\n\n def __init_subclass__(cls, **kwargs):\n \"\"\"Print deprecation warning.\"\"\"\n super().__init_subclass__(**kwargs)\n _LOGGER.warning(\n \"SwitchDevice is deprecated, modify %s to extend SwitchEntity\",\n cls.__name__,\n )\n\n"}}},{"rowIdx":367,"cells":{"text":{"kind":"string","value":"\nfrom babelfish import LanguageReverseConverter\n\nfrom ..exceptions import ConfigurationError\n\n\nclass TheSubDBConverter(LanguageReverseConverter):\n def __init__(self):\n self.from_thesubdb = {'en': ('eng',), 'es': ('spa',), 'fr': ('fra',), 'it': ('ita',), 'nl': ('nld',),\n 'pl': ('pol',), 'pt': ('por', 'BR'), 'ro': ('ron',), 'sv': ('swe',), 'tr': ('tur',)}\n self.to_thesubdb = {v: k for k, v in self.from_thesubdb.items()}\n self.codes = set(self.from_thesubdb.keys())\n\n def convert(self, alpha3, country=None, script=None):\n if (alpha3, country) in self.to_thesubdb:\n return self.to_thesubdb[(alpha3, country)]\n if (alpha3,) in self.to_thesubdb:\n return self.to_thesubdb[(alpha3,)]\n\n raise ConfigurationError('Unsupported language for thesubdb: %s, %s, %s' % (alpha3, country, script))\n\n def reverse(self, thesubdb):\n if thesubdb in self.from_thesubdb:\n return self.from_thesubdb[thesubdb]\n\n raise ConfigurationError('Unsupported language code for thesubdb: %s' % thesubdb)\n\n"}}},{"rowIdx":368,"cells":{"text":{"kind":"string","value":"\nfrom gi.repository import Gdk, GObject, Gtk\n\n\nclass DiffGrid(Gtk.Grid):\n __gtype_name__ = \"DiffGrid\"\n\n column_count = 10\n handle_columns = (2, 6)\n\n def __init__(self):\n super().__init__()\n self._in_drag = False\n self._drag_pos = -1\n self._drag_handle = 
None\n self._handle1 = HandleWindow()\n self._handle2 = HandleWindow()\n\n def do_realize(self):\n Gtk.Grid.do_realize(self)\n self._handle1.realize(self)\n self._handle2.realize(self)\n\n def do_unrealize(self):\n self._handle1.unrealize()\n self._handle2.unrealize()\n Gtk.Grid.do_unrealize(self)\n\n def do_map(self):\n Gtk.Grid.do_map(self)\n\n drag = self.get_child_at(2, 0)\n self._handle1.set_visible(drag and drag.get_visible())\n drag = self.get_child_at(6, 0)\n self._handle2.set_visible(drag and drag.get_visible())\n\n def do_unmap(self):\n self._handle1.set_visible(False)\n self._handle2.set_visible(False)\n Gtk.Grid.do_unmap(self)\n\n def _handle_set_prelight(self, window, flag):\n if hasattr(window, \"handle\"):\n window.handle.set_prelight(flag)\n\n def do_enter_notify_event(self, event):\n if hasattr(event.window, \"handle\"):\n event.window.handle.set_prelight(True)\n\n def do_leave_notify_event(self, event):\n if self._in_drag:\n return\n\n if hasattr(event.window, \"handle\"):\n event.window.handle.set_prelight(False)\n\n def do_button_press_event(self, event):\n if event.button & Gdk.BUTTON_PRIMARY:\n self._drag_pos = event.x\n self._in_drag = True\n return True\n return False\n\n def do_button_release_event(self, event):\n if event.button & Gdk.BUTTON_PRIMARY:\n self._in_drag = False\n return True\n return False\n\n def do_motion_notify_event(self, event):\n if event.state & Gdk.ModifierType.BUTTON1_MASK:\n if hasattr(event.window, \"handle\"):\n x, y = event.window.get_position()\n pos = round(x + event.x - self._drag_pos)\n event.window.handle.set_position(pos)\n self._drag_handle = event.window.handle\n self.queue_resize_no_redraw()\n return True\n return False\n\n def _calculate_positions(\n self, xmin, xmax, pane_sep_width_1, pane_sep_width_2,\n wpane1, wpane2, wpane3):\n wremain = max(0, xmax - xmin - pane_sep_width_1 - pane_sep_width_2)\n pos1 = self._handle1.get_position(wremain, xmin)\n pos2 = self._handle2.get_position(wremain, xmin + pane_sep_width_1)\n\n if not self._drag_handle:\n npanes = 0\n if wpane1 > 0:\n npanes += 1\n if wpane2 > 0:\n npanes += 1\n if wpane3 > 0:\n npanes += 1\n wpane = float(wremain) / max(1, npanes)\n if wpane1 > 0:\n wpane1 = wpane\n if wpane2 > 0:\n wpane2 = wpane\n if wpane3 > 0:\n wpane3 = wpane\n\n xminlink1 = xmin + wpane1\n xmaxlink2 = xmax - wpane3 - pane_sep_width_2\n wlinkpane = pane_sep_width_1 + wpane2\n\n if wpane1 == 0:\n pos1 = xminlink1\n if wpane3 == 0:\n pos2 = xmaxlink2\n if wpane2 == 0:\n if wpane3 == 0:\n pos1 = pos2 - pane_sep_width_2\n else:\n pos2 = pos1 + pane_sep_width_1\n\n if self._drag_handle == self._handle2:\n xminlink2 = xminlink1 + wlinkpane\n pos2 = min(max(xminlink2, pos2), xmaxlink2)\n xmaxlink1 = pos2 - wlinkpane\n pos1 = min(max(xminlink1, pos1), xmaxlink1)\n else:\n xmaxlink1 = xmaxlink2 - wlinkpane\n pos1 = min(max(xminlink1, pos1), xmaxlink1)\n xminlink2 = pos1 + wlinkpane\n pos2 = min(max(xminlink2, pos2), xmaxlink2)\n\n self._handle1.set_position(pos1)\n self._handle2.set_position(pos2)\n return int(round(pos1)), int(round(pos2))\n\n def do_size_allocate(self, allocation):\n # We should be chaining up here to:\n # Gtk.Grid.do_size_allocate(self, allocation)\n # However, when we do this, we hit issues with doing multiple\n # allocations in a single allocation cycle (see bgo#779883).\n\n self.set_allocation(allocation)\n wcols, hrows = self._get_min_sizes()\n yrows = [allocation.y,\n allocation.y + hrows[0],\n # Roughly equivalent to hard-coding row 1 to expand=True\n allocation.y + 
(allocation.height - hrows[2] - hrows[3]),\n allocation.y + (allocation.height - hrows[3]),\n allocation.y + allocation.height]\n\n (wpane1, wgutter1, wlink1, wgutter2, wpane2, wgutter3, wlink2,\n wgutter4, wpane3, wmap) = wcols\n xmin = allocation.x\n xmax = allocation.x + allocation.width - wmap\n pane_sep_width_1 = wgutter1 + wlink1 + wgutter2\n pane_sep_width_2 = wgutter3 + wlink2 + wgutter4\n pos1, pos2 = self._calculate_positions(\n xmin, xmax, pane_sep_width_1, pane_sep_width_2,\n wpane1, wpane2, wpane3\n )\n wpane1 = pos1 - allocation.x\n wpane2 = pos2 - (pos1 + pane_sep_width_1)\n wpane3 = xmax - (pos2 + pane_sep_width_2)\n wcols = (\n allocation.x, wpane1, wgutter1, wlink1, wgutter2, wpane2,\n wgutter3, wlink2, wgutter4, wpane3, wmap)\n columns = [sum(wcols[:i + 1]) for i in range(len(wcols))]\n\n def child_allocate(child):\n if not child.get_visible():\n return\n left, top, width, height = self.child_get(\n child, 'left-attach', 'top-attach', 'width', 'height')\n # This is a copy, and we have to do this because there's no Python\n # access to Gtk.Allocation.\n child_alloc = self.get_allocation()\n child_alloc.x = columns[left]\n child_alloc.y = yrows[top]\n child_alloc.width = columns[left + width] - columns[left]\n child_alloc.height = yrows[top + height] - yrows[top]\n\n if self.get_direction() == Gtk.TextDirection.RTL:\n child_alloc.x = (\n allocation.x + allocation.width -\n (child_alloc.x - allocation.x) - child_alloc.width)\n\n child.size_allocate(child_alloc)\n\n for child in self.get_children():\n child_allocate(child)\n\n if self.get_realized():\n mapped = self.get_mapped()\n ydrag = yrows[0]\n hdrag = yrows[1] - yrows[0]\n self._handle1.set_visible(mapped and pane_sep_width_1 > 0)\n self._handle1.move_resize(pos1, ydrag, pane_sep_width_1, hdrag)\n self._handle2.set_visible(mapped and pane_sep_width_2 > 0)\n self._handle2.move_resize(pos2, ydrag, pane_sep_width_2, hdrag)\n\n def _get_min_sizes(self):\n hrows = [0] * 4\n wcols = [0] * self.column_count\n for row in range(4):\n for col in range(self.column_count):\n child = self.get_child_at(col, row)\n if child and child.get_visible():\n msize, nsize = child.get_preferred_size()\n # Ignore spanning columns in width calculations; we should\n # do this properly, but it's difficult.\n spanning = GObject.Value(int)\n self.child_get_property(child, 'width', spanning)\n spanning = spanning.get_int()\n # We ignore natural size when calculating required\n # width, but use it when doing required height. The\n # logic here is that height-for-width means that\n # minimum width requisitions mean more-than-minimum\n # heights. 
This is all extremely dodgy, but works\n # for now.\n if spanning == 1:\n wcols[col] = max(wcols[col], msize.width)\n hrows[row] = max(hrows[row], msize.height, nsize.height)\n return wcols, hrows\n\n def do_draw(self, context):\n Gtk.Grid.do_draw(self, context)\n self._handle1.draw(context)\n self._handle2.draw(context)\n\n\nclass HandleWindow():\n\n # We restrict the handle width because render_handle doesn't pay\n # attention to orientation.\n handle_width = 10\n\n def __init__(self):\n self._widget = None\n self._window = None\n self._area_x = -1\n self._area_y = -1\n self._area_width = 1\n self._area_height = 1\n self._prelit = False\n self._pos = 0.0\n self._transform = (0, 0)\n\n def get_position(self, width, xtrans):\n self._transform = (width, xtrans)\n return float(self._pos * width) + xtrans\n\n def set_position(self, pos):\n width, xtrans = self._transform\n self._pos = float(pos - xtrans) / width\n\n def realize(self, widget):\n attr = Gdk.WindowAttr()\n attr.window_type = Gdk.WindowType.CHILD\n attr.x = self._area_x\n attr.y = self._area_y\n attr.width = self._area_width\n attr.height = self._area_height\n attr.wclass = Gdk.WindowWindowClass.INPUT_OUTPUT\n attr.event_mask = (widget.get_events() |\n Gdk.EventMask.BUTTON_PRESS_MASK |\n Gdk.EventMask.BUTTON_RELEASE_MASK |\n Gdk.EventMask.ENTER_NOTIFY_MASK |\n Gdk.EventMask.LEAVE_NOTIFY_MASK |\n Gdk.EventMask.POINTER_MOTION_MASK)\n attr.cursor = Gdk.Cursor.new_for_display(widget.get_display(),\n Gdk.CursorType.\n SB_H_DOUBLE_ARROW)\n attr_mask = (Gdk.WindowAttributesType.X |\n Gdk.WindowAttributesType.Y |\n Gdk.WindowAttributesType.CURSOR)\n\n parent = widget.get_parent_window()\n self._window = Gdk.Window(parent, attr, attr_mask)\n self._window.handle = self\n self._widget = widget\n self._widget.register_window(self._window)\n\n def unrealize(self):\n self._widget.unregister_window(self._window)\n\n def set_visible(self, visible):\n if visible:\n self._window.show()\n else:\n self._window.hide()\n\n def move_resize(self, x, y, width, height):\n self._window.move_resize(x, y, width, height)\n self._area_x = x\n self._area_y = y\n self._area_width = width\n self._area_height = height\n\n def set_prelight(self, flag):\n self._prelit = flag\n self._widget.queue_draw_area(self._area_x, self._area_y,\n self._area_width, self._area_height)\n\n def draw(self, cairocontext):\n alloc = self._widget.get_allocation()\n padding = 5\n x = self._area_x - alloc.x + padding\n y = self._area_y - alloc.y + padding\n width = max(0, self._area_width - 2 * padding)\n height = max(0, self._area_height - 2 * padding)\n\n if width == 0 or height == 0:\n return\n\n stylecontext = self._widget.get_style_context()\n state = stylecontext.get_state()\n if self._widget.is_focus():\n state |= Gtk.StateFlags.SELECTED\n if self._prelit:\n state |= Gtk.StateFlags.PRELIGHT\n\n if Gtk.cairo_should_draw_window(cairocontext, self._window):\n stylecontext.save()\n stylecontext.set_state(state)\n stylecontext.add_class(Gtk.STYLE_CLASS_PANE_SEPARATOR)\n stylecontext.add_class(Gtk.STYLE_CLASS_VERTICAL)\n color = stylecontext.get_background_color(state)\n if color.alpha > 0.0:\n xcenter = x + width / 2.0 - self.handle_width / 2.0\n Gtk.render_handle(\n stylecontext, cairocontext,\n xcenter, y, self.handle_width, height)\n else:\n xcenter = x + width / 2.0\n Gtk.render_line(stylecontext, cairocontext,\n xcenter, y, xcenter, y + height)\n stylecontext.restore()\n\n"}}},{"rowIdx":369,"cells":{"text":{"kind":"string","value":"\nimport json\n\nfrom pushbullet import 
PushBullet\nimport pytest\n\nimport homeassistant.components.notify as notify\nfrom homeassistant.setup import async_setup_component\n\nfrom tests.async_mock import patch\nfrom tests.common import assert_setup_component, load_fixture\n\n\n@pytest.fixture\ndef mock_pushbullet():\n \"\"\"Mock pushbullet.\"\"\"\n with patch.object(\n PushBullet,\n \"_get_data\",\n return_value=json.loads(load_fixture(\"pushbullet_devices.json\")),\n ):\n yield\n\n\nasync def test_pushbullet_config(hass, mock_pushbullet):\n \"\"\"Test setup.\"\"\"\n config = {\n notify.DOMAIN: {\n \"name\": \"test\",\n \"platform\": \"pushbullet\",\n \"api_key\": \"MYFAKEKEY\",\n }\n }\n with assert_setup_component(1) as handle_config:\n assert await async_setup_component(hass, notify.DOMAIN, config)\n await hass.async_block_till_done()\n assert handle_config[notify.DOMAIN]\n\n\nasync def test_pushbullet_config_bad(hass):\n \"\"\"Test set up the platform with bad/missing configuration.\"\"\"\n config = {notify.DOMAIN: {\"platform\": \"pushbullet\"}}\n with assert_setup_component(0) as handle_config:\n assert await async_setup_component(hass, notify.DOMAIN, config)\n await hass.async_block_till_done()\n assert not handle_config[notify.DOMAIN]\n\n\nasync def test_pushbullet_push_default(hass, requests_mock, mock_pushbullet):\n \"\"\"Test pushbullet push to default target.\"\"\"\n config = {\n notify.DOMAIN: {\n \"name\": \"test\",\n \"platform\": \"pushbullet\",\n \"api_key\": \"MYFAKEKEY\",\n }\n }\n with assert_setup_component(1) as handle_config:\n assert await async_setup_component(hass, notify.DOMAIN, config)\n await hass.async_block_till_done()\n assert handle_config[notify.DOMAIN]\n requests_mock.register_uri(\n \"POST\",\n \"https://api.pushbullet.com/v2/pushes\",\n status_code=200,\n json={\"mock_response\": \"Ok\"},\n )\n data = {\"title\": \"Test Title\", \"message\": \"Test Message\"}\n await hass.services.async_call(notify.DOMAIN, \"test\", data)\n await hass.async_block_till_done()\n assert requests_mock.called\n assert requests_mock.call_count == 1\n\n expected_body = {\"body\": \"Test Message\", \"title\": \"Test Title\", \"type\": \"note\"}\n assert requests_mock.last_request.json() == expected_body\n\n\nasync def test_pushbullet_push_device(hass, requests_mock, mock_pushbullet):\n \"\"\"Test pushbullet push to default target.\"\"\"\n config = {\n notify.DOMAIN: {\n \"name\": \"test\",\n \"platform\": \"pushbullet\",\n \"api_key\": \"MYFAKEKEY\",\n }\n }\n with assert_setup_component(1) as handle_config:\n assert await async_setup_component(hass, notify.DOMAIN, config)\n await hass.async_block_till_done()\n assert handle_config[notify.DOMAIN]\n requests_mock.register_uri(\n \"POST\",\n \"https://api.pushbullet.com/v2/pushes\",\n status_code=200,\n json={\"mock_response\": \"Ok\"},\n )\n data = {\n \"title\": \"Test Title\",\n \"message\": \"Test Message\",\n \"target\": [\"device/DESKTOP\"],\n }\n await hass.services.async_call(notify.DOMAIN, \"test\", data)\n await hass.async_block_till_done()\n assert requests_mock.called\n assert requests_mock.call_count == 1\n\n expected_body = {\n \"body\": \"Test Message\",\n \"device_iden\": \"identity1\",\n \"title\": \"Test Title\",\n \"type\": \"note\",\n }\n assert requests_mock.last_request.json() == expected_body\n\n\nasync def test_pushbullet_push_devices(hass, requests_mock, mock_pushbullet):\n \"\"\"Test pushbullet push to default target.\"\"\"\n config = {\n notify.DOMAIN: {\n \"name\": \"test\",\n \"platform\": \"pushbullet\",\n \"api_key\": \"MYFAKEKEY\",\n 
}\n }\n with assert_setup_component(1) as handle_config:\n assert await async_setup_component(hass, notify.DOMAIN, config)\n await hass.async_block_till_done()\n assert handle_config[notify.DOMAIN]\n requests_mock.register_uri(\n \"POST\",\n \"https://api.pushbullet.com/v2/pushes\",\n status_code=200,\n json={\"mock_response\": \"Ok\"},\n )\n data = {\n \"title\": \"Test Title\",\n \"message\": \"Test Message\",\n \"target\": [\"device/DESKTOP\", \"device/My iPhone\"],\n }\n await hass.services.async_call(notify.DOMAIN, \"test\", data)\n await hass.async_block_till_done()\n assert requests_mock.called\n assert requests_mock.call_count == 2\n assert len(requests_mock.request_history) == 2\n\n expected_body = {\n \"body\": \"Test Message\",\n \"device_iden\": \"identity1\",\n \"title\": \"Test Title\",\n \"type\": \"note\",\n }\n assert requests_mock.request_history[0].json() == expected_body\n expected_body = {\n \"body\": \"Test Message\",\n \"device_iden\": \"identity2\",\n \"title\": \"Test Title\",\n \"type\": \"note\",\n }\n assert requests_mock.request_history[1].json() == expected_body\n\n\nasync def test_pushbullet_push_email(hass, requests_mock, mock_pushbullet):\n \"\"\"Test pushbullet push to default target.\"\"\"\n config = {\n notify.DOMAIN: {\n \"name\": \"test\",\n \"platform\": \"pushbullet\",\n \"api_key\": \"MYFAKEKEY\",\n }\n }\n with assert_setup_component(1) as handle_config:\n assert await async_setup_component(hass, notify.DOMAIN, config)\n await hass.async_block_till_done()\n assert handle_config[notify.DOMAIN]\n requests_mock.register_uri(\n \"POST\",\n \"https://api.pushbullet.com/v2/pushes\",\n status_code=200,\n json={\"mock_response\": \"Ok\"},\n )\n data = {\n \"title\": \"Test Title\",\n \"message\": \"Test Message\",\n \"target\": [\"email/user@host.net\"],\n }\n await hass.services.async_call(notify.DOMAIN, \"test\", data)\n await hass.async_block_till_done()\n assert requests_mock.called\n assert requests_mock.call_count == 1\n assert len(requests_mock.request_history) == 1\n\n expected_body = {\n \"body\": \"Test Message\",\n \"email\": \"user@host.net\",\n \"title\": \"Test Title\",\n \"type\": \"note\",\n }\n assert requests_mock.request_history[0].json() == expected_body\n\n\nasync def test_pushbullet_push_mixed(hass, requests_mock, mock_pushbullet):\n \"\"\"Test pushbullet push to default target.\"\"\"\n config = {\n notify.DOMAIN: {\n \"name\": \"test\",\n \"platform\": \"pushbullet\",\n \"api_key\": \"MYFAKEKEY\",\n }\n }\n with assert_setup_component(1) as handle_config:\n assert await async_setup_component(hass, notify.DOMAIN, config)\n await hass.async_block_till_done()\n assert handle_config[notify.DOMAIN]\n requests_mock.register_uri(\n \"POST\",\n \"https://api.pushbullet.com/v2/pushes\",\n status_code=200,\n json={\"mock_response\": \"Ok\"},\n )\n data = {\n \"title\": \"Test Title\",\n \"message\": \"Test Message\",\n \"target\": [\"device/DESKTOP\", \"email/user@host.net\"],\n }\n await hass.services.async_call(notify.DOMAIN, \"test\", data)\n await hass.async_block_till_done()\n assert requests_mock.called\n assert requests_mock.call_count == 2\n assert len(requests_mock.request_history) == 2\n\n expected_body = {\n \"body\": \"Test Message\",\n \"device_iden\": \"identity1\",\n \"title\": \"Test Title\",\n \"type\": \"note\",\n }\n assert requests_mock.request_history[0].json() == expected_body\n expected_body = {\n \"body\": \"Test Message\",\n \"email\": \"user@host.net\",\n \"title\": \"Test Title\",\n \"type\": \"note\",\n }\n assert 
requests_mock.request_history[1].json() == expected_body\n\n\nasync def test_pushbullet_push_no_file(hass, requests_mock, mock_pushbullet):\n \"\"\"Test pushbullet push to default target.\"\"\"\n config = {\n notify.DOMAIN: {\n \"name\": \"test\",\n \"platform\": \"pushbullet\",\n \"api_key\": \"MYFAKEKEY\",\n }\n }\n with assert_setup_component(1) as handle_config:\n assert await async_setup_component(hass, notify.DOMAIN, config)\n await hass.async_block_till_done()\n assert handle_config[notify.DOMAIN]\n requests_mock.register_uri(\n \"POST\",\n \"https://api.pushbullet.com/v2/pushes\",\n status_code=200,\n json={\"mock_response\": \"Ok\"},\n )\n data = {\n \"title\": \"Test Title\",\n \"message\": \"Test Message\",\n \"target\": [\"device/DESKTOP\", \"device/My iPhone\"],\n \"data\": {\"file\": \"not_a_file\"},\n }\n assert not await hass.services.async_call(notify.DOMAIN, \"test\", data)\n await hass.async_block_till_done()\n\n"}}},{"rowIdx":370,"cells":{"text":{"kind":"string","value":"\nfrom perfkitbenchmarker import errors\n\nRHEL_REPO = ('[nginx]\\n'\n 'name=nginx repo\\n'\n 'baseurl=https://nginx.org/packages/rhel/$releasever/$basearch/\\n'\n 'gpgcheck=0\\n'\n 'enabled=1')\n\n\ndef YumInstall(vm):\n \"\"\"Installs nginx on the VM.\"\"\"\n vm.RemoteCommand('echo \\'%s\\' | '\n 'sudo tee /etc/yum.repos.d/nginx.repo' % RHEL_REPO)\n try:\n vm.InstallPackages('nginx')\n except errors.VmUtil.SshConnectionError:\n # Amazon Linux does not have a releasever configured.\n vm.RemoteCommand('sudo sed -i -e \"s/\\\\$releasever/6/\" '\n '/etc/yum.repos.d/nginx.repo')\n vm.InstallPackages('nginx')\n\n\ndef AptInstall(vm):\n \"\"\"Installs nginx on the VM.\"\"\"\n vm.InstallPackages('nginx')\n\n"}}},{"rowIdx":371,"cells":{"text":{"kind":"string","value":"\nimport unittest\n\nimport numpy as np\n\nfrom chainer import testing\nfrom chainercv.transforms import translate_bbox\nfrom chainercv.utils.testing.generate_random_bbox import generate_random_bbox\n\n\nclass TestTranslateBbox(unittest.TestCase):\n\n def test_translate_bbox(self):\n size = (32, 24)\n y_offset, x_offset = 5, 3\n bbox = generate_random_bbox(10, size, 0, min(size))\n\n out = translate_bbox(bbox, y_offset=y_offset, x_offset=x_offset)\n bbox_expected = np.empty_like(bbox)\n bbox_expected[:, 0] = bbox[:, 0] + y_offset\n bbox_expected[:, 1] = bbox[:, 1] + x_offset\n bbox_expected[:, 2] = bbox[:, 2] + y_offset\n bbox_expected[:, 3] = bbox[:, 3] + x_offset\n np.testing.assert_equal(out, bbox_expected)\n\n\ntesting.run_module(__name__, __file__)\n\n"}}},{"rowIdx":372,"cells":{"text":{"kind":"string","value":"\nfrom mne.io.utils import _check_orig_units\n\n\ndef test_check_orig_units():\n \"\"\"Test the checking of original units.\"\"\"\n orig_units = dict(FC1='nV', Hfp3erz='n/a', Pz='uV', greekMu='μV',\n microSign='µV')\n orig_units = _check_orig_units(orig_units)\n assert orig_units['FC1'] == 'nV'\n assert orig_units['Hfp3erz'] == 'n/a'\n assert orig_units['Pz'] == 'µV'\n assert orig_units['greekMu'] == 'µV'\n assert orig_units['microSign'] == 'µV'\n\n"}}},{"rowIdx":373,"cells":{"text":{"kind":"string","value":"\nfrom qstrader.asset.asset import Asset\n\n\nclass Equity(Asset):\n \"\"\"\n Stores meta data about an equity common stock or ETF.\n\n Parameters\n ----------\n name : `str`\n The asset's name (e.g. 
the company name and/or\n share class).\n symbol : `str`\n The asset's original ticker symbol.\n TODO: This will require modification to handle proper\n ticker mapping.\n tax_exempt: `boolean`, optional\n Is the share exempt from government taxation?\n Necessary for taxation on share transactions, such\n as UK stamp duty.\n \"\"\"\n\n def __init__(\n self,\n name,\n symbol,\n tax_exempt=True\n ):\n self.cash_like = False\n self.name = name\n self.symbol = symbol\n self.tax_exempt = tax_exempt\n\n def __repr__(self):\n \"\"\"\n String representation of the Equity Asset.\n \"\"\"\n return (\n \"Equity(name='%s', symbol='%s', tax_exempt=%s)\" % (\n self.name, self.symbol, self.tax_exempt\n )\n )\n\n"}}},{"rowIdx":374,"cells":{"text":{"kind":"string","value":"\nfrom weblate.billing.models import Billing\nfrom weblate.billing.tasks import billing_notify\nfrom weblate.utils.management.base import BaseCommand\n\n\nclass Command(BaseCommand):\n \"\"\"Command for billing check.\"\"\"\n\n help = \"checks billing limits\"\n\n def add_arguments(self, parser):\n parser.add_argument(\"--valid\", action=\"store_true\", help=\"list valid ones\")\n parser.add_argument(\n \"--notify\", action=\"store_true\", help=\"send email notifications\"\n )\n\n def handle(self, *args, **options):\n if options[\"notify\"]:\n billing_notify()\n return\n Billing.objects.check_limits()\n if options[\"valid\"]:\n for bill in Billing.objects.get_valid():\n self.stdout.write(f\" * {bill}\")\n return\n limit = Billing.objects.get_out_of_limits()\n due = Billing.objects.get_unpaid()\n\n if limit:\n self.stdout.write(\"Following billings are over limit:\")\n for bill in limit:\n self.stdout.write(f\" * {bill}\")\n\n if due:\n self.stdout.write(\"Following billings are past due date:\")\n for bill in due:\n self.stdout.write(f\" * {bill}\")\n\n"}}},{"rowIdx":375,"cells":{"text":{"kind":"string","value":"\nimport os\nfrom mimetypes import guess_type\nfrom urllib.parse import urljoin\n\nfrom bs4 import BeautifulSoup\n\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.contrib.sites.models import Site\nfrom django.contrib.syndication.views import Feed\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.shortcuts import get_object_or_404\nfrom django.template.defaultfilters import slugify\nfrom django.urls import NoReverseMatch\nfrom django.urls import reverse\nfrom django.utils.encoding import smart_str\nfrom django.utils.feedgenerator import Atom1Feed\nfrom django.utils.translation import gettext as _\n\nimport django_comments as comments\n\nfrom tagging.models import Tag\nfrom tagging.models import TaggedItem\n\nfrom zinnia.models.author import Author\nfrom zinnia.models.entry import Entry\nfrom zinnia.settings import COPYRIGHT\nfrom zinnia.settings import FEEDS_FORMAT\nfrom zinnia.settings import FEEDS_MAX_ITEMS\nfrom zinnia.settings import PROTOCOL\nfrom zinnia.templatetags.zinnia import get_gravatar\nfrom zinnia.views.categories import get_category_or_404\n\n\nclass ZinniaFeed(Feed):\n \"\"\"\n Base Feed class for the Zinnia application,\n enriched for a more convenient usage.\n \"\"\"\n protocol = PROTOCOL\n feed_copyright = COPYRIGHT\n feed_format = FEEDS_FORMAT\n limit = FEEDS_MAX_ITEMS\n\n def __init__(self):\n if self.feed_format == 'atom':\n self.feed_type = Atom1Feed\n self.subtitle = getattr(self, 'description', None)\n\n def title(self, obj=None):\n \"\"\"\n Title of the feed prefixed with the site name.\n \"\"\"\n return '%s - %s' % (self.site.name, self.get_title(obj))\n\n def 
get_title(self, obj):\n raise NotImplementedError\n\n @property\n def site(self):\n \"\"\"\n Acquire the current site used.\n \"\"\"\n return Site.objects.get_current()\n\n @property\n def site_url(self):\n \"\"\"\n Return the URL of the current site.\n \"\"\"\n return '%s://%s' % (self.protocol, self.site.domain)\n\n\nclass EntryFeed(ZinniaFeed):\n \"\"\"\n Base Entry Feed.\n \"\"\"\n title_template = 'feeds/entry_title.html'\n description_template = 'feeds/entry_description.html'\n\n def item_pubdate(self, item):\n \"\"\"\n Publication date of an entry.\n \"\"\"\n return item.publication_date\n\n def item_updateddate(self, item):\n \"\"\"\n Update date of an entry.\n \"\"\"\n return item.last_update\n\n def item_categories(self, item):\n \"\"\"\n Entry's categories.\n \"\"\"\n return [category.title for category in item.categories.all()]\n\n def item_author_name(self, item):\n \"\"\"\n Return the first author of an entry.\n \"\"\"\n if item.authors.count():\n self.item_author = item.authors.all()[0]\n return self.item_author.__str__()\n\n def item_author_email(self, item):\n \"\"\"\n Return the first author's email.\n Should not be called if self.item_author_name has returned None.\n \"\"\"\n return self.item_author.email\n\n def item_author_link(self, item):\n \"\"\"\n Return the author's URL.\n Should not be called if self.item_author_name has returned None.\n \"\"\"\n try:\n author_url = self.item_author.get_absolute_url()\n return self.site_url + author_url\n except NoReverseMatch:\n return self.site_url\n\n def item_enclosure_url(self, item):\n \"\"\"\n Return an image for enclosure.\n \"\"\"\n try:\n url = item.image.url\n except (AttributeError, ValueError):\n img = BeautifulSoup(item.html_content, 'html.parser').find('img')\n url = img.get('src') if img else None\n self.cached_enclosure_url = url\n if url:\n url = urljoin(self.site_url, url)\n if self.feed_format == 'rss':\n url = url.replace('https://', 'http://')\n return url\n\n def item_enclosure_length(self, item):\n \"\"\"\n Try to obtain the size of the enclosure if it's present on the FS,\n otherwise returns an hardcoded value.\n Note: this method is only called if item_enclosure_url\n has returned something.\n \"\"\"\n try:\n return str(item.image.size)\n except (AttributeError, ValueError, os.error):\n pass\n return '100000'\n\n def item_enclosure_mime_type(self, item):\n \"\"\"\n Guess the enclosure's mimetype.\n Note: this method is only called if item_enclosure_url\n has returned something.\n \"\"\"\n mime_type, encoding = guess_type(self.cached_enclosure_url)\n if mime_type:\n return mime_type\n return 'image/jpeg'\n\n\nclass LastEntries(EntryFeed):\n \"\"\"\n Feed for the last entries.\n \"\"\"\n\n def link(self):\n \"\"\"\n URL of last entries.\n \"\"\"\n return reverse('zinnia:entry_archive_index')\n\n def items(self):\n \"\"\"\n Items are published entries.\n \"\"\"\n return Entry.published.all()[:self.limit]\n\n def get_title(self, obj):\n \"\"\"\n Title of the feed\n \"\"\"\n return _('Last entries')\n\n def description(self):\n \"\"\"\n Description of the feed.\n \"\"\"\n return _('The last entries on the site %(object)s') % {\n 'object': self.site.name}\n\n\nclass CategoryEntries(EntryFeed):\n \"\"\"\n Feed filtered by a category.\n \"\"\"\n\n def get_object(self, request, path):\n \"\"\"\n Retrieve the category by his path.\n \"\"\"\n return get_category_or_404(path)\n\n def items(self, obj):\n \"\"\"\n Items are the published entries of the category.\n \"\"\"\n return 
obj.entries_published()[:self.limit]\n\n def link(self, obj):\n \"\"\"\n URL of the category.\n \"\"\"\n return obj.get_absolute_url()\n\n def get_title(self, obj):\n \"\"\"\n Title of the feed.\n \"\"\"\n return _('Entries for the category %(object)s') % {'object': obj.title}\n\n def description(self, obj):\n \"\"\"\n Description of the feed.\n \"\"\"\n return (obj.description or\n _('The last entries categorized under %(object)s') % {\n 'object': obj.title})\n\n\nclass AuthorEntries(EntryFeed):\n \"\"\"\n Feed filtered by an author.\n \"\"\"\n\n def get_object(self, request, username):\n \"\"\"\n Retrieve the author by his username.\n \"\"\"\n return get_object_or_404(Author, **{Author.USERNAME_FIELD: username})\n\n def items(self, obj):\n \"\"\"\n Items are the published entries of the author.\n \"\"\"\n return obj.entries_published()[:self.limit]\n\n def link(self, obj):\n \"\"\"\n URL of the author.\n \"\"\"\n return obj.get_absolute_url()\n\n def get_title(self, obj):\n \"\"\"\n Title of the feed.\n \"\"\"\n return _('Entries for the author %(object)s') % {\n 'object': smart_str(obj.__str__())}\n\n def description(self, obj):\n \"\"\"\n Description of the feed.\n \"\"\"\n return _('The last entries by %(object)s') % {\n 'object': smart_str(obj.__str__())}\n\n\nclass TagEntries(EntryFeed):\n \"\"\"\n Feed filtered by a tag.\n \"\"\"\n\n def get_object(self, request, tag):\n \"\"\"\n Retrieve the tag by his name.\n \"\"\"\n return get_object_or_404(Tag, name=tag)\n\n def items(self, obj):\n \"\"\"\n Items are the published entries of the tag.\n \"\"\"\n return TaggedItem.objects.get_by_model(\n Entry.published.all(), obj)[:self.limit]\n\n def link(self, obj):\n \"\"\"\n URL of the tag.\n \"\"\"\n return reverse('zinnia:tag_detail', args=[obj.name])\n\n def get_title(self, obj):\n \"\"\"\n Title of the feed.\n \"\"\"\n return _('Entries for the tag %(object)s') % {'object': obj.name}\n\n def description(self, obj):\n \"\"\"\n Description of the feed.\n \"\"\"\n return _('The last entries tagged with %(object)s') % {\n 'object': obj.name}\n\n\nclass SearchEntries(EntryFeed):\n \"\"\"\n Feed filtered by a search pattern.\n \"\"\"\n\n def get_object(self, request):\n \"\"\"\n The GET parameter 'pattern' is the object.\n \"\"\"\n pattern = request.GET.get('pattern', '')\n if len(pattern) < 3:\n raise ObjectDoesNotExist\n return pattern\n\n def items(self, obj):\n \"\"\"\n Items are the published entries founds.\n \"\"\"\n return Entry.published.search(obj)[:self.limit]\n\n def link(self, obj):\n \"\"\"\n URL of the search request.\n \"\"\"\n return '%s?pattern=%s' % (reverse('zinnia:entry_search'), obj)\n\n def get_title(self, obj):\n \"\"\"\n Title of the feed.\n \"\"\"\n return _(\"Search results for '%(pattern)s'\") % {'pattern': obj}\n\n def description(self, obj):\n \"\"\"\n Description of the feed.\n \"\"\"\n return _(\"The last entries containing the pattern '%(pattern)s'\") % {\n 'pattern': obj}\n\n\nclass DiscussionFeed(ZinniaFeed):\n \"\"\"\n Base class for discussion Feed.\n \"\"\"\n title_template = 'feeds/discussion_title.html'\n description_template = 'feeds/discussion_description.html'\n\n def item_pubdate(self, item):\n \"\"\"\n Publication date of a discussion.\n \"\"\"\n return item.submit_date\n\n def item_link(self, item):\n \"\"\"\n URL of the discussion item.\n \"\"\"\n return item.get_absolute_url()\n\n def item_author_name(self, item):\n \"\"\"\n Author of the discussion item.\n \"\"\"\n return item.name\n\n def item_author_email(self, item):\n \"\"\"\n Author's 
email of the discussion item.\n \"\"\"\n return item.email\n\n def item_author_link(self, item):\n \"\"\"\n Author's URL of the discussion.\n \"\"\"\n return item.url\n\n\nclass LastDiscussions(DiscussionFeed):\n \"\"\"\n Feed for the last discussions.\n \"\"\"\n\n def items(self):\n \"\"\"\n Items are the discussions on the entries.\n \"\"\"\n content_type = ContentType.objects.get_for_model(Entry)\n return comments.get_model().objects.filter(\n content_type=content_type, is_public=True).order_by(\n '-submit_date')[:self.limit]\n\n def link(self):\n \"\"\"\n URL of last discussions.\n \"\"\"\n return reverse('zinnia:entry_archive_index')\n\n def get_title(self, obj):\n \"\"\"\n Title of the feed.\n \"\"\"\n return _('Last discussions')\n\n def description(self):\n \"\"\"\n Description of the feed.\n \"\"\"\n return _('The last discussions on the site %(object)s') % {\n 'object': self.site.name}\n\n\nclass EntryDiscussions(DiscussionFeed):\n \"\"\"\n Feed for discussions on an entry.\n \"\"\"\n\n def get_object(self, request, year, month, day, slug):\n \"\"\"\n Retrieve the discussions by entry's slug.\n \"\"\"\n return get_object_or_404(Entry, slug=slug,\n publication_date__year=year,\n publication_date__month=month,\n publication_date__day=day)\n\n def items(self, obj):\n \"\"\"\n Items are the discussions on the entry.\n \"\"\"\n return obj.discussions[:self.limit]\n\n def link(self, obj):\n \"\"\"\n URL of the entry.\n \"\"\"\n return obj.get_absolute_url()\n\n def get_title(self, obj):\n \"\"\"\n Title of the feed.\n \"\"\"\n return _('Discussions on %(object)s') % {'object': obj.title}\n\n def description(self, obj):\n \"\"\"\n Description of the feed.\n \"\"\"\n return _('The last discussions on the entry %(object)s') % {\n 'object': obj.title}\n\n\nclass EntryComments(EntryDiscussions):\n \"\"\"\n Feed for comments on an entry.\n \"\"\"\n title_template = 'feeds/comment_title.html'\n description_template = 'feeds/comment_description.html'\n\n def items(self, obj):\n \"\"\"\n Items are the comments on the entry.\n \"\"\"\n return obj.comments[:self.limit]\n\n def item_link(self, item):\n \"\"\"\n URL of the comment.\n \"\"\"\n return item.get_absolute_url('#comment-%(id)s-by-'\n ) + slugify(item.user_name)\n\n def get_title(self, obj):\n \"\"\"\n Title of the feed.\n \"\"\"\n return _('Comments on %(object)s') % {'object': obj.title}\n\n def description(self, obj):\n \"\"\"\n Description of the feed.\n \"\"\"\n return _('The last comments on the entry %(object)s') % {\n 'object': obj.title}\n\n def item_enclosure_url(self, item):\n \"\"\"\n Return a gravatar image for enclosure.\n \"\"\"\n return get_gravatar(item.email)\n\n def item_enclosure_length(self, item):\n \"\"\"\n Hardcoded enclosure length.\n \"\"\"\n return '100000'\n\n def item_enclosure_mime_type(self, item):\n \"\"\"\n Hardcoded enclosure mimetype.\n \"\"\"\n return 'image/jpeg'\n\n\nclass EntryPingbacks(EntryDiscussions):\n \"\"\"\n Feed for pingbacks on an entry.\n \"\"\"\n title_template = 'feeds/pingback_title.html'\n description_template = 'feeds/pingback_description.html'\n\n def items(self, obj):\n \"\"\"\n Items are the pingbacks on the entry.\n \"\"\"\n return obj.pingbacks[:self.limit]\n\n def item_link(self, item):\n \"\"\"\n URL of the pingback.\n \"\"\"\n return item.get_absolute_url('#pingback-%(id)s')\n\n def get_title(self, obj):\n \"\"\"\n Title of the feed.\n \"\"\"\n return _('Pingbacks on %(object)s') % {'object': obj.title}\n\n def description(self, obj):\n \"\"\"\n Description of the feed.\n 
\"\"\"\n return _('The last pingbacks on the entry %(object)s') % {\n 'object': obj.title}\n\n\nclass EntryTrackbacks(EntryDiscussions):\n \"\"\"\n Feed for trackbacks on an entry.\n \"\"\"\n title_template = 'feeds/trackback_title.html'\n description_template = 'feeds/trackback_description.html'\n\n def items(self, obj):\n \"\"\"\n Items are the trackbacks on the entry.\n \"\"\"\n return obj.trackbacks[:self.limit]\n\n def item_link(self, item):\n \"\"\"\n URL of the trackback.\n \"\"\"\n return item.get_absolute_url('#trackback-%(id)s')\n\n def get_title(self, obj):\n \"\"\"\n Title of the feed.\n \"\"\"\n return _('Trackbacks on %(object)s') % {'object': obj.title}\n\n def description(self, obj):\n \"\"\"\n Description of the feed.\n \"\"\"\n return _('The last trackbacks on the entry %(object)s') % {\n 'object': obj.title}\n\n"}}},{"rowIdx":376,"cells":{"text":{"kind":"string","value":"\nfrom unittest.mock import Mock, patch\n\nfrom homeassistant.components import unifi\nfrom homeassistant.components.unifi.const import DOMAIN as UNIFI_DOMAIN\nfrom homeassistant.setup import async_setup_component\n\nfrom .test_controller import setup_unifi_integration\n\nfrom tests.async_mock import AsyncMock\nfrom tests.common import MockConfigEntry, mock_coro\n\n\nasync def test_setup_with_no_config(hass):\n \"\"\"Test that we do not discover anything or try to set up a bridge.\"\"\"\n assert await async_setup_component(hass, UNIFI_DOMAIN, {}) is True\n assert UNIFI_DOMAIN not in hass.data\n\n\nasync def test_successful_config_entry(hass):\n \"\"\"Test that configured options for a host are loaded via config entry.\"\"\"\n await setup_unifi_integration(hass)\n assert hass.data[UNIFI_DOMAIN]\n\n\nasync def test_controller_fail_setup(hass):\n \"\"\"Test that a failed setup still stores controller.\"\"\"\n with patch(\"homeassistant.components.unifi.UniFiController\") as mock_controller:\n mock_controller.return_value.async_setup = AsyncMock(return_value=False)\n await setup_unifi_integration(hass)\n\n assert hass.data[UNIFI_DOMAIN] == {}\n\n\nasync def test_controller_no_mac(hass):\n \"\"\"Test that configured options for a host are loaded via config entry.\"\"\"\n entry = MockConfigEntry(\n domain=UNIFI_DOMAIN,\n data={\n \"controller\": {\n \"host\": \"0.0.0.0\",\n \"username\": \"user\",\n \"password\": \"pass\",\n \"port\": 80,\n \"site\": \"default\",\n \"verify_ssl\": True,\n },\n \"poe_control\": True,\n },\n )\n entry.add_to_hass(hass)\n mock_registry = Mock()\n with patch(\n \"homeassistant.components.unifi.UniFiController\"\n ) as mock_controller, patch(\n \"homeassistant.helpers.device_registry.async_get_registry\",\n return_value=mock_coro(mock_registry),\n ):\n mock_controller.return_value.async_setup = AsyncMock(return_value=True)\n mock_controller.return_value.mac = None\n assert await unifi.async_setup_entry(hass, entry) is True\n\n assert len(mock_controller.mock_calls) == 2\n\n assert len(mock_registry.mock_calls) == 0\n\n\nasync def test_unload_entry(hass):\n \"\"\"Test being able to unload an entry.\"\"\"\n controller = await setup_unifi_integration(hass)\n assert hass.data[UNIFI_DOMAIN]\n\n assert await unifi.async_unload_entry(hass, controller.config_entry)\n assert not hass.data[UNIFI_DOMAIN]\n\n"}}},{"rowIdx":377,"cells":{"text":{"kind":"string","value":"\nfrom logilab.common.testlib import TestCase, unittest_main\nfrom logilab.common.tree import *\n\ntree = ('root', (\n ('child_1_1', (\n ('child_2_1', ()), ('child_2_2', (\n ('child_3_1', ()),\n )))),\n ('child_1_2', 
(('child_2_3', ()),))))\n\ndef make_tree(tuple):\n n = Node(tuple[0])\n for child in tuple[1]:\n n.append(make_tree(child))\n return n\n\nclass Node_ClassTest(TestCase):\n \"\"\" a basic tree node, caracterised by an id\"\"\"\n def setUp(self):\n \"\"\" called before each test from this class \"\"\"\n self.o = make_tree(tree)\n\n\n def test_flatten(self):\n result = [r.id for r in self.o.flatten()]\n expected = ['root', 'child_1_1', 'child_2_1', 'child_2_2', 'child_3_1', 'child_1_2', 'child_2_3']\n self.assertListEqual(result, expected)\n\n def test_flatten_with_outlist(self):\n resultnodes = []\n self.o.flatten(resultnodes)\n result = [r.id for r in resultnodes]\n expected = ['root', 'child_1_1', 'child_2_1', 'child_2_2', 'child_3_1', 'child_1_2', 'child_2_3']\n self.assertListEqual(result, expected)\n\n\n def test_known_values_remove(self):\n \"\"\"\n remove a child node\n \"\"\"\n self.o.remove(self.o.get_node_by_id('child_1_1'))\n self.assertRaises(NodeNotFound, self.o.get_node_by_id, 'child_1_1')\n\n def test_known_values_replace(self):\n \"\"\"\n replace a child node with another\n \"\"\"\n self.o.replace(self.o.get_node_by_id('child_1_1'), Node('hoho'))\n self.assertRaises(NodeNotFound, self.o.get_node_by_id, 'child_1_1')\n self.assertEqual(self.o.get_node_by_id('hoho'), self.o.children[0])\n\n def test_known_values_get_sibling(self):\n \"\"\"\n return the sibling node that has given id\n \"\"\"\n self.assertEqual(self.o.children[0].get_sibling('child_1_2'), self.o.children[1], None)\n\n def test_raise_get_sibling_NodeNotFound(self):\n self.assertRaises(NodeNotFound, self.o.children[0].get_sibling, 'houhou')\n\n def test_known_values_get_node_by_id(self):\n \"\"\"\n return node in whole hierarchy that has given id\n \"\"\"\n self.assertEqual(self.o.get_node_by_id('child_1_1'), self.o.children[0])\n\n def test_raise_get_node_by_id_NodeNotFound(self):\n self.assertRaises(NodeNotFound, self.o.get_node_by_id, 'houhou')\n\n def test_known_values_get_child_by_id(self):\n \"\"\"\n return child of given id\n \"\"\"\n self.assertEqual(self.o.get_child_by_id('child_2_1', recurse=1), self.o.children[0].children[0])\n\n def test_raise_get_child_by_id_NodeNotFound(self):\n self.assertRaises(NodeNotFound, self.o.get_child_by_id, nid='child_2_1')\n self.assertRaises(NodeNotFound, self.o.get_child_by_id, 'houhou')\n\n def test_known_values_get_child_by_path(self):\n \"\"\"\n return child of given path (path is a list of ids)\n \"\"\"\n self.assertEqual(self.o.get_child_by_path(['root', 'child_1_1', 'child_2_1']), self.o.children[0].children[0])\n\n def test_raise_get_child_by_path_NodeNotFound(self):\n self.assertRaises(NodeNotFound, self.o.get_child_by_path, ['child_1_1', 'child_2_11'])\n\n def test_known_values_depth_down(self):\n \"\"\"\n return depth of this node in the tree\n \"\"\"\n self.assertEqual(self.o.depth_down(), 4)\n self.assertEqual(self.o.get_child_by_id('child_2_1', True).depth_down(), 1)\n\n def test_known_values_depth(self):\n \"\"\"\n return depth of this node in the tree\n \"\"\"\n self.assertEqual(self.o.depth(), 0)\n self.assertEqual(self.o.get_child_by_id('child_2_1', True).depth(), 2)\n\n def test_known_values_width(self):\n \"\"\"\n return depth of this node in the tree\n \"\"\"\n self.assertEqual(self.o.width(), 3)\n self.assertEqual(self.o.get_child_by_id('child_2_1', True).width(), 1)\n\n def test_known_values_root(self):\n \"\"\"\n return the root node of the tree\n \"\"\"\n self.assertEqual(self.o.get_child_by_id('child_2_1', True).root(), self.o)\n\n def 
test_known_values_leaves(self):\n \"\"\"\n return a list with all the leaf nodes descendant from this task\n \"\"\"\n self.assertEqual(self.o.leaves(), [self.o.get_child_by_id('child_2_1', True),\n self.o.get_child_by_id('child_3_1', True),\n self.o.get_child_by_id('child_2_3', True)])\n\n def test_known_values_lineage(self):\n c31 = self.o.get_child_by_id('child_3_1', True)\n self.assertEqual(c31.lineage(), [self.o.get_child_by_id('child_3_1', True),\n self.o.get_child_by_id('child_2_2', True),\n self.o.get_child_by_id('child_1_1', True),\n self.o])\n\n\nclass post_order_list_FunctionTest(TestCase):\n \"\"\"\"\"\"\n def setUp(self):\n \"\"\" called before each test from this class \"\"\"\n self.o = make_tree(tree)\n\n def test_known_values_post_order_list(self):\n \"\"\"\n create a list with tree nodes for which the function returned true\n in a post order foashion\n \"\"\"\n L = ['child_2_1', 'child_3_1', 'child_2_2', 'child_1_1', 'child_2_3', 'child_1_2', 'root']\n l = [n.id for n in post_order_list(self.o)]\n self.assertEqual(l, L, l)\n\n def test_known_values_post_order_list2(self):\n \"\"\"\n create a list with tree nodes for which the function returned true\n in a post order foashion\n \"\"\"\n def filter(node):\n if node.id == 'child_2_2':\n return 0\n return 1\n L = ['child_2_1', 'child_1_1', 'child_2_3', 'child_1_2', 'root']\n l = [n.id for n in post_order_list(self.o, filter)]\n self.assertEqual(l, L, l)\n\n\nclass PostfixedDepthFirstIterator_ClassTest(TestCase):\n \"\"\"\"\"\"\n def setUp(self):\n \"\"\" called before each test from this class \"\"\"\n self.o = make_tree(tree)\n\n def test_known_values_next(self):\n L = ['child_2_1', 'child_3_1', 'child_2_2', 'child_1_1', 'child_2_3', 'child_1_2', 'root']\n iter = PostfixedDepthFirstIterator(self.o)\n o = next(iter)\n i = 0\n while o:\n self.assertEqual(o.id, L[i])\n o = next(iter)\n i += 1\n\n\nclass pre_order_list_FunctionTest(TestCase):\n \"\"\"\"\"\"\n def setUp(self):\n \"\"\" called before each test from this class \"\"\"\n self.o = make_tree(tree)\n\n def test_known_values_pre_order_list(self):\n \"\"\"\n create a list with tree nodes for which the function returned true\n in a pre order fashion\n \"\"\"\n L = ['root', 'child_1_1', 'child_2_1', 'child_2_2', 'child_3_1', 'child_1_2', 'child_2_3']\n l = [n.id for n in pre_order_list(self.o)]\n self.assertEqual(l, L, l)\n\n def test_known_values_pre_order_list2(self):\n \"\"\"\n create a list with tree nodes for which the function returned true\n in a pre order fashion\n \"\"\"\n def filter(node):\n if node.id == 'child_2_2':\n return 0\n return 1\n L = ['root', 'child_1_1', 'child_2_1', 'child_1_2', 'child_2_3']\n l = [n.id for n in pre_order_list(self.o, filter)]\n self.assertEqual(l, L, l)\n\n\nclass PrefixedDepthFirstIterator_ClassTest(TestCase):\n \"\"\"\"\"\"\n def setUp(self):\n \"\"\" called before each test from this class \"\"\"\n self.o = make_tree(tree)\n\n def test_known_values_next(self):\n L = ['root', 'child_1_1', 'child_2_1', 'child_2_2', 'child_3_1', 'child_1_2', 'child_2_3']\n iter = PrefixedDepthFirstIterator(self.o)\n o = next(iter)\n i = 0\n while o:\n self.assertEqual(o.id, L[i])\n o = next(iter)\n i += 1\n\n\nif __name__ == '__main__':\n unittest_main()\n\n"}}},{"rowIdx":378,"cells":{"text":{"kind":"string","value":"\nfrom homeassistant.helpers import discovery\nfrom homeassistant.setup import async_setup_component\n\nfrom .common import NUMATO_CFG, mockup_raise\n\nMOCKUP_ENTITY_IDS = {\n \"binary_sensor.numato_binary_sensor_mock_port2\",\n 
\"binary_sensor.numato_binary_sensor_mock_port3\",\n \"binary_sensor.numato_binary_sensor_mock_port4\",\n}\n\n\nasync def test_failing_setups_no_entities(hass, numato_fixture, monkeypatch):\n \"\"\"When port setup fails, no entity shall be created.\"\"\"\n monkeypatch.setattr(numato_fixture.NumatoDeviceMock, \"setup\", mockup_raise)\n assert await async_setup_component(hass, \"numato\", NUMATO_CFG)\n await hass.async_block_till_done()\n for entity_id in MOCKUP_ENTITY_IDS:\n assert entity_id not in hass.states.async_entity_ids()\n\n\nasync def test_setup_callbacks(hass, numato_fixture, monkeypatch):\n \"\"\"During setup a callback shall be registered.\"\"\"\n\n numato_fixture.discover()\n\n def mock_add_event_detect(self, port, callback, direction):\n assert self == numato_fixture.devices[0]\n assert port == 1\n assert callback is callable\n assert direction == numato_fixture.BOTH\n\n monkeypatch.setattr(\n numato_fixture.devices[0], \"add_event_detect\", mock_add_event_detect\n )\n assert await async_setup_component(hass, \"numato\", NUMATO_CFG)\n\n\nasync def test_hass_binary_sensor_notification(hass, numato_fixture):\n \"\"\"Test regular operations from within Home Assistant.\"\"\"\n assert await async_setup_component(hass, \"numato\", NUMATO_CFG)\n await hass.async_block_till_done() # wait until services are registered\n assert (\n hass.states.get(\"binary_sensor.numato_binary_sensor_mock_port2\").state == \"on\"\n )\n await hass.async_add_executor_job(numato_fixture.devices[0].callbacks[2], 2, False)\n await hass.async_block_till_done()\n assert (\n hass.states.get(\"binary_sensor.numato_binary_sensor_mock_port2\").state == \"off\"\n )\n\n\nasync def test_binary_sensor_setup_without_discovery_info(hass, config, numato_fixture):\n \"\"\"Test handling of empty discovery_info.\"\"\"\n numato_fixture.discover()\n await discovery.async_load_platform(hass, \"binary_sensor\", \"numato\", None, config)\n for entity_id in MOCKUP_ENTITY_IDS:\n assert entity_id not in hass.states.async_entity_ids()\n await hass.async_block_till_done() # wait for numato platform to be loaded\n for entity_id in MOCKUP_ENTITY_IDS:\n assert entity_id in hass.states.async_entity_ids()\n\n"}}},{"rowIdx":379,"cells":{"text":{"kind":"string","value":"\nfrom homeassistant.components.cover import (\n ATTR_POSITION,\n SUPPORT_CLOSE,\n SUPPORT_CLOSE_TILT,\n SUPPORT_OPEN,\n SUPPORT_OPEN_TILT,\n SUPPORT_SET_POSITION,\n SUPPORT_SET_TILT_POSITION,\n SUPPORT_STOP,\n SUPPORT_STOP_TILT,\n CoverEntity,\n)\nfrom homeassistant.core import callback\nfrom homeassistant.helpers.dispatcher import async_dispatcher_connect\n\nfrom .base import AcmedaBase\nfrom .const import ACMEDA_HUB_UPDATE, DOMAIN\nfrom .helpers import async_add_acmeda_entities\n\n\nasync def async_setup_entry(hass, config_entry, async_add_entities):\n \"\"\"Set up the Acmeda Rollers from a config entry.\"\"\"\n hub = hass.data[DOMAIN][config_entry.entry_id]\n\n current = set()\n\n @callback\n def async_add_acmeda_covers():\n async_add_acmeda_entities(\n hass, AcmedaCover, config_entry, current, async_add_entities\n )\n\n hub.cleanup_callbacks.append(\n async_dispatcher_connect(\n hass,\n ACMEDA_HUB_UPDATE.format(config_entry.entry_id),\n async_add_acmeda_covers,\n )\n )\n\n\nclass AcmedaCover(AcmedaBase, CoverEntity):\n \"\"\"Representation of a Acmeda cover device.\"\"\"\n\n @property\n def current_cover_position(self):\n \"\"\"Return the current position of the roller blind.\n\n None is unknown, 0 is closed, 100 is fully open.\n \"\"\"\n position = None\n if 
self.roller.type != 7:\n position = 100 - self.roller.closed_percent\n return position\n\n @property\n def current_cover_tilt_position(self):\n \"\"\"Return the current tilt of the roller blind.\n\n None is unknown, 0 is closed, 100 is fully open.\n \"\"\"\n position = None\n if self.roller.type in [7, 10]:\n position = 100 - self.roller.closed_percent\n return position\n\n @property\n def supported_features(self):\n \"\"\"Flag supported features.\"\"\"\n supported_features = 0\n if self.current_cover_position is not None:\n supported_features |= (\n SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP | SUPPORT_SET_POSITION\n )\n if self.current_cover_tilt_position is not None:\n supported_features |= (\n SUPPORT_OPEN_TILT\n | SUPPORT_CLOSE_TILT\n | SUPPORT_STOP_TILT\n | SUPPORT_SET_TILT_POSITION\n )\n\n return supported_features\n\n @property\n def is_closed(self):\n \"\"\"Return if the cover is closed.\"\"\"\n return self.roller.closed_percent == 100\n\n async def async_close_cover(self, **kwargs):\n \"\"\"Close the roller.\"\"\"\n await self.roller.move_down()\n\n async def async_open_cover(self, **kwargs):\n \"\"\"Open the roller.\"\"\"\n await self.roller.move_up()\n\n async def async_stop_cover(self, **kwargs):\n \"\"\"Stop the roller.\"\"\"\n await self.roller.move_stop()\n\n async def async_set_cover_position(self, **kwargs):\n \"\"\"Move the roller shutter to a specific position.\"\"\"\n await self.roller.move_to(100 - kwargs[ATTR_POSITION])\n\n async def async_close_cover_tilt(self, **kwargs):\n \"\"\"Close the roller.\"\"\"\n await self.roller.move_down()\n\n async def async_open_cover_tilt(self, **kwargs):\n \"\"\"Open the roller.\"\"\"\n await self.roller.move_up()\n\n async def async_stop_cover_tilt(self, **kwargs):\n \"\"\"Stop the roller.\"\"\"\n await self.roller.move_stop()\n\n async def async_set_cover_tilt(self, **kwargs):\n \"\"\"Tilt the roller shutter to a specific position.\"\"\"\n await self.roller.move_to(100 - kwargs[ATTR_POSITION])\n\n"}}},{"rowIdx":380,"cells":{"text":{"kind":"string","value":"\nimport logging\n\nimport synology_srm\nimport voluptuous as vol\n\nfrom homeassistant.components.device_tracker import (\n DOMAIN,\n PLATFORM_SCHEMA,\n DeviceScanner,\n)\nfrom homeassistant.const import (\n CONF_HOST,\n CONF_PASSWORD,\n CONF_PORT,\n CONF_SSL,\n CONF_USERNAME,\n CONF_VERIFY_SSL,\n)\nimport homeassistant.helpers.config_validation as cv\n\n_LOGGER = logging.getLogger(__name__)\n\nDEFAULT_USERNAME = \"admin\"\nDEFAULT_PORT = 8001\nDEFAULT_SSL = True\nDEFAULT_VERIFY_SSL = False\n\nPLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(\n {\n vol.Required(CONF_HOST): cv.string,\n vol.Required(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,\n vol.Required(CONF_PASSWORD): cv.string,\n vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,\n vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,\n vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,\n }\n)\n\nATTRIBUTE_ALIAS = {\n \"band\": None,\n \"connection\": None,\n \"current_rate\": None,\n \"dev_type\": None,\n \"hostname\": None,\n \"ip6_addr\": None,\n \"ip_addr\": None,\n \"is_baned\": \"is_banned\",\n \"is_beamforming_on\": None,\n \"is_guest\": None,\n \"is_high_qos\": None,\n \"is_low_qos\": None,\n \"is_manual_dev_type\": None,\n \"is_manual_hostname\": None,\n \"is_online\": None,\n \"is_parental_controled\": \"is_parental_controlled\",\n \"is_qos\": None,\n \"is_wireless\": None,\n \"mac\": None,\n \"max_rate\": None,\n \"mesh_node_id\": None,\n \"rate_quality\": None,\n \"signalstrength\": 
\"signal_strength\",\n \"transferRXRate\": \"transfer_rx_rate\",\n \"transferTXRate\": \"transfer_tx_rate\",\n}\n\n\ndef get_scanner(hass, config):\n \"\"\"Validate the configuration and return Synology SRM scanner.\"\"\"\n scanner = SynologySrmDeviceScanner(config[DOMAIN])\n\n return scanner if scanner.success_init else None\n\n\nclass SynologySrmDeviceScanner(DeviceScanner):\n \"\"\"This class scans for devices connected to a Synology SRM router.\"\"\"\n\n def __init__(self, config):\n \"\"\"Initialize the scanner.\"\"\"\n\n self.client = synology_srm.Client(\n host=config[CONF_HOST],\n port=config[CONF_PORT],\n username=config[CONF_USERNAME],\n password=config[CONF_PASSWORD],\n https=config[CONF_SSL],\n )\n\n if not config[CONF_VERIFY_SSL]:\n self.client.http.disable_https_verify()\n\n self.devices = []\n self.success_init = self._update_info()\n\n _LOGGER.info(\"Synology SRM scanner initialized\")\n\n def scan_devices(self):\n \"\"\"Scan for new devices and return a list with found device IDs.\"\"\"\n self._update_info()\n\n return [device[\"mac\"] for device in self.devices]\n\n def get_extra_attributes(self, device) -> dict:\n \"\"\"Get the extra attributes of a device.\"\"\"\n device = next(\n (result for result in self.devices if result[\"mac\"] == device), None\n )\n filtered_attributes = {}\n if not device:\n return filtered_attributes\n for attribute, alias in ATTRIBUTE_ALIAS.items():\n value = device.get(attribute)\n if value is None:\n continue\n attr = alias or attribute\n filtered_attributes[attr] = value\n return filtered_attributes\n\n def get_device_name(self, device):\n \"\"\"Return the name of the given device or None if we don't know.\"\"\"\n filter_named = [\n result[\"hostname\"] for result in self.devices if result[\"mac\"] == device\n ]\n\n if filter_named:\n return filter_named[0]\n\n return None\n\n def _update_info(self):\n \"\"\"Check the router for connected devices.\"\"\"\n _LOGGER.debug(\"Scanning for connected devices\")\n\n try:\n self.devices = self.client.core.get_network_nsm_device({\"is_online\": True})\n except synology_srm.http.SynologyException as ex:\n _LOGGER.error(\"Error with the Synology SRM: %s\", ex)\n return False\n\n _LOGGER.debug(\"Found %d device(s) connected to the router\", len(self.devices))\n\n return True\n\n"}}},{"rowIdx":381,"cells":{"text":{"kind":"string","value":"\nfrom datetime import timedelta\nimport logging\n\nimport Adafruit_DHT # pylint: disable=import-error\nimport voluptuous as vol\n\nfrom homeassistant.components.sensor import PLATFORM_SCHEMA\nfrom homeassistant.const import (\n CONF_MONITORED_CONDITIONS,\n CONF_NAME,\n PERCENTAGE,\n TEMP_FAHRENHEIT,\n)\nimport homeassistant.helpers.config_validation as cv\nfrom homeassistant.helpers.entity import Entity\nfrom homeassistant.util import Throttle\nfrom homeassistant.util.temperature import celsius_to_fahrenheit\n\n_LOGGER = logging.getLogger(__name__)\n\nCONF_PIN = \"pin\"\nCONF_SENSOR = \"sensor\"\nCONF_HUMIDITY_OFFSET = \"humidity_offset\"\nCONF_TEMPERATURE_OFFSET = \"temperature_offset\"\n\nDEFAULT_NAME = \"DHT Sensor\"\n\n# DHT11 is able to deliver data once per second, DHT22 once every two\nMIN_TIME_BETWEEN_UPDATES = timedelta(seconds=30)\n\nSENSOR_TEMPERATURE = \"temperature\"\nSENSOR_HUMIDITY = \"humidity\"\nSENSOR_TYPES = {\n SENSOR_TEMPERATURE: [\"Temperature\", None],\n SENSOR_HUMIDITY: [\"Humidity\", PERCENTAGE],\n}\n\nPLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(\n {\n vol.Required(CONF_SENSOR): cv.string,\n vol.Required(CONF_PIN): cv.string,\n 
vol.Optional(CONF_MONITORED_CONDITIONS, default=[]): vol.All(\n cv.ensure_list, [vol.In(SENSOR_TYPES)]\n ),\n vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,\n vol.Optional(CONF_TEMPERATURE_OFFSET, default=0): vol.All(\n vol.Coerce(float), vol.Range(min=-100, max=100)\n ),\n vol.Optional(CONF_HUMIDITY_OFFSET, default=0): vol.All(\n vol.Coerce(float), vol.Range(min=-100, max=100)\n ),\n }\n)\n\n\ndef setup_platform(hass, config, add_entities, discovery_info=None):\n \"\"\"Set up the DHT sensor.\"\"\"\n\n SENSOR_TYPES[SENSOR_TEMPERATURE][1] = hass.config.units.temperature_unit\n available_sensors = {\n \"AM2302\": Adafruit_DHT.AM2302,\n \"DHT11\": Adafruit_DHT.DHT11,\n \"DHT22\": Adafruit_DHT.DHT22,\n }\n sensor = available_sensors.get(config[CONF_SENSOR])\n pin = config[CONF_PIN]\n temperature_offset = config[CONF_TEMPERATURE_OFFSET]\n humidity_offset = config[CONF_HUMIDITY_OFFSET]\n\n if not sensor:\n _LOGGER.error(\"DHT sensor type is not supported\")\n return False\n\n data = DHTClient(Adafruit_DHT, sensor, pin)\n dev = []\n name = config[CONF_NAME]\n\n try:\n for variable in config[CONF_MONITORED_CONDITIONS]:\n dev.append(\n DHTSensor(\n data,\n variable,\n SENSOR_TYPES[variable][1],\n name,\n temperature_offset,\n humidity_offset,\n )\n )\n except KeyError:\n pass\n\n add_entities(dev, True)\n\n\nclass DHTSensor(Entity):\n \"\"\"Implementation of the DHT sensor.\"\"\"\n\n def __init__(\n self,\n dht_client,\n sensor_type,\n temp_unit,\n name,\n temperature_offset,\n humidity_offset,\n ):\n \"\"\"Initialize the sensor.\"\"\"\n self.client_name = name\n self._name = SENSOR_TYPES[sensor_type][0]\n self.dht_client = dht_client\n self.temp_unit = temp_unit\n self.type = sensor_type\n self.temperature_offset = temperature_offset\n self.humidity_offset = humidity_offset\n self._state = None\n self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]\n\n @property\n def name(self):\n \"\"\"Return the name of the sensor.\"\"\"\n return f\"{self.client_name} {self._name}\"\n\n @property\n def state(self):\n \"\"\"Return the state of the sensor.\"\"\"\n return self._state\n\n @property\n def unit_of_measurement(self):\n \"\"\"Return the unit of measurement of this entity, if any.\"\"\"\n return self._unit_of_measurement\n\n def update(self):\n \"\"\"Get the latest data from the DHT and updates the states.\"\"\"\n self.dht_client.update()\n temperature_offset = self.temperature_offset\n humidity_offset = self.humidity_offset\n data = self.dht_client.data\n\n if self.type == SENSOR_TEMPERATURE and SENSOR_TEMPERATURE in data:\n temperature = data[SENSOR_TEMPERATURE]\n _LOGGER.debug(\n \"Temperature %.1f \\u00b0C + offset %.1f\",\n temperature,\n temperature_offset,\n )\n if -20 <= temperature < 80:\n self._state = round(temperature + temperature_offset, 1)\n if self.temp_unit == TEMP_FAHRENHEIT:\n self._state = round(celsius_to_fahrenheit(temperature), 1)\n elif self.type == SENSOR_HUMIDITY and SENSOR_HUMIDITY in data:\n humidity = data[SENSOR_HUMIDITY]\n _LOGGER.debug(\"Humidity %.1f%% + offset %.1f\", humidity, humidity_offset)\n if 0 <= humidity <= 100:\n self._state = round(humidity + humidity_offset, 1)\n\n\nclass DHTClient:\n \"\"\"Get the latest data from the DHT sensor.\"\"\"\n\n def __init__(self, adafruit_dht, sensor, pin):\n \"\"\"Initialize the sensor.\"\"\"\n self.adafruit_dht = adafruit_dht\n self.sensor = sensor\n self.pin = pin\n self.data = {}\n\n @Throttle(MIN_TIME_BETWEEN_UPDATES)\n def update(self):\n \"\"\"Get the latest data the DHT sensor.\"\"\"\n humidity, 
temperature = self.adafruit_dht.read_retry(self.sensor, self.pin)\n if temperature:\n self.data[SENSOR_TEMPERATURE] = temperature\n if humidity:\n self.data[SENSOR_HUMIDITY] = humidity\n\n"}}},{"rowIdx":382,"cells":{"text":{"kind":"string","value":"\nfrom __future__ import unicode_literals\nfrom builtins import object\nimport copy\nimport deepdish as dd\nimport numpy as np\nfrom .tools.normalize import normalize as normalizer\nfrom .tools.reduce import reduce as reducer\nfrom .tools.align import align as aligner\nfrom .tools.format_data import format_data\nfrom ._shared.helpers import convert_text, get_dtype\nfrom .config import __version__\n\n\nclass DataGeometry(object):\n \"\"\"\n Hypertools data object class\n\n A DataGeometry object contains the data, figure handles and transform\n functions used to create a plot. Note: this class should not be called\n directly, but is used by the `hyp.plot` function to create a plot object.\n\n Parameters\n ----------\n\n fig : matplotlib.Figure\n The matplotlib figure handle for the plot\n\n ax : matplotlib.Axes\n The matplotlib axes handle for the plot\n\n line_ani : matplotlib.animation.FuncAnimation\n The matplotlib animation handle (if the plot is an animation)\n\n data : list\n A list of numpy arrays representing the raw data\n\n xform_data : list\n A list of numpy arrays representing the transformed data\n\n reduce : dict\n A dictionary containing the reduction model and parameters\n\n align : dict\n A dictionary containing align model and parameters\n\n normalize : str\n A string representing the kind of normalization\n\n kwargs : dict\n A dictionary containing all kwargs passed to the plot function\n\n version : str\n The version of the software used to create the class instance\n\n \"\"\"\n\n def __init__(self, fig=None, ax=None, line_ani=None, data=None, xform_data=None,\n reduce=None, align=None, normalize=None, semantic=None,\n vectorizer=None, corpus=None, kwargs=None, version=__version__,\n dtype=None):\n\n # matplotlib figure handle\n self.fig = fig\n\n # matplotlib axis handle\n self.ax = ax\n\n # matplotlib line_ani handle (if its an animation)\n self.line_ani = line_ani\n\n # convert to numpy array if text\n if isinstance(data, list):\n data = list(map(convert_text, data))\n self.data = data\n self.dtype = get_dtype(data)\n\n # the transformed data\n self.xform_data = xform_data\n\n # dictionary of model and model_params\n self.reduce = reduce\n\n # 'hyper', 'SRM' or None\n self.align = align\n\n # 'within', 'across', 'row' or False\n self.normalize = normalize\n\n # text params\n self.semantic = semantic\n self.vectorizer = vectorizer\n\n self.corpus = corpus\n\n # dictionary of kwargs\n self.kwargs = kwargs\n\n # hypertools version\n self.version = version\n\n def get_data(self):\n \"\"\"Return a copy of the data\"\"\"\n return copy.copy(self.data)\n\n def get_formatted_data(self):\n \"\"\"Return a formatted copy of the data\"\"\"\n return format_data(self.data)\n\n # a function to transform new data\n def transform(self, data=None):\n \"\"\"\n Return transformed data, or transform new data using the same model\n parameters\n\n Parameters\n ----------\n data : numpy array, pandas dataframe or list of arrays/dfs\n The data to transform. 
If no data is passed, the xform_data from\n the DataGeometry object will be returned.\n\n Returns\n ----------\n xformed_data : list of numpy arrays\n The transformed data\n\n \"\"\"\n # if no new data passed,\n if data is None:\n return self.xform_data\n else:\n formatted = format_data(\n data,\n semantic=self.semantic,\n vectorizer=self.vectorizer,\n corpus=self.corpus,\n ppca=True)\n norm = normalizer(formatted, normalize=self.normalize)\n reduction = reducer(\n norm,\n reduce=self.reduce,\n ndims=self.reduce['params']['n_components'])\n return aligner(reduction, align=self.align)\n\n # a function to plot the data\n def plot(self, data=None, **kwargs):\n \"\"\"\n Plot the data\n\n Parameters\n ----------\n data : numpy array, pandas dataframe or list of arrays/dfs\n The data to plot. If no data is passed, the xform_data from\n the DataGeometry object will be returned.\n\n kwargs : keyword arguments\n Any keyword arguments supported by `hypertools.plot` are also supported\n by this method\n\n Returns\n ----------\n geo : hypertools.DataGeometry\n A new data geometry object\n\n \"\"\"\n\n # import plot here to avoid circular imports\n from .plot.plot import plot as plotter\n\n if data is None:\n d = copy.copy(self.data)\n transform = copy.copy(self.xform_data)\n if any([k in kwargs for k in ['reduce', 'align', 'normalize',\n 'semantic', 'vectorizer', 'corpus']]):\n d = copy.copy(self.data)\n transform = None\n else:\n d = data\n transform = None\n\n # get kwargs and update with new kwargs\n new_kwargs = copy.copy(self.kwargs)\n update_kwargs = dict(transform=transform, reduce=self.reduce,\n align=self.align, normalize=self.normalize,\n semantic=self.semantic, vectorizer=self.vectorizer,\n corpus=self.corpus)\n new_kwargs.update(update_kwargs)\n for key in kwargs:\n new_kwargs.update({key : kwargs[key]})\n return plotter(d, **new_kwargs)\n\n def save(self, fname, compression='blosc'):\n \"\"\"\n Save method for the data geometry object\n\n The data will be saved as a 'geo' file, which is a dictionary containing\n the elements of a data geometry object saved in the hd5 format using\n `deepdish`.\n\n Parameters\n ----------\n\n fname : str\n A name for the file. If the file extension (.geo) is not specified,\n it will be appended.\n\n compression : str\n The kind of compression to use. See the deepdish documentation for\n options: http://deepdish.readthedocs.io/en/latest/api_io.html#deepdish.io.save\n\n \"\"\"\n if hasattr(self, 'dtype'):\n if 'list' in self.dtype:\n data = np.array(self.data)\n elif 'df' in self.dtype:\n data = {k: np.array(v).astype('str') for k, v in self.data.to_dict('list').items()}\n else:\n data = self.data\n\n # put geo vars into a dict\n geo = {\n 'data' : data,\n 'xform_data' : np.array(self.xform_data),\n 'reduce' : self.reduce,\n 'align' : self.align,\n 'normalize' : self.normalize,\n 'semantic' : self.semantic,\n 'corpus' : np.array(self.corpus) if isinstance(self.corpus, list) else self.corpus,\n 'kwargs' : self.kwargs,\n 'version' : self.version,\n 'dtype' : self.dtype\n }\n\n # if extension wasn't included, add it\n if fname[-4:]!='.geo':\n fname+='.geo'\n\n # save\n dd.io.save(fname, geo, compression=compression)\n\n"}}},{"rowIdx":383,"cells":{"text":{"kind":"string","value":"\nimport numpy as np\nimport pandas as pd\n\nimport xarray as xr\n\nfrom . 
import parameterized, randn, requires_dask\n\nnx = 3000\nlong_nx = 30000000\nny = 2000\nnt = 1000\nwindow = 20\n\nrandn_xy = randn((nx, ny), frac_nan=0.1)\nrandn_xt = randn((nx, nt))\nrandn_t = randn((nt,))\nrandn_long = randn((long_nx,), frac_nan=0.1)\n\n\nnew_x_short = np.linspace(0.3 * nx, 0.7 * nx, 100)\nnew_x_long = np.linspace(0.3 * nx, 0.7 * nx, 1000)\nnew_y_long = np.linspace(0.1, 0.9, 1000)\n\n\nclass Interpolation:\n def setup(self, *args, **kwargs):\n self.ds = xr.Dataset(\n {\n \"var1\": ((\"x\", \"y\"), randn_xy),\n \"var2\": ((\"x\", \"t\"), randn_xt),\n \"var3\": ((\"t\",), randn_t),\n },\n coords={\n \"x\": np.arange(nx),\n \"y\": np.linspace(0, 1, ny),\n \"t\": pd.date_range(\"1970-01-01\", periods=nt, freq=\"D\"),\n \"x_coords\": (\"x\", np.linspace(1.1, 2.1, nx)),\n },\n )\n\n @parameterized([\"method\", \"is_short\"], ([\"linear\", \"cubic\"], [True, False]))\n def time_interpolation(self, method, is_short):\n new_x = new_x_short if is_short else new_x_long\n self.ds.interp(x=new_x, method=method).load()\n\n @parameterized([\"method\"], ([\"linear\", \"nearest\"]))\n def time_interpolation_2d(self, method):\n self.ds.interp(x=new_x_long, y=new_y_long, method=method).load()\n\n\nclass InterpolationDask(Interpolation):\n def setup(self, *args, **kwargs):\n requires_dask()\n super().setup(**kwargs)\n self.ds = self.ds.chunk({\"t\": 50})\n\n"}}},{"rowIdx":384,"cells":{"text":{"kind":"string","value":"\nimport imp\nimport os\n\nimport base\nfrom nose import tools\n\nfrom docker_registry.lib import checksums\nfrom docker_registry.lib import xtarfile\n\n\n# setting like this in test, due to flake8 H302\ntarfile = xtarfile.tarfile\n\n# To test whether the UnicodeDecodeError still exists\n# (it's still present in python 3.4.0)\n# ((loading this way, since we've monkey patched currently loaded tarfile))\ntarfile_vanilla = imp.load_module('test_failing', *imp.find_module('tarfile'))\n\n\nclass TestTarfile(base.TestCase):\n @tools.raises(UnicodeDecodeError)\n def test_vanilla_tarfile(self):\n layer_fh = open(os.path.join(base.data_dir, \"xattr/layer.tar\"))\n tar = tarfile_vanilla.open(mode='r|*', fileobj=layer_fh)\n assert tar\n\n def test_headers(self):\n expected = {\n \"46af0962ab5afeb5ce6740d4d91652e69206fc991fd5328c1a94d364ad00e457/layer.tar\": { # noqa\n \"dev\": {\n \"headers\": {\"size\": 0, \"mode\": 0o40755, \"type\": \"5\"},\n \"pax\": {},\n },\n \"dev/core\": {\n \"headers\": {\"size\": 0, \"mode\": 0o120777, \"type\": \"2\"},\n \"pax\": {},\n },\n \"dev/stderr\": {\n \"headers\": {\"size\": 0, \"mode\": 0o120777, \"type\": \"2\"},\n \"pax\": {},\n },\n \"dev/stdout\": {\n \"headers\": {\"size\": 0, \"mode\": 0o120777, \"type\": \"2\"},\n \"pax\": {},\n },\n \"dev/fd\": {\n \"headers\": {\"size\": 0, \"mode\": 0o120777, \"type\": \"2\"},\n \"pax\": {},\n },\n \"dev/ptmx\": {\n \"headers\": {\"size\": 0, \"mode\": 0o120777, \"type\": \"2\"},\n \"pax\": {},\n },\n \"dev/stdin\": {\n \"headers\": {\"size\": 0, \"mode\": 0o120777, \"type\": \"2\"},\n \"pax\": {},\n },\n \"etc\": {\n \"headers\": {\"size\": 0, \"mode\": 0o40755, \"type\": \"5\"},\n \"pax\": {},\n },\n \"etc/sudoers\": {\n \"headers\": {\"size\": 3348, \"mode\": 0o100440, \"type\": \"0\"},\n \"pax\": {},\n },\n },\n \"511136ea3c5a64f264b78b5433614aec563103b4d4702f3ba7d4d2698e22c158/layer.tar\": { # noqa\n \".\": {\n \"headers\": {\"size\": 0, \"mode\": 0o40755, \"type\": \"5\"},\n \"pax\": {},\n },\n },\n \"xattr/layer.tar\": {\n \"file\": {\n \"headers\": {\"size\": 0, \"mode\": 0o100644, \"type\": 
\"0\"},\n \"pax\": {u\"SCHILY.xattr.security.capability\": \"\\x01\\x00\\x00\\x02\\xc0\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\"}, # noqa\n },\n },\n }\n for file in expected.keys():\n layer_fh = open(os.path.join(base.data_dir, file))\n tar = tarfile.open(mode='r|*', fileobj=layer_fh)\n member_count = 0\n for member in tar:\n member_count += 1\n # check that we know the file names\n msg = \"in %s, did not find file %s\" % (file, member.path)\n l = len(filter(lambda x: member.path in x,\n expected[file].keys()))\n assert (l > 0), msg\n e = expected[file][member.path]\n for attr in e[\"headers\"].keys():\n msg = \"in %s:%s, expected %s of %s, but got %s\" % (\n file, member.path, attr, e[\"headers\"][attr],\n getattr(member, attr))\n assert e[\"headers\"][attr] == getattr(member, attr), msg\n for attr in e[\"pax\"].keys():\n msg = b\"in %s:%s, expected %s of %s, but got %s\".format(\n file, member.path, attr, e[\"pax\"][attr],\n member.pax_headers[attr])\n assert e[\"pax\"][attr] == member.pax_headers[attr], msg\n\n assert member_count == len(expected[file])\n layer_fh.close()\n\n def test_tarsum(self):\n expected = {\n \"46af0962ab5afeb5ce6740d4d91652e69206fc991fd5328c1a94d364ad00e457\": \"tarsum+sha256:e58fcf7418d4390dec8e8fb69d88c06ec07039d651fedd3aa72af9972e7d046b\", # noqa\n \"511136ea3c5a64f264b78b5433614aec563103b4d4702f3ba7d4d2698e22c158\": \"tarsum+sha256:ac672ee85da9ab7f9667ae3c32841d3e42f33cc52c273c23341dabba1c8b0c8b\", # noqa\n \"xattr\": \"tarsum+sha256:e86f81a4d552f13039b1396ed03ca968ea9717581f9577ef1876ea6ff9b38c98\", # noqa\n }\n for layer in expected.keys():\n layer_fh = open(os.path.join(base.data_dir, layer, \"layer.tar\"))\n json_fh = open(os.path.join(base.data_dir, layer, \"json\"))\n\n tarsum = checksums.TarSum(json_fh.read())\n tar = tarfile.open(mode='r|*', fileobj=layer_fh)\n for member in tar:\n tarsum.append(member, tar)\n sum = tarsum.compute()\n msg = \"layer %s, expected [%s] but got [%s]\" % (\n layer, expected[layer], sum)\n assert expected[layer] == sum, msg\n\n layer_fh.close()\n json_fh.close()\n\n"}}},{"rowIdx":385,"cells":{"text":{"kind":"string","value":"\nimport unittest\n\nimport numpy as np\n\nimport chainer\nfrom chainer.initializers import Zero\nfrom chainer.testing import attr\nfrom chainer import Variable\n\nfrom chainercv.links import VGG16\nfrom chainercv.utils import testing\n\n\n@testing.parameterize(\n {'pick': 'prob', 'shapes': (1, 200), 'n_class': 200},\n {'pick': 'pool5', 'shapes': (1, 512, 7, 7), 'n_class': None},\n {'pick': ['conv5_3', 'conv4_2'],\n 'shapes': ((1, 512, 14, 14), (1, 512, 28, 28)), 'n_class': None},\n)\nclass TestVGG16Call(unittest.TestCase):\n\n def setUp(self):\n self.link = VGG16(\n n_class=self.n_class, pretrained_model=None,\n initialW=Zero())\n self.link.pick = self.pick\n\n def check_call(self):\n xp = self.link.xp\n\n x1 = Variable(xp.asarray(np.random.uniform(\n -1, 1, (1, 3, 224, 224)).astype(np.float32)))\n with chainer.no_backprop_mode():\n features = self.link(x1)\n if isinstance(features, tuple):\n for activation, shape in zip(features, self.shapes):\n self.assertEqual(activation.shape, shape)\n else:\n self.assertEqual(features.shape, self.shapes)\n self.assertEqual(features.dtype, np.float32)\n\n @attr.slow\n def test_call_cpu(self):\n self.check_call()\n\n @attr.gpu\n @attr.slow\n def test_call_gpu(self):\n self.link.to_gpu()\n self.check_call()\n\n\n@testing.parameterize(*testing.product({\n 'n_class': [None, 500, 1000],\n 'pretrained_model': ['imagenet'],\n 'mean': 
[None, np.random.uniform((3, 1, 1)).astype(np.float32)],\n}))\nclass TestVGG16Pretrained(unittest.TestCase):\n\n @attr.slow\n def test_pretrained(self):\n kwargs = {\n 'n_class': self.n_class,\n 'pretrained_model': self.pretrained_model,\n 'mean': self.mean,\n }\n\n if self.pretrained_model == 'imagenet':\n valid = self.n_class in {None, 1000}\n\n if valid:\n VGG16(**kwargs)\n else:\n with self.assertRaises(ValueError):\n VGG16(**kwargs)\n\n\ntesting.run_module(__name__, __file__)\n\n"}}},{"rowIdx":386,"cells":{"text":{"kind":"string","value":"\nfrom homeassistant.components.light import (\n ATTR_BRIGHTNESS,\n ATTR_BRIGHTNESS_PCT,\n ATTR_COLOR_NAME,\n ATTR_COLOR_TEMP,\n ATTR_EFFECT,\n ATTR_FLASH,\n ATTR_HS_COLOR,\n ATTR_KELVIN,\n ATTR_PROFILE,\n ATTR_RGB_COLOR,\n ATTR_TRANSITION,\n ATTR_WHITE_VALUE,\n ATTR_XY_COLOR,\n DOMAIN,\n)\nfrom homeassistant.const import (\n ATTR_ENTITY_ID,\n ENTITY_MATCH_ALL,\n SERVICE_TOGGLE,\n SERVICE_TURN_OFF,\n SERVICE_TURN_ON,\n)\nfrom homeassistant.loader import bind_hass\n\n\n@bind_hass\ndef turn_on(\n hass,\n entity_id=ENTITY_MATCH_ALL,\n transition=None,\n brightness=None,\n brightness_pct=None,\n rgb_color=None,\n xy_color=None,\n hs_color=None,\n color_temp=None,\n kelvin=None,\n white_value=None,\n profile=None,\n flash=None,\n effect=None,\n color_name=None,\n):\n \"\"\"Turn all or specified light on.\"\"\"\n hass.add_job(\n async_turn_on,\n hass,\n entity_id,\n transition,\n brightness,\n brightness_pct,\n rgb_color,\n xy_color,\n hs_color,\n color_temp,\n kelvin,\n white_value,\n profile,\n flash,\n effect,\n color_name,\n )\n\n\nasync def async_turn_on(\n hass,\n entity_id=ENTITY_MATCH_ALL,\n transition=None,\n brightness=None,\n brightness_pct=None,\n rgb_color=None,\n xy_color=None,\n hs_color=None,\n color_temp=None,\n kelvin=None,\n white_value=None,\n profile=None,\n flash=None,\n effect=None,\n color_name=None,\n):\n \"\"\"Turn all or specified light on.\"\"\"\n data = {\n key: value\n for key, value in [\n (ATTR_ENTITY_ID, entity_id),\n (ATTR_PROFILE, profile),\n (ATTR_TRANSITION, transition),\n (ATTR_BRIGHTNESS, brightness),\n (ATTR_BRIGHTNESS_PCT, brightness_pct),\n (ATTR_RGB_COLOR, rgb_color),\n (ATTR_XY_COLOR, xy_color),\n (ATTR_HS_COLOR, hs_color),\n (ATTR_COLOR_TEMP, color_temp),\n (ATTR_KELVIN, kelvin),\n (ATTR_WHITE_VALUE, white_value),\n (ATTR_FLASH, flash),\n (ATTR_EFFECT, effect),\n (ATTR_COLOR_NAME, color_name),\n ]\n if value is not None\n }\n\n await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True)\n\n\n@bind_hass\ndef turn_off(hass, entity_id=ENTITY_MATCH_ALL, transition=None):\n \"\"\"Turn all or specified light off.\"\"\"\n hass.add_job(async_turn_off, hass, entity_id, transition)\n\n\nasync def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL, transition=None):\n \"\"\"Turn all or specified light off.\"\"\"\n data = {\n key: value\n for key, value in [(ATTR_ENTITY_ID, entity_id), (ATTR_TRANSITION, transition)]\n if value is not None\n }\n\n await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True)\n\n\n@bind_hass\ndef toggle(\n hass,\n entity_id=ENTITY_MATCH_ALL,\n transition=None,\n brightness=None,\n brightness_pct=None,\n rgb_color=None,\n xy_color=None,\n hs_color=None,\n color_temp=None,\n kelvin=None,\n white_value=None,\n profile=None,\n flash=None,\n effect=None,\n color_name=None,\n):\n \"\"\"Toggle all or specified light.\"\"\"\n hass.add_job(\n async_toggle,\n hass,\n entity_id,\n transition,\n brightness,\n brightness_pct,\n rgb_color,\n xy_color,\n hs_color,\n 
color_temp,\n kelvin,\n white_value,\n profile,\n flash,\n effect,\n color_name,\n )\n\n\nasync def async_toggle(\n hass,\n entity_id=ENTITY_MATCH_ALL,\n transition=None,\n brightness=None,\n brightness_pct=None,\n rgb_color=None,\n xy_color=None,\n hs_color=None,\n color_temp=None,\n kelvin=None,\n white_value=None,\n profile=None,\n flash=None,\n effect=None,\n color_name=None,\n):\n \"\"\"Turn all or specified light on.\"\"\"\n data = {\n key: value\n for key, value in [\n (ATTR_ENTITY_ID, entity_id),\n (ATTR_PROFILE, profile),\n (ATTR_TRANSITION, transition),\n (ATTR_BRIGHTNESS, brightness),\n (ATTR_BRIGHTNESS_PCT, brightness_pct),\n (ATTR_RGB_COLOR, rgb_color),\n (ATTR_XY_COLOR, xy_color),\n (ATTR_HS_COLOR, hs_color),\n (ATTR_COLOR_TEMP, color_temp),\n (ATTR_KELVIN, kelvin),\n (ATTR_WHITE_VALUE, white_value),\n (ATTR_FLASH, flash),\n (ATTR_EFFECT, effect),\n (ATTR_COLOR_NAME, color_name),\n ]\n if value is not None\n }\n\n await hass.services.async_call(DOMAIN, SERVICE_TOGGLE, data, blocking=True)\n\n"}}},{"rowIdx":387,"cells":{"text":{"kind":"string","value":"\nfrom docker_registry.core import compat\nfrom docker_registry.core import lru\n\n# In case you want to mock (and that doesn't work well)\n# import mock\n# import mockredis\n# @mock.patch('docker_registry.core.lru.redis.StrictRedis',\n# mockredis.mock_strict_redis_client)\n# def boot():\n# lru.init()\n\n# boot()\n\nlru.init()\n\n\nclass Dumb(object):\n\n value = {}\n\n @lru.get\n def get(self, key):\n if key not in self.value:\n return None\n return self.value[key]\n\n @lru.set\n def set(self, key, value):\n self.value[key] = value\n\n @lru.remove\n def remove(self, key):\n if key not in self.value:\n return\n del self.value[key]\n\n\nclass TestLru(object):\n\n def setUp(self):\n self._dumb = Dumb()\n\n def testNonExistentGet(self):\n assert not self._dumb.get('nonexistent')\n assert not self._dumb.get('nonexistent')\n\n def testSetSimple1(self):\n content = 'bar'\n result = b'bar'\n self._dumb.set('foo', content)\n assert self._dumb.get('foo') == result\n assert self._dumb.get('foo') == result\n\n def testSetBytes1(self):\n content = b'foo'\n result = b'foo'\n self._dumb.set('foo', content)\n assert self._dumb.get('foo') == result\n\n def testSetBytes2(self):\n content = b'\\xc3\\x9f'\n result = b'\\xc3\\x9f'\n self._dumb.set('foo', content)\n assert self._dumb.get('foo') == result\n\n def testSetUnicode1(self):\n content = u'foo'\n result = b'foo'\n self._dumb.set('foo', content)\n assert self._dumb.get('foo') == result\n\n def testSetUnicode2(self):\n content = u'ß'\n result = b'\\xc3\\x9f'\n self._dumb.set('foo', content)\n assert self._dumb.get('foo') == result\n\n def testSetUnicode3(self):\n content = u'ß'.encode('utf8')\n result = b'\\xc3\\x9f'\n self._dumb.set('foo', content)\n assert self._dumb.get('foo') == result\n\n def testSetUnicode4(self):\n content = 'ß'\n if compat.is_py2:\n content = content.decode('utf8')\n content = content.encode('utf8')\n result = b'\\xc3\\x9f'\n self._dumb.set('foo', content)\n assert self._dumb.get('foo') == result\n\n def testRemove(self):\n self._dumb.set('foo', 'bar')\n assert self._dumb.get('foo')\n self._dumb.remove('foo')\n assert not self._dumb.get('foo')\n assert not self._dumb.get('foo')\n\n"}}},{"rowIdx":388,"cells":{"text":{"kind":"string","value":"\nfrom test import CollectorTestCase\nfrom test import get_collector_config\nfrom test import unittest\nfrom mock import patch\n\nfrom diamond.collector import Collector\nfrom example import 
ExampleCollector\n\n##########################################################################\n\n\nclass TestExampleCollector(CollectorTestCase):\n\n def setUp(self):\n config = get_collector_config('ExampleCollector', {\n 'interval': 10\n })\n\n self.collector = ExampleCollector(config, None)\n\n def test_import(self):\n self.assertTrue(ExampleCollector)\n\n @patch.object(Collector, 'publish')\n def test(self, publish_mock):\n self.collector.collect()\n\n metrics = {\n 'my.example.metric': 42\n }\n\n self.setDocExample(collector=self.collector.__class__.__name__,\n metrics=metrics,\n defaultpath=self.collector.config['path'])\n self.assertPublishedMany(publish_mock, metrics)\n\n##########################################################################\nif __name__ == \"__main__\":\n unittest.main()\n\n"}}},{"rowIdx":389,"cells":{"text":{"kind":"string","value":"\nfrom collections import deque\nfrom functools import wraps\nimport logging\nfrom typing import Any\n\nfrom homeassistant.helpers.typing import HomeAssistantType\n\nfrom .const import ATTR_DISCOVERY_PAYLOAD, ATTR_DISCOVERY_TOPIC\nfrom .models import MessageCallbackType\n\n_LOGGER = logging.getLogger(__name__)\n\nDATA_MQTT_DEBUG_INFO = \"mqtt_debug_info\"\nSTORED_MESSAGES = 10\n\n\ndef log_messages(hass: HomeAssistantType, entity_id: str) -> MessageCallbackType:\n \"\"\"Wrap an MQTT message callback to support message logging.\"\"\"\n\n def _log_message(msg):\n \"\"\"Log message.\"\"\"\n debug_info = hass.data[DATA_MQTT_DEBUG_INFO]\n messages = debug_info[\"entities\"][entity_id][\"subscriptions\"][\n msg.subscribed_topic\n ][\"messages\"]\n if msg not in messages:\n messages.append(msg)\n\n def _decorator(msg_callback: MessageCallbackType):\n @wraps(msg_callback)\n def wrapper(msg: Any) -> None:\n \"\"\"Log message.\"\"\"\n _log_message(msg)\n msg_callback(msg)\n\n setattr(wrapper, \"__entity_id\", entity_id)\n return wrapper\n\n return _decorator\n\n\ndef add_subscription(hass, message_callback, subscription):\n \"\"\"Prepare debug data for subscription.\"\"\"\n entity_id = getattr(message_callback, \"__entity_id\", None)\n if entity_id:\n debug_info = hass.data.setdefault(\n DATA_MQTT_DEBUG_INFO, {\"entities\": {}, \"triggers\": {}}\n )\n entity_info = debug_info[\"entities\"].setdefault(\n entity_id, {\"subscriptions\": {}, \"discovery_data\": {}}\n )\n if subscription not in entity_info[\"subscriptions\"]:\n entity_info[\"subscriptions\"][subscription] = {\n \"count\": 0,\n \"messages\": deque([], STORED_MESSAGES),\n }\n entity_info[\"subscriptions\"][subscription][\"count\"] += 1\n\n\ndef remove_subscription(hass, message_callback, subscription):\n \"\"\"Remove debug data for subscription if it exists.\"\"\"\n entity_id = getattr(message_callback, \"__entity_id\", None)\n if entity_id and entity_id in hass.data[DATA_MQTT_DEBUG_INFO][\"entities\"]:\n hass.data[DATA_MQTT_DEBUG_INFO][\"entities\"][entity_id][\"subscriptions\"][\n subscription\n ][\"count\"] -= 1\n if not hass.data[DATA_MQTT_DEBUG_INFO][\"entities\"][entity_id][\"subscriptions\"][\n subscription\n ][\"count\"]:\n hass.data[DATA_MQTT_DEBUG_INFO][\"entities\"][entity_id][\"subscriptions\"].pop(\n subscription\n )\n\n\ndef add_entity_discovery_data(hass, discovery_data, entity_id):\n \"\"\"Add discovery data.\"\"\"\n debug_info = hass.data.setdefault(\n DATA_MQTT_DEBUG_INFO, {\"entities\": {}, \"triggers\": {}}\n )\n entity_info = debug_info[\"entities\"].setdefault(\n entity_id, {\"subscriptions\": {}, \"discovery_data\": {}}\n )\n entity_info[\"discovery_data\"] 
= discovery_data\n\n\ndef update_entity_discovery_data(hass, discovery_payload, entity_id):\n \"\"\"Update discovery data.\"\"\"\n entity_info = hass.data[DATA_MQTT_DEBUG_INFO][\"entities\"][entity_id]\n entity_info[\"discovery_data\"][ATTR_DISCOVERY_PAYLOAD] = discovery_payload\n\n\ndef remove_entity_data(hass, entity_id):\n \"\"\"Remove discovery data.\"\"\"\n hass.data[DATA_MQTT_DEBUG_INFO][\"entities\"].pop(entity_id)\n\n\ndef add_trigger_discovery_data(hass, discovery_hash, discovery_data, device_id):\n \"\"\"Add discovery data.\"\"\"\n debug_info = hass.data.setdefault(\n DATA_MQTT_DEBUG_INFO, {\"entities\": {}, \"triggers\": {}}\n )\n debug_info[\"triggers\"][discovery_hash] = {\n \"device_id\": device_id,\n \"discovery_data\": discovery_data,\n }\n\n\ndef update_trigger_discovery_data(hass, discovery_hash, discovery_payload):\n \"\"\"Update discovery data.\"\"\"\n trigger_info = hass.data[DATA_MQTT_DEBUG_INFO][\"triggers\"][discovery_hash]\n trigger_info[\"discovery_data\"][ATTR_DISCOVERY_PAYLOAD] = discovery_payload\n\n\ndef remove_trigger_discovery_data(hass, discovery_hash):\n \"\"\"Remove discovery data.\"\"\"\n hass.data[DATA_MQTT_DEBUG_INFO][\"triggers\"][discovery_hash][\"discovery_data\"] = None\n\n\nasync def info_for_device(hass, device_id):\n \"\"\"Get debug info for a device.\"\"\"\n mqtt_info = {\"entities\": [], \"triggers\": []}\n entity_registry = await hass.helpers.entity_registry.async_get_registry()\n\n entries = hass.helpers.entity_registry.async_entries_for_device(\n entity_registry, device_id\n )\n mqtt_debug_info = hass.data.setdefault(\n DATA_MQTT_DEBUG_INFO, {\"entities\": {}, \"triggers\": {}}\n )\n for entry in entries:\n if entry.entity_id not in mqtt_debug_info[\"entities\"]:\n continue\n\n entity_info = mqtt_debug_info[\"entities\"][entry.entity_id]\n subscriptions = [\n {\n \"topic\": topic,\n \"messages\": [\n {\n \"payload\": msg.payload,\n \"qos\": msg.qos,\n \"retain\": msg.retain,\n \"time\": msg.timestamp,\n \"topic\": msg.topic,\n }\n for msg in list(subscription[\"messages\"])\n ],\n }\n for topic, subscription in entity_info[\"subscriptions\"].items()\n ]\n discovery_data = {\n \"topic\": entity_info[\"discovery_data\"].get(ATTR_DISCOVERY_TOPIC, \"\"),\n \"payload\": entity_info[\"discovery_data\"].get(ATTR_DISCOVERY_PAYLOAD, \"\"),\n }\n mqtt_info[\"entities\"].append(\n {\n \"entity_id\": entry.entity_id,\n \"subscriptions\": subscriptions,\n \"discovery_data\": discovery_data,\n }\n )\n\n for trigger in mqtt_debug_info[\"triggers\"].values():\n if trigger[\"device_id\"] != device_id:\n continue\n\n discovery_data = {\n \"topic\": trigger[\"discovery_data\"][ATTR_DISCOVERY_TOPIC],\n \"payload\": trigger[\"discovery_data\"][ATTR_DISCOVERY_PAYLOAD],\n }\n mqtt_info[\"triggers\"].append({\"discovery_data\": discovery_data})\n\n return mqtt_info\n\n"}}},{"rowIdx":390,"cells":{"text":{"kind":"string","value":"\nimport os\nfrom os import path as op\n\ntitle = 'mne-python flow diagram'\n\nfont_face = 'Arial'\nnode_size = 12\nnode_small_size = 9\nedge_size = 9\nsensor_color = '#7bbeca'\nsource_color = '#ff6347'\n\nlegend = \"\"\"\n<\n\n\n\n
\nSensor (M/EEG) space
\nSource (brain) space
>\"\"\" % (edge_size, sensor_color, source_color)\nlegend = ''.join(legend.split('\\n'))\n\nnodes = dict(\n T1='T1',\n flashes='Flash5/30',\n trans='Head-MRI trans',\n recon='Freesurfer surfaces',\n bem='BEM',\n src='Source space\\nmne.SourceSpaces',\n cov='Noise covariance\\nmne.Covariance',\n fwd='Forward solution\\nmne.forward.Forward',\n inv='Inverse operator\\nmne.minimum_norm.InverseOperator',\n stc='Source estimate\\nmne.SourceEstimate',\n raw='Raw data\\nmne.io.Raw',\n epo='Epoched data\\nmne.Epochs',\n evo='Averaged data\\nmne.Evoked',\n pre='Preprocessed data\\nmne.io.Raw',\n legend=legend,\n)\n\nsensor_space = ('raw', 'pre', 'epo', 'evo', 'cov')\nsource_space = ('src', 'stc', 'bem', 'flashes', 'recon', 'T1')\n\nedges = (\n ('T1', 'recon'),\n ('flashes', 'bem'),\n ('recon', 'bem'),\n ('recon', 'src', 'mne.setup_source_space'),\n ('src', 'fwd'),\n ('bem', 'fwd'),\n ('trans', 'fwd', 'mne.make_forward_solution'),\n ('fwd', 'inv'),\n ('cov', 'inv', 'mne.make_inverse_operator'),\n ('inv', 'stc'),\n ('evo', 'stc', 'mne.minimum_norm.apply_inverse'),\n ('raw', 'pre', 'raw.filter\\n'\n 'mne.preprocessing.ICA\\n'\n 'mne.preprocessing.compute_proj_eog\\n'\n 'mne.preprocessing.compute_proj_ecg\\n'\n '...'),\n ('pre', 'epo', 'mne.Epochs'),\n ('epo', 'evo', 'epochs.average'),\n ('epo', 'cov', 'mne.compute_covariance'),\n)\n\nsubgraphs = (\n [('T1', 'flashes', 'recon', 'bem', 'src'),\n (''\n 'Freesurfer / MNE-C>' % node_small_size)],\n)\n\n\ndef setup(app):\n app.connect('builder-inited', generate_flow_diagram)\n app.add_config_value('make_flow_diagram', True, 'html')\n\n\ndef setup_module():\n # HACK: Stop nosetests running setup() above\n pass\n\n\ndef generate_flow_diagram(app):\n out_dir = op.join(app.builder.outdir, '_static')\n if not op.isdir(out_dir):\n os.makedirs(out_dir)\n out_fname = op.join(out_dir, 'mne-python_flow.svg')\n make_flow_diagram = app is None or \\\n bool(app.builder.config.make_flow_diagram)\n if not make_flow_diagram:\n print('Skipping flow diagram, webpage will have a missing image')\n return\n\n import pygraphviz as pgv\n g = pgv.AGraph(name=title, directed=True)\n\n for key, label in nodes.items():\n label = label.split('\\n')\n if len(label) > 1:\n label[0] = ('<' % node_size\n + label[0] + '')\n for li in range(1, len(label)):\n label[li] = ('' % node_small_size\n + label[li] + '')\n label[-1] = label[-1] + '>'\n label = '
'.join(label)\n else:\n label = label[0]\n g.add_node(key, shape='plaintext', label=label)\n\n # Create and customize nodes and edges\n for edge in edges:\n g.add_edge(*edge[:2])\n e = g.get_edge(*edge[:2])\n if len(edge) > 2:\n e.attr['label'] = ('<' +\n '
<br/>'.join(edge[2].split('\\n')) +\n '
>')\n e.attr['fontsize'] = edge_size\n\n # Change colors\n for these_nodes, color in zip((sensor_space, source_space),\n (sensor_color, source_color)):\n for node in these_nodes:\n g.get_node(node).attr['fillcolor'] = color\n g.get_node(node).attr['style'] = 'filled'\n\n # Create subgraphs\n for si, subgraph in enumerate(subgraphs):\n g.add_subgraph(subgraph[0], 'cluster%s' % si,\n label=subgraph[1], color='black')\n\n # Format (sub)graphs\n for gr in g.subgraphs() + [g]:\n for x in [gr.node_attr, gr.edge_attr]:\n x['fontname'] = font_face\n g.node_attr['shape'] = 'box'\n\n # A couple of special ones\n for ni, node in enumerate(('fwd', 'inv', 'trans')):\n node = g.get_node(node)\n node.attr['gradientangle'] = 270\n colors = (source_color, sensor_color)\n colors = colors if ni == 0 else colors[::-1]\n node.attr['fillcolor'] = ':'.join(colors)\n node.attr['style'] = 'filled'\n del node\n g.get_node('legend').attr.update(shape='plaintext', margin=0, rank='sink')\n # put legend in same rank/level as inverse\n leg = g.add_subgraph(['legend', 'inv'], name='legendy')\n leg.graph_attr['rank'] = 'same'\n\n g.layout('dot')\n g.draw(out_fname, format='svg')\n return g\n\n\n# This is useful for testing/iterating to see what the result looks like\nif __name__ == '__main__':\n from mne.io.constants import Bunch\n out_dir = op.abspath(op.join(op.dirname(__file__), '..', '_build', 'html'))\n app = Bunch(builder=Bunch(outdir=out_dir,\n config=Bunch(make_flow_diagram=True)))\n g = generate_flow_diagram(app)\n\n"}}},{"rowIdx":391,"cells":{"text":{"kind":"string","value":"\nfrom vine import promise, transform\n\nfrom kombu.asynchronous.aws.ext import AWSRequest, get_response\n\nfrom kombu.asynchronous.http import Headers, Request, get_client\n\nimport io\n\ntry: # pragma: no cover\n from email import message_from_bytes\n from email.mime.message import MIMEMessage\n\n # py3\n def message_from_headers(hdr): # noqa\n bs = \"\\r\\n\".join(\"{}: {}\".format(*h) for h in hdr)\n return message_from_bytes(bs.encode())\n\nexcept ImportError: # pragma: no cover\n from mimetools import Message as MIMEMessage # noqa\n\n # py2\n def message_from_headers(hdr): # noqa\n return io.BytesIO(b'\\r\\n'.join(\n b'{}: {}'.format(*h) for h in hdr\n ))\n\n__all__ = (\n 'AsyncHTTPSConnection', 'AsyncConnection',\n)\n\n\nclass AsyncHTTPResponse:\n \"\"\"Async HTTP Response.\"\"\"\n\n def __init__(self, response):\n self.response = response\n self._msg = None\n self.version = 10\n\n def read(self, *args, **kwargs):\n return self.response.body\n\n def getheader(self, name, default=None):\n return self.response.headers.get(name, default)\n\n def getheaders(self):\n return list(self.response.headers.items())\n\n @property\n def msg(self):\n if self._msg is None:\n self._msg = MIMEMessage(message_from_headers(self.getheaders()))\n return self._msg\n\n @property\n def status(self):\n return self.response.code\n\n @property\n def reason(self):\n if self.response.error:\n return self.response.error.message\n return ''\n\n def __repr__(self):\n return repr(self.response)\n\n\nclass AsyncHTTPSConnection:\n \"\"\"Async HTTP Connection.\"\"\"\n\n Request = Request\n Response = AsyncHTTPResponse\n\n method = 'GET'\n path = '/'\n body = None\n default_ports = {'http': 80, 'https': 443}\n\n def __init__(self, strict=None, timeout=20.0, http_client=None):\n self.headers = []\n self.timeout = timeout\n self.strict = strict\n self.http_client = http_client or get_client()\n\n def request(self, method, path, body=None, headers=None):\n self.path = 
path\n self.method = method\n if body is not None:\n try:\n read = body.read\n except AttributeError:\n self.body = body\n else:\n self.body = read()\n if headers is not None:\n self.headers.extend(list(headers.items()))\n\n def getrequest(self):\n headers = Headers(self.headers)\n return self.Request(self.path, method=self.method, headers=headers,\n body=self.body, connect_timeout=self.timeout,\n request_timeout=self.timeout, validate_cert=False)\n\n def getresponse(self, callback=None):\n request = self.getrequest()\n request.then(transform(self.Response, callback))\n return self.http_client.add_request(request)\n\n def set_debuglevel(self, level):\n pass\n\n def connect(self):\n pass\n\n def close(self):\n pass\n\n def putrequest(self, method, path):\n self.method = method\n self.path = path\n\n def putheader(self, header, value):\n self.headers.append((header, value))\n\n def endheaders(self):\n pass\n\n def send(self, data):\n if self.body:\n self.body += data\n else:\n self.body = data\n\n def __repr__(self):\n return f''\n\n\nclass AsyncConnection:\n \"\"\"Async AWS Connection.\"\"\"\n\n def __init__(self, sqs_connection, http_client=None, **kwargs): # noqa\n self.sqs_connection = sqs_connection\n self._httpclient = http_client or get_client()\n\n def get_http_connection(self):\n return AsyncHTTPSConnection(http_client=self._httpclient)\n\n def _mexe(self, request, sender=None, callback=None):\n callback = callback or promise()\n conn = self.get_http_connection()\n\n if callable(sender):\n sender(conn, request.method, request.path, request.body,\n request.headers, callback)\n else:\n conn.request(request.method, request.url,\n request.body, request.headers)\n conn.getresponse(callback=callback)\n return callback\n\n\nclass AsyncAWSQueryConnection(AsyncConnection):\n \"\"\"Async AWS Query Connection.\"\"\"\n\n STATUS_CODE_OK = 200\n STATUS_CODE_REQUEST_TIMEOUT = 408\n STATUS_CODE_NETWORK_CONNECT_TIMEOUT_ERROR = 599\n STATUS_CODE_INTERNAL_ERROR = 500\n STATUS_CODE_BAD_GATEWAY = 502\n STATUS_CODE_SERVICE_UNAVAILABLE_ERROR = 503\n STATUS_CODE_GATEWAY_TIMEOUT = 504\n\n STATUS_CODES_SERVER_ERRORS = (\n STATUS_CODE_INTERNAL_ERROR,\n STATUS_CODE_BAD_GATEWAY,\n STATUS_CODE_SERVICE_UNAVAILABLE_ERROR\n )\n\n STATUS_CODES_TIMEOUT = (\n STATUS_CODE_REQUEST_TIMEOUT,\n STATUS_CODE_NETWORK_CONNECT_TIMEOUT_ERROR,\n STATUS_CODE_GATEWAY_TIMEOUT\n )\n\n def __init__(self, sqs_connection, http_client=None,\n http_client_params=None, **kwargs):\n if not http_client_params:\n http_client_params = {}\n AsyncConnection.__init__(self, sqs_connection, http_client,\n **http_client_params)\n\n def make_request(self, operation, params_, path, verb, callback=None): # noqa\n params = params_.copy()\n if operation:\n params['Action'] = operation\n signer = self.sqs_connection._request_signer # noqa\n\n # defaults for non-get\n signing_type = 'standard'\n param_payload = {'data': params}\n if verb.lower() == 'get':\n # query-based opts\n signing_type = 'presignurl'\n param_payload = {'params': params}\n\n request = AWSRequest(method=verb, url=path, **param_payload)\n signer.sign(operation, request, signing_type=signing_type)\n prepared_request = request.prepare()\n\n return self._mexe(prepared_request, callback=callback)\n\n def get_list(self, operation, params, markers, path='/', parent=None, verb='POST', callback=None): # noqa\n return self.make_request(\n operation, params, path, verb,\n callback=transform(\n self._on_list_ready, callback, parent or self, markers,\n operation\n ),\n )\n\n def get_object(self, 
operation, params, path='/', parent=None, verb='GET', callback=None): # noqa\n return self.make_request(\n operation, params, path, verb,\n callback=transform(\n self._on_obj_ready, callback, parent or self, operation\n ),\n )\n\n def get_status(self, operation, params, path='/', parent=None, verb='GET', callback=None): # noqa\n return self.make_request(\n operation, params, path, verb,\n callback=transform(\n self._on_status_ready, callback, parent or self, operation\n ),\n )\n\n def _on_list_ready(self, parent, markers, operation, response): # noqa\n service_model = self.sqs_connection.meta.service_model\n if response.status == self.STATUS_CODE_OK:\n _, parsed = get_response(\n service_model.operation_model(operation), response.response\n )\n return parsed\n elif (\n response.status in self.STATUS_CODES_TIMEOUT or\n response.status in self.STATUS_CODES_SERVER_ERRORS\n ):\n # When the server returns a timeout or 50X server error,\n # the response is interpreted as an empty list.\n # This prevents hanging the Celery worker.\n return []\n else:\n raise self._for_status(response, response.read())\n\n def _on_obj_ready(self, parent, operation, response): # noqa\n service_model = self.sqs_connection.meta.service_model\n if response.status == self.STATUS_CODE_OK:\n _, parsed = get_response(\n service_model.operation_model(operation), response.response\n )\n return parsed\n else:\n raise self._for_status(response, response.read())\n\n def _on_status_ready(self, parent, operation, response): # noqa\n service_model = self.sqs_connection.meta.service_model\n if response.status == self.STATUS_CODE_OK:\n httpres, _ = get_response(\n service_model.operation_model(operation), response.response\n )\n return httpres.code\n else:\n raise self._for_status(response, response.read())\n\n def _for_status(self, response, body):\n context = 'Empty body' if not body else 'HTTP Error'\n return Exception(\"Request {} HTTP {} {} ({})\".format(\n context, response.status, response.reason, body\n ))\n\n"}}},{"rowIdx":392,"cells":{"text":{"kind":"string","value":"\nimport numpy as np\nimport unittest\n\nfrom chainer import testing\nfrom chainer.testing import attr\n\nfrom chainercv.datasets import ade20k_semantic_segmentation_label_names\nfrom chainercv.datasets import ADE20KSemanticSegmentationDataset\nfrom chainercv.datasets import ADE20KTestImageDataset\nfrom chainercv.utils import assert_is_semantic_segmentation_dataset\nfrom chainercv.utils.testing.assertions.assert_is_image import assert_is_image\n\n\n@testing.parameterize(\n {'split': 'train'},\n {'split': 'val'},\n)\nclass TestADE20KSemanticSegmentationDataset(unittest.TestCase):\n\n def setUp(self):\n self.dataset = ADE20KSemanticSegmentationDataset(split=self.split)\n\n @attr.slow\n def test_ade20k_dataset(self):\n assert_is_semantic_segmentation_dataset(\n self.dataset, len(ade20k_semantic_segmentation_label_names),\n n_example=10)\n\n\nclass TestADE20KTestImageDataset(unittest.TestCase):\n\n def setUp(self):\n self.dataset = ADE20KTestImageDataset()\n\n @attr.slow\n def test_ade20k_dataset(self):\n indices = np.random.permutation(np.arange(len(self.dataset)))\n for i in indices[:10]:\n img = self.dataset[i]\n assert_is_image(img, color=True)\n\n\ntesting.run_module(__name__, __file__)\n\n"}}},{"rowIdx":393,"cells":{"text":{"kind":"string","value":"\nfrom django.test import TestCase\n\nfrom weblate.auth.models import User\nfrom weblate.utils.markdown import get_mention_users, render_markdown\n\n\nclass MarkdownTestCase(TestCase):\n def test_link(self):\n 
self.assertEqual(\n            '<p><a href=\"https://weblate.org/\">link</a></p>
\n',\n            render_markdown(\"[link](https://weblate.org/)\"),\n        )\n\n    def test_js(self):\n        self.assertEqual(\n            \"<p>link</p>
\n\", render_markdown('<a href=\"javascript:alert()\">link</a>')\n        )\n\n    def test_intra_emphasis(self):\n        self.assertEqual(\n            \"<p>foo<strong>bar</strong>baz</p>
\n\", render_markdown(\"foo**bar**baz\")\n        )\n\n\nclass MarkdownMentionTestCase(TestCase):\n    def test_mention(self):\n        User.objects.create(username=\"testuser\", full_name=\"Full Name\")\n        self.assertEqual(\n            '<p><strong>@testuser</strong> really?</p>
\\n',\n render_markdown(\"@testuser really?\"),\n )\n\n def test_get_mentions(self):\n user = User.objects.create(username=\"testuser\", full_name=\"Full Name\")\n self.assertEqual(\n {user.pk},\n set(\n get_mention_users(\"@testuser, @invalid, @testuser\").values_list(\n \"pk\", flat=True\n )\n ),\n )\n\n def test_get_mentions_case_insentivite(self):\n user = User.objects.create(username=\"testuser\", full_name=\"Full Name\")\n self.assertEqual(\n {user.pk},\n set(\n get_mention_users(\"@testUser, @invalid, @Testuser\").values_list(\n \"pk\", flat=True\n )\n ),\n )\n\n"}}},{"rowIdx":394,"cells":{"text":{"kind":"string","value":"\nimport sys\nfrom contextlib import contextmanager\n\ntry:\n import urlparse\nexcept ImportError:\n # Python 3\n import urllib.parse as urlparse\n\n\n@contextmanager\ndef webserver(app, port=0, host=None):\n \"\"\"Context manager entry point for the 'with' statement.\n\n Pass 0 as port number to dynamically allocate a free port.\n\n Usage:\n\n with webserver(wsgi_app_function, 8080) as host_url:\n do_ws_calls(host_url)\n \"\"\"\n server = build_web_server(app, port, host or '127.0.0.1')\n host, port = server.socket.getsockname()\n\n import threading\n thread = threading.Thread(target=server.serve_forever,\n kwargs={'poll_interval': 0.5})\n thread.setDaemon(True)\n thread.start()\n try:\n yield 'http://%s:%s/' % (host, port) # yield control to 'with' body\n finally:\n server.shutdown()\n server.server_close()\n thread.join(timeout=1)\n\n\ntry:\n from SocketServer import ThreadingMixIn\nexcept ImportError:\n # Python 3\n from socketserver import ThreadingMixIn\n\nimport wsgiref.simple_server as wsgiserver\nclass WebServer(wsgiserver.WSGIServer, ThreadingMixIn):\n \"\"\"A web server that starts a new thread for each request.\n \"\"\"\n\n\nclass _RequestHandler(wsgiserver.WSGIRequestHandler):\n def get_stderr(self):\n # don't write to stderr\n return sys.stdout\n\n def log_message(self, format, *args):\n # message = \"wsmock(%s) %s\" % (self.address_string(), format % args)\n pass # don't log messages\n\n\ndef build_web_server(app, port, host=None):\n server = wsgiserver.make_server(\n host or '', port, app,\n server_class=WebServer,\n handler_class=_RequestHandler)\n return server\n\n\nclass HTTPRequestCollector(object):\n def __init__(self, response_data, response_code=200, headers=()):\n self.requests = []\n self.response_code = response_code\n self.response_data = response_data\n self.headers = list(headers or ())\n\n def __call__(self, environ, start_response):\n self.requests.append((\n environ.get('PATH_INFO'),\n urlparse.parse_qsl(environ.get('QUERY_STRING'))))\n start_response('%s OK' % self.response_code, self.headers)\n return [self.response_data]\n\n"}}},{"rowIdx":395,"cells":{"text":{"kind":"string","value":"\nimport html\nimport functools\nimport io\nimport os\nimport re\nimport sys\nimport uuid\nimport email.policy\nimport email.generator\nimport email.encoders\nimport email.mime.multipart\nimport email.message\nimport quopri\nfrom typing import MutableMapping, Set, Tuple\n\nimport attr\nfrom PyQt5.QtCore import QUrl\n\nfrom qutebrowser.browser import downloads\nfrom qutebrowser.browser.webkit import webkitelem\nfrom qutebrowser.utils import log, objreg, message, usertypes, utils, urlutils\nfrom qutebrowser.extensions import interceptors\n\n\n@attr.s\nclass _File:\n\n content = attr.ib()\n content_type = attr.ib()\n content_location = attr.ib()\n transfer_encoding = attr.ib()\n\n\n_CSS_URL_PATTERNS = [re.compile(x) for x in [\n 
r\"@import\\s+'(?P[^']+)'\",\n r'@import\\s+\"(?P[^\"]+)\"',\n r'''url\\((?P[^'\"][^)]*)\\)''',\n r'url\\(\"(?P[^\"]+)\"\\)',\n r\"url\\('(?P[^']+)'\\)\",\n]]\n\n\ndef _get_css_imports_regex(data):\n \"\"\"Return all assets that are referenced in the given CSS document.\n\n The returned URLs are relative to the stylesheet's URL.\n\n Args:\n data: The content of the stylesheet to scan as string.\n \"\"\"\n urls = []\n for pattern in _CSS_URL_PATTERNS:\n for match in pattern.finditer(data):\n url = match.group(\"url\")\n if url:\n urls.append(url)\n return urls\n\n\ndef _get_css_imports_cssutils(data, inline=False):\n \"\"\"Return all assets that are referenced in the given CSS document.\n\n The returned URLs are relative to the stylesheet's URL.\n\n Args:\n data: The content of the stylesheet to scan as string.\n inline: True if the argument is an inline HTML style attribute.\n \"\"\"\n try:\n import cssutils\n except ImportError:\n return None\n\n # We don't care about invalid CSS data, this will only litter the log\n # output with CSS errors\n parser = cssutils.CSSParser(loglevel=100,\n fetcher=lambda url: (None, \"\"), validate=False)\n if not inline:\n sheet = parser.parseString(data)\n return list(cssutils.getUrls(sheet))\n else:\n urls = []\n declaration = parser.parseStyle(data)\n # prop = background, color, margin, ...\n for prop in declaration:\n # value = red, 10px, url(foobar), ...\n for value in prop.propertyValue:\n if isinstance(value, cssutils.css.URIValue):\n if value.uri:\n urls.append(value.uri)\n return urls\n\n\ndef _get_css_imports(data, inline=False):\n \"\"\"Return all assets that are referenced in the given CSS document.\n\n The returned URLs are relative to the stylesheet's URL.\n\n Args:\n data: The content of the stylesheet to scan as string.\n inline: True if the argument is an inline HTML style attribute.\n \"\"\"\n imports = _get_css_imports_cssutils(data, inline)\n if imports is None:\n imports = _get_css_imports_regex(data)\n return imports\n\n\ndef _check_rel(element):\n \"\"\"Return true if the element's rel attribute fits our criteria.\n\n rel has to contain 'stylesheet' or 'icon'. 
Also returns True if the rel\n attribute is unset.\n\n Args:\n element: The WebElementWrapper which should be checked.\n \"\"\"\n if 'rel' not in element:\n return True\n must_have = {'stylesheet', 'icon'}\n rels = [rel.lower() for rel in element['rel'].split(' ')]\n return any(rel in rels for rel in must_have)\n\n\ndef _encode_quopri_mhtml(msg):\n \"\"\"Encode the message's payload in quoted-printable.\n\n Substitute for quopri's default 'encode_quopri' method, which needlessly\n encodes all spaces and tabs, instead of only those at the end on the\n line.\n\n Args:\n msg: Email message to quote.\n \"\"\"\n orig = msg.get_payload(decode=True)\n encdata = quopri.encodestring(orig, quotetabs=False)\n msg.set_payload(encdata)\n msg['Content-Transfer-Encoding'] = 'quoted-printable'\n\n\nMHTMLPolicy = email.policy.default.clone(linesep='\\r\\n', max_line_length=0)\n\n\n# Encode the file using base64 encoding.\nE_BASE64 = email.encoders.encode_base64\n\n\n# Encode the file using MIME quoted-printable encoding.\nE_QUOPRI = _encode_quopri_mhtml\n\n\nclass MHTMLWriter:\n\n \"\"\"A class for outputting multiple files to an MHTML document.\n\n Attributes:\n root_content: The root content as bytes.\n content_location: The url of the page as str.\n content_type: The MIME-type of the root content as str.\n _files: Mapping of location->_File object.\n \"\"\"\n\n def __init__(self, root_content, content_location, content_type):\n self.root_content = root_content\n self.content_location = content_location\n self.content_type = content_type\n self._files: MutableMapping[QUrl, _File] = {}\n\n def add_file(self, location, content, content_type=None,\n transfer_encoding=E_QUOPRI):\n \"\"\"Add a file to the given MHTML collection.\n\n Args:\n location: The original location (URL) of the file.\n content: The binary content of the file.\n content_type: The MIME-type of the content (if available)\n transfer_encoding: The transfer encoding to use for this file.\n \"\"\"\n self._files[location] = _File(\n content=content, content_type=content_type,\n content_location=location, transfer_encoding=transfer_encoding,\n )\n\n def write_to(self, fp):\n \"\"\"Output the MHTML file to the given file-like object.\n\n Args:\n fp: The file-object, opened in \"wb\" mode.\n \"\"\"\n msg = email.mime.multipart.MIMEMultipart(\n 'related', '---=_qute-{}'.format(uuid.uuid4()))\n\n root = self._create_root_file()\n msg.attach(root)\n\n for _, file_data in sorted(self._files.items()):\n msg.attach(self._create_file(file_data))\n\n gen = email.generator.BytesGenerator(fp, policy=MHTMLPolicy)\n gen.flatten(msg)\n\n def _create_root_file(self):\n \"\"\"Return the root document as MIMEMultipart.\"\"\"\n root_file = _File(\n content=self.root_content, content_type=self.content_type,\n content_location=self.content_location, transfer_encoding=E_QUOPRI,\n )\n return self._create_file(root_file)\n\n def _create_file(self, f):\n \"\"\"Return the single given file as email.message.Message.\"\"\"\n msg = email.message.Message()\n msg['MIME-Version'] = '1.0'\n msg['Content-Location'] = f.content_location\n if f.content_type:\n msg.set_type(f.content_type)\n msg.set_payload(f.content)\n f.transfer_encoding(msg)\n return msg\n\n\n_PendingDownloadType = Set[Tuple[QUrl, downloads.AbstractDownloadItem]]\n\n\nclass _Downloader:\n\n \"\"\"A class to download whole websites.\n\n Attributes:\n tab: The AbstractTab which contains the website that will be saved.\n target: DownloadTarget where the file should be downloaded to.\n writer: The MHTMLWriter 
object which is used to save the page.\n loaded_urls: A set of QUrls of finished asset downloads.\n pending_downloads: A set of unfinished (url, DownloadItem) tuples.\n _finished_file: A flag indicating if the file has already been\n written.\n _used: A flag indicating if the downloader has already been used.\n \"\"\"\n\n def __init__(self, tab, target):\n self.tab = tab\n self.target = target\n self.writer = None\n self.loaded_urls = {tab.url()}\n self.pending_downloads: _PendingDownloadType = set()\n self._finished_file = False\n self._used = False\n\n def run(self):\n \"\"\"Download and save the page.\n\n The object must not be reused, you should create a new one if\n you want to download another page.\n \"\"\"\n if self._used:\n raise ValueError(\"Downloader already used\")\n self._used = True\n web_url = self.tab.url()\n\n # FIXME:qtwebengine have a proper API for this\n page = self.tab._widget.page() # pylint: disable=protected-access\n web_frame = page.mainFrame()\n\n self.writer = MHTMLWriter(\n web_frame.toHtml().encode('utf-8'),\n content_location=urlutils.encoded_url(web_url),\n # I've found no way of getting the content type of a QWebView, but\n # since we're using .toHtml, it's probably safe to say that the\n # content-type is HTML\n content_type='text/html; charset=\"UTF-8\"',\n )\n # Currently only downloading (stylesheets),