code
stringlengths 6
947k
| repo_name
stringlengths 5
100
| path
stringlengths 4
226
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
|
|---|---|---|---|---|---|
# Package version string.
__version__ = "1.0.3"
|
flashingpumpkin/filerotate
|
filerotate/__version__.py
|
Python
|
mit
| 21
|
"""@file dblstm_capsnet.py
contains the DBLSTMCapsNet class"""
import tensorflow as tf
import model
from nabu.neuralnetworks.components import layer, ops
import pdb
class DBLSTMCapsNet(model.Model):
    """A capsule network with bidirectional recurrent LSTM."""

    def _get_outputs(self, inputs, input_seq_length, is_training):
        """
        Create the variables and do the forward computation.

        Args:
            inputs: the inputs to the neural network, this is a list of
                [batch_size x time x ...] tensors
            input_seq_length: The sequence lengths of the input utterances, this
                is a [batch_size] vector
            is_training: whether or not the network is in training mode

        Returns:
            - output, which is a [batch_size x time x ...] tensor
        """
        num_capsules = int(self.conf['num_capsules'])
        capsule_dim = int(self.conf['capsule_dim'])
        routing_iters = int(self.conf['routing_iters'])

        # Activation applied to the recurrent routing probabilities. Default to
        # None both when the option is absent and when it holds an unrecognised
        # value (previously the variable was left unassigned in the latter
        # case, causing a NameError when the capsule layer was built).
        recurrent_probability_fn = None
        if 'recurrent_probability_fn' in self.conf:
            if self.conf['recurrent_probability_fn'] == 'sigmoid':
                recurrent_probability_fn = tf.nn.sigmoid
            elif self.conf['recurrent_probability_fn'] == 'unit':
                recurrent_probability_fn = ops.unit_activation

        # Boolean options are stored as strings in the config; absent keys
        # keep the defaults below.
        if 'accumulate_input_logits' in self.conf and self.conf['accumulate_input_logits'] == 'False':
            accumulate_input_logits = False
        else:
            accumulate_input_logits = True
        if 'accumulate_state_logits' in self.conf and self.conf['accumulate_state_logits'] == 'False':
            accumulate_state_logits = False
        else:
            accumulate_state_logits = True
        if 'logits_prior' in self.conf and self.conf['logits_prior'] == 'True':
            logits_prior = True
        else:
            logits_prior = False
        gates_fc = self.conf['gates_fc'] == 'True'
        use_output_matrix = self.conf['use_output_matrix'] == 'True'

        # code not available for multiple inputs!!
        # (Previously this raised a bare string, which is a TypeError at
        # runtime; raise a proper exception instead.)
        if len(inputs) > 1:
            raise ValueError(
                'The implementation of CapsNet expects 1 input and not %d'
                % len(inputs))
        inputs = inputs[0]

        with tf.variable_scope(self.scope):
            if is_training and float(self.conf['input_noise']) > 0:
                # Gaussian input noise as regularisation during training.
                inputs = inputs + tf.random_normal(
                    tf.shape(inputs),
                    stddev=float(self.conf['input_noise']))

            # Primary capsule.
            with tf.variable_scope('primary_capsule'):
                output = tf.identity(inputs, 'inputs')
                input_seq_length = tf.identity(input_seq_length,
                                               'input_seq_length')

                # First layer is a simple bidirectional rnn layer, without
                # activation (squash activation will be applied later).
                primary_output_dim = num_capsules * capsule_dim
                primary_capsules_layer = layer.BLSTMLayer(
                    num_units=primary_output_dim, linear_out_flag=True)
                primary_capsules = primary_capsules_layer(
                    output, input_seq_length)
                # The BLSTM emits forward+backward features, hence the
                # factor 2 on the capsule axis.
                primary_capsules = tf.reshape(
                    primary_capsules,
                    [output.shape[0].value, tf.shape(output)[1],
                     num_capsules * 2, capsule_dim])
                primary_capsules = ops.squash(primary_capsules)
                output = tf.identity(primary_capsules, 'primary_capsules')

            # non-primary capsules
            for l in range(1, int(self.conf['num_layers'])):
                with tf.variable_scope('layer%d' % l):
                    # a capsule layer
                    caps_blstm_layer = layer.BLSTMCapsuleLayer(
                        num_capsules=num_capsules, capsule_dim=capsule_dim,
                        routing_iters=routing_iters,
                        recurrent_probability_fn=recurrent_probability_fn,
                        logits_prior=logits_prior,
                        accumulate_input_logits=accumulate_input_logits,
                        accumulate_state_logits=accumulate_state_logits,
                        gates_fc=gates_fc,
                        use_output_matrix=use_output_matrix)
                    output = caps_blstm_layer(output, input_seq_length)
                    # NOTE(review): dropout is applied after each capsule
                    # layer; the flattened source indentation is ambiguous,
                    # confirm this matches the intended placement.
                    if is_training and float(self.conf['dropout']) < 1:
                        output = tf.nn.dropout(output, float(self.conf['dropout']))

            # Flatten the capsule axes back into one feature dimension.
            output_dim = num_capsules * 2 * capsule_dim
            output = tf.reshape(
                output,
                [output.shape[0].value, tf.shape(output)[1], output_dim])

        return output
|
JeroenZegers/Nabu-MSSS
|
nabu/neuralnetworks/models/dblstm_capsnet.py
|
Python
|
mit
| 3,928
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Filters(Model):
    """Key-value filters that may be specified to limit the results returned
    by the API. The only filter currently understood is site.

    :param site: The URL of the site to return similar images and similar
     products from. (e.g., "www.bing.com", "bing.com").
    :type site: str
    """

    _attribute_map = {
        'site': {'key': 'site', 'type': 'str'},
    }

    def __init__(self, *, site: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.site = site
|
Azure/azure-sdk-for-python
|
sdk/cognitiveservices/azure-cognitiveservices-search-visualsearch/azure/cognitiveservices/search/visualsearch/models/filters_py3.py
|
Python
|
mit
| 1,058
|
from ajax_select import get_lookup
from django import forms
from django.core.urlresolvers import reverse
from django.forms.util import flatatt
from django.template.defaultfilters import escapejs
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
class AutoCompleteSelectWidget(forms.widgets.TextInput):
    """ widget to select a model """

    # URL of an admin "add another" popup; set by _check_can_add() when the
    # user is allowed to add the related model.
    add_link = None

    def __init__(self,
            channel,
            help_text='',
            *args, **kw):
        # NOTE(review): super() is invoked on forms.widgets.TextInput, so this
        # skips TextInput.__init__ and runs Widget.__init__ directly --
        # presumably intentional, but worth confirming.
        super(forms.widgets.TextInput, self).__init__(*args, **kw)
        # Name of the ajax_select lookup channel used for searching.
        self.channel = channel
        self.help_text = help_text

    def render(self, name, value, attrs=None):
        """Render the text input plus the javascript autocomplete hookup."""
        value = value or ''
        final_attrs = self.build_attrs(attrs, name=name)
        # A 'pk' attr, if supplied, overrides the html id used by the JS.
        self.html_id = final_attrs.pop('pk', name)
        lookup = get_lookup(self.channel)
        if value:
            # Display the formatted repr of the currently selected object.
            current_result = mark_safe(lookup.format_result( lookup.get_objects([value])[0] ))
        else:
            current_result = ''
        context = {
            'name': name,
            'html_id' : self.html_id,
            'lookup_url': reverse('ajax_lookup',kwargs={'channel':self.channel}),
            'current_id': value,
            'current_result': current_result,
            'help_text': self.help_text,
            'extra_attrs': mark_safe(flatatt(final_attrs)),
            # Ids containing '-' are not valid in JS function names.
            'func_slug': self.html_id.replace("-",""),
            'add_link' : self.add_link,
            'admin_media_prefix' : settings.ADMIN_MEDIA_PREFIX
        }
        # Channel-specific template wins over the generic fallback.
        return mark_safe(render_to_string(('autocompleteselect_%s.html' % self.channel, 'autocompleteselect.html'),context))

    def value_from_datadict(self, data, files, name):
        """Return the submitted primary key as a (Python 2) long, or None."""
        got = data.get(name, None)
        if got:
            return long(got)
        else:
            return None
class AutoCompleteSelectField(forms.fields.CharField):
    """ form field to select a model for a ForeignKey db field """

    # Name of the ajax_select lookup channel used for searching.
    channel = None

    def __init__(self, channel, *args, **kwargs):
        self.channel = channel
        widget = kwargs.get("widget", False)
        # Force our autocomplete widget unless the caller supplied one.
        if not widget or not isinstance(widget, AutoCompleteSelectWidget):
            kwargs["widget"] = AutoCompleteSelectWidget(channel=channel,help_text=kwargs.get('help_text',_('Enter text to search.')))
        super(AutoCompleteSelectField, self).__init__(max_length=255,*args, **kwargs)

    def clean(self, value):
        """Resolve the submitted pk to a model instance via the channel."""
        if value:
            lookup = get_lookup(self.channel)
            objs = lookup.get_objects( [ value] )
            if len(objs) != 1:
                # someone else might have deleted it while you were editing
                # or your channel is faulty
                # out of the scope of this field to do anything more than tell you it doesn't exist
                raise forms.ValidationError(u"The selected item does not exist.")
            return objs[0]
        else:
            if self.required:
                raise forms.ValidationError(self.error_messages['required'])
            return None

    def check_can_add(self,user,model):
        # Delegates to the module-level helper, which may set widget.add_link.
        _check_can_add(self,user,model)
class AutoCompleteSelectMultipleWidget(forms.widgets.SelectMultiple):
    """ widget to select multiple models """

    # URL of an admin "add another" popup; set by _check_can_add() when the
    # user is allowed to add the related model.
    add_link = None

    def __init__(self,
            channel,
            help_text='',
            *args, **kwargs):
        super(AutoCompleteSelectMultipleWidget, self).__init__(*args, **kwargs)
        # Name of the ajax_select lookup channel used for searching.
        self.channel = channel
        self.help_text = help_text

    def render(self, name, value, attrs=None):
        """Render the multi-select input and the JS state for current picks."""
        if value is None:
            value = []
        final_attrs = self.build_attrs(attrs, name=name)
        # A 'pk' attr, if supplied, overrides the html id used by the JS.
        self.html_id = final_attrs.pop('pk', name)
        lookup = get_lookup(self.channel)
        current_name = "" # the text field starts empty
        # eg. value = [3002L, 1194L]
        if value:
            current_ids = "|" + "|".join( str(pk) for pk in value ) + "|" # |pk|pk| of current
        else:
            current_ids = "|"
        objects = lookup.get_objects(value)
        # text repr of currently selected items
        current_repr_json = []
        for obj in objects:
            # NOTE: 'repr' shadows the builtin here; left as-is.
            repr = lookup.format_item(obj)
            # Emit a JS array literal of (escaped label, pk) pairs.
            current_repr_json.append( """new Array("%s",%s)""" % (escapejs(repr),obj.pk) )
        current_reprs = mark_safe("new Array(%s)" % ",".join(current_repr_json))
        context = {
            'name':name,
            'html_id':self.html_id,
            'lookup_url':reverse('ajax_lookup',kwargs={'channel':self.channel}),
            'current':value,
            'current_name':current_name,
            'current_ids':current_ids,
            'current_reprs':current_reprs,
            'help_text':self.help_text,
            'extra_attrs': mark_safe(flatatt(final_attrs)),
            # Ids containing '-' are not valid in JS function names.
            'func_slug': self.html_id.replace("-",""),
            'add_link' : self.add_link,
            'admin_media_prefix' : settings.ADMIN_MEDIA_PREFIX
        }
        # Channel-specific template wins over the generic fallback.
        return mark_safe(render_to_string(('autocompleteselectmultiple_%s.html' % self.channel, 'autocompleteselectmultiple.html'),context))

    def value_from_datadict(self, data, files, name):
        """Parse the |pk|pk| encoded string back into a list of longs."""
        # eg. u'members': [u'|229|4688|190|']
        return [long(val) for val in data.get(name,'').split('|') if val]
class AutoCompleteSelectMultipleField(forms.fields.CharField):
    """ form field to select multiple models for a ManyToMany db field """

    channel = None

    def __init__(self, channel, *args, **kwargs):
        self.channel = channel
        help_text = kwargs.get('help_text', _('Enter text to search.'))
        kwargs['widget'] = AutoCompleteSelectMultipleWidget(channel=channel, help_text=help_text)
        super(AutoCompleteSelectMultipleField, self).__init__(*args, **kwargs)

    def clean(self, value):
        """Return the list of ids produced by the widget, enforcing required."""
        if not value and self.required:
            raise forms.ValidationError(self.error_messages['required'])
        # a list of IDs from widget value_from_datadict
        return value

    def check_can_add(self, user, model):
        # Delegates to the module-level helper, which may set widget.add_link.
        _check_can_add(self, user, model)
class AutoCompleteWidget(forms.TextInput):
    """
    Widget to select a search result and enter the result as raw text in the text input field.
    the user may also simply enter text and ignore any auto complete suggestions.
    """

    # Name of the ajax_select lookup channel used for suggestions.
    channel = None
    help_text = ''
    html_id = ''

    def __init__(self, channel, *args, **kwargs):
        self.channel = channel
        self.help_text = kwargs.pop('help_text', '')
        super(AutoCompleteWidget, self).__init__(*args, **kwargs)

    def render(self, name, value, attrs=None):
        """Render the text input plus the javascript autocomplete hookup."""
        value = value or ''
        final_attrs = self.build_attrs(attrs, name=name)
        # A 'pk' attr, if supplied, overrides the html id used by the JS.
        self.html_id = final_attrs.pop('pk', name)
        # Raw-text widget: current name and id are the same string value.
        context = {
            'current_name': value,
            'current_id': value,
            'help_text': self.help_text,
            'html_id': self.html_id,
            'lookup_url': reverse('ajax_lookup', args=[self.channel]),
            'name': name,
            'extra_attrs':mark_safe(flatatt(final_attrs)),
            # Ids containing '-' are not valid in JS function names.
            'func_slug': self.html_id.replace("-","")
        }
        # Channel-specific template wins over the generic fallback.
        templates = ('autocomplete_%s.html' % self.channel,
                     'autocomplete.html')
        return mark_safe(render_to_string(templates, context))
class AutoCompleteField(forms.CharField):
    """
    Field uses an AutoCompleteWidget to lookup possible completions using a channel and stores raw text (not a foreign key)
    """

    channel = None

    def __init__(self, channel, *args, **kwargs):
        self.channel = channel
        help_text = kwargs.get('help_text', _('Enter text to search.'))
        defaults = {
            'max_length': 255,
            'widget': AutoCompleteWidget(channel, help_text=help_text),
        }
        # Caller-supplied kwargs override the defaults above.
        defaults.update(kwargs)
        super(AutoCompleteField, self).__init__(*args, **defaults)
def _check_can_add(self,user,model):
    """ check if the user can add the model, deferring first to the channel if it implements can_add() \
    else using django's default perm check. \
    if it can add, then enable the widget to show the + link """
    lookup = get_lookup(self.channel)
    try:
        # The channel may implement its own can_add() policy.
        can_add = lookup.can_add(user,model)
    except AttributeError:
        # Fall back to Django's standard "add" model permission.
        # (Previously this checked "view_<model>", which does not express
        # permission to add objects.)
        ctype = ContentType.objects.get_for_model(model)
        can_add = user.has_perm("%s.add_%s" % (ctype.app_label,ctype.model))
    if can_add:
        self.widget.add_link = reverse('add_popup',kwargs={'app_label':model._meta.app_label,'model':model._meta.object_name.lower()})
def autoselect_fields_check_can_add(form,model,user):
    """ check the form's fields for any autoselect fields and enable their widgets with + sign add links if permissions allow"""
    for name,form_field in form.declared_fields.iteritems():
        if isinstance(form_field,(AutoCompleteSelectMultipleField,AutoCompleteSelectField)):
            # Resolve the db field so we know which related model the
            # + link would add (db_field.rel.to).
            db_field = model._meta.get_field_by_name(name)[0]
            form_field.check_can_add(user,db_field.rel.to)
|
caseywstark/colab
|
colab/apps/ajax_select/fields.py
|
Python
|
mit
| 9,217
|
import os
import numpy as np
import creators
from utils import schema_split_helper
# Absolute directory containing this file; dataset paths are resolved
# relative to it.
BASE_DIR = os.path.dirname( os.path.realpath(__file__) )
''' Load datasets from disk. '''
def load_gd(name, schema=False, split=False):
    """Load one of the Gens/Domingos datasets from disk.

    The dataset is stored as three comma-separated files (train/valid/test)
    which are stacked into a single array.

    Args:
        name: base file name of the dataset (without extension).
        schema: passed through to schema_split_helper.
        split: passed through to schema_split_helper.
    """
    gdir = os.path.join(BASE_DIR, 'gens.domingos')
    # Use os.path.join rather than string concatenation for the file paths.
    trn = np.loadtxt(os.path.join(gdir, name + '.ts.data'), delimiter=',')
    vld = np.loadtxt(os.path.join(gdir, name + '.valid.data'), delimiter=',')
    tst = np.loadtxt(os.path.join(gdir, name + '.test.data'), delimiter=',')
    # All three splits must describe the same variables.
    assert trn.shape[1] == vld.shape[1] == tst.shape[1]
    assert trn.ndim == vld.ndim == tst.ndim == 2
    n1, n2 = len(trn), len(vld)
    # Stack train/valid/test into one array (replaces manual preallocation).
    data = np.concatenate((trn, vld, tst), axis=0)
    m = data.shape[1]
    # All variables are binary, hence the schema of [2] * m.
    return schema_split_helper(data, schema, split, n1, n2, [2]*m)
def load_mnperm(n, schema=False, split=False):
    """Load (creating and caching on first use) the n-permutation dataset.

    Args:
        n: permutation size; must be within [2, 12].
        schema: passed through to schema_split_helper.
        split: passed through to schema_split_helper.
    """
    assert 2 <= n <= 12
    filename = os.path.join(BASE_DIR, 'permanent/mnperm.%.2d.npy'%n)
    try:
        data = np.load(filename)
    except IOError:
        # Not cached yet: create the dataset and save it for next time.
        # (Was Python-2-only "except IOError, e" syntax; the exception
        # object was never used.)
        data = creators.create_mnperm(n)
        np.save(filename, data)
    # ~77% of rows for training, 10% of that for validation; ensure at
    # least one sample in each split.
    m1 = max(1, int(round(data.shape[0]/1.3)))
    m2 = max(1, int(round(m1 * 0.1)))
    return schema_split_helper(data, schema, split, m1, m2, [n]*n)
def gd_function_creator(name):
    """Create a zero-argument-style loader bound to the dataset *name*."""
    def gd_data(schema=False, split=False):
        """Load the bound Gens/Domingos dataset via load_gd."""
        return load_gd(name, schema=schema, split=split)
    return gd_data
def mnperm_function_creator(n):
    """Create a loader bound to the size-*n* permutation dataset."""
    def mnperm_data(schema=False, split=False):
        """Load the bound permutation dataset via load_mnperm."""
        return load_mnperm(n, schema=schema, split=split)
    return mnperm_data
|
awd4/spnss
|
datasets/loaders.py
|
Python
|
mit
| 1,541
|
#!/usr/bin/env python
import csv
import difflib
try:
from settings import FIELD_SEP
except ImportError:
FIELD_SEP = '\t'
class TaxonIndex():
    """
    TaxonIndex is a class for reading a taxon dictionary file (which must be
    in the form of a tab-separated CSV text file), and matching genera and taxa
    against that dictionary using a fuzzy-matching algorithm to deal with
    spelling errors.
    """
    # Todo: handle if taxonID in fieldnames but not provided for a row
    # Todo: does this work with Unicode files?
    # Todo: sort the genus lists

    def __init__(self,csvfile,delimiter=FIELD_SEP):
        # scientificName -> dict of the other recognised columns for that row
        self.taxonindex = dict()
        # Genus (capitalized first word) -> list of scientificNames in it
        self.genusindex = dict()
        # taxonID -> dict of the other recognised columns (only populated
        # when the file actually has a taxonID column)
        self.idindex = dict()
        # lowercased/stripped name -> original name, for fuzzy matching
        self._taxontest = dict()
        validheaders = set(['scientificName','taxonID','taxonomicStatus',
            'relatedResourceID'])
        with open(csvfile,'rb') as f:
            try:
                # Sniff the dialect from a sample; fall back to defaults if
                # sniffing fails.
                dialect = csv.Sniffer().sniff(f.read(2048),delimiters=delimiter)
                f.seek(0)
                self.reader = csv.DictReader(f, dialect=dialect)
            except csv.Error:
                f.seek(0)
                self.reader = csv.DictReader(f)
            self.fieldnames = self.reader.fieldnames
            if 'scientificName' in self.fieldnames:
                for r in self.reader:
                    # Reject ragged rows outright.
                    if len(r) != len(self.fieldnames):
                        raise csv.Error("Number of fields should be "
                            "%s: %s" % (len(self.fieldnames),str(r)))
                    self.taxonindex[r['scientificName']] = {k:v for k,v in
                        r.items() if k in validheaders-set(['scientificName'])}
                    if 'taxonID' not in self.fieldnames :
                        # No id column: use the name itself as the id.
                        self.taxonindex[r['scientificName']]['taxonID'] = \
                            r['scientificName']
                    else:
                        self.idindex[r['taxonID']] = \
                            {k:v for k,v in r.items() if k in validheaders-
                            set(['taxonID'])}
                    # Group names by genus (assumed to be the first word).
                    try:
                        self.genusindex[r['scientificName'].split(' ')[0].
                            strip().capitalize()] += [r['scientificName']]
                    except KeyError:
                        self.genusindex[r['scientificName'].split(' ')[0].
                            strip().capitalize()] = [r['scientificName']]
            else:
                raise csv.Error("CSV Error: headers must include at least "
                    "'scientificName'. Current headers: %s" % str(self.fieldnames))
        self._taxontest = {n.strip().lower():n for n in self.taxonindex}

    def matchgenera(self,genus,n=1,sensitivity=0.85):
        """Returns up to n genera which are similar to the genus of the name
        provided.
        """
        #Assumes first word is genus
        test = genus.strip().split(' ')[0].capitalize()
        return difflib.get_close_matches(test,self.genusindex.keys()
            ,n,sensitivity)

    def matchtaxa(self,t,genus=None,n=1,sensitivity=0.65):
        """Returns up to n taxa which have a similar name to the one
        provided. If genus is provided, limits search to that genus.
        """
        test = t.strip().lower()
        if genus == None:
            results = difflib.get_close_matches(test,self._taxontest,n,
                sensitivity)
        else:
            # Restrict matching to the names recorded under this genus.
            glist = [t.lower() for t in self.genusindex[genus]]
            results = difflib.get_close_matches(test,glist,n,sensitivity)
        # Map the lowercased matches back to the original names.
        return [self._taxontest[r] for r in results]
def ratio(t1,t2):
    """Returns the closeness of the match between two taxon names, with 1 being
    exact.
    """
    a, b = t1.strip().lower(), t2.strip().lower()
    matcher = difflib.SequenceMatcher(None, a, b)
    return matcher.ratio()
if __name__=='__main__':
    # Smoke test: build indices from the two sample dictionaries and dump them.
    dict1 = TaxonIndex('test/sn_dict')
    dict2 = TaxonIndex('test/id_sn_dict')
    print("sn_dict:")
    for k,v in dict1.taxonindex.items():
        print(k + ": " + str(v))
    print("\nid_sn_dict:")
    for k,v in dict2.taxonindex.items():
        print(k + ": " + str(v))
    # Python 2 print statements below (blank line, then a fuzzy-match demo).
    print
    print dict1.matchtaxa('THALASSARCH CHLORORYNCHOS',1,0.9)
|
rudivs/TaxonLinker
|
taxonutils.py
|
Python
|
mit
| 4,328
|
"""Screen database."""
import redis_client
import control
import re
from twisted.internet import defer
class ScreenDB(object):
    """A screen database."""

    def __init__(self):
        """Default constructor."""
        pass

    def set_mode(self, screen, mode):
        """Persist the screen's mode in redis and notify subscribers."""
        redis_client.connection.set('screen:{0}:mode'.format(screen),
                                    mode)
        redis_client.connection.publish('screen:update', 'update')

    def set_override(self, screen, override):
        """Set the screen's override message; None clears it. Notifies
        subscribers either way."""
        if override is not None:
            redis_client.connection.set('screen:{0}:override'.format(screen),
                                        override)
        else:
            redis_client.connection.delete('screen:{0}:override'.format(screen))
        redis_client.connection.publish('screen:update', 'update')

    @defer.inlineCallbacks
    def list(self):
        """Return (via deferred) a dict: screen id -> {'mode', 'host'}."""
        screens = yield redis_client.connection.keys('screen:*:mode')
        entries = {}
        for screen in screens:
            # Keys look like screen:<id>:mode; extract the id.
            screenID = screen.split(':')[1]
            mode = yield redis_client.connection.get('screen:{0}:mode'.format(screenID))
            host = yield redis_client.connection.get('screen:{0}:host'.format(screenID))
            entries[screenID] = {'mode': mode,
                                 'host': host}
        defer.returnValue(entries)
# Module-level database instance shared by the command handlers below.
screens = ScreenDB()
@control.handler('screen-list')
@defer.inlineCallbacks
def perform_screen_list(responder, options):
    """Respond with one line per known screen: id, mode and online status."""
    screen_list = yield screens.list()
    for screen, settings in screen_list.iteritems():
        if settings['host'] is None:
            online_string = 'offline'
        else:
            # host is stored as "<address> <port>".
            online_string = 'online from {0} port {1}'.format(*settings['host'].split(' '))
        responder('{0} - {1} ({2})'.format(screen,
                                           settings['mode'],
                                           online_string))
@control.handler('screen-set-mode')
def perform_screen_set_mode(responder, options):
    """Set a screen's mode from the parsed command options."""
    screens.set_mode(options['<id>'], options['<mode>'])
    responder('Mode set.')
@control.handler('screen-override')
def perform_screen_override(responder, options):
    """Set a screen's override message from the parsed command options."""
    screens.set_override(options['<id>'], options['<message>'])
    responder('Override set.')
@control.handler('screen-clear-override')
def perform_screen_clear_override(responder, options):
    """Clear a screen's override message (set_override(None) deletes it)."""
    screens.set_override(options['<id>'], None)
    responder('Override cleared.')
def got_screen(name):
    """Broadcast a notice when a screen announces itself."""
    control.broadcast('Screen connected: {0}'.format(name))

# Subscribe to screen connection announcements on the redis channel.
redis_client.add_subscribe('screen:connect', got_screen)
|
prophile/compd
|
src/screen_db.py
|
Python
|
mit
| 2,580
|
"""
Build and install the project.
"""
from setuptools import find_packages, setup
# Package metadata passed to setuptools below.
NAME = "HinetPy"
AUTHOR = "Dongdong Tian"
AUTHOR_EMAIL = "[email protected]"
LICENSE = "MIT License"
URL = "https://github.com/seisman/HinetPy"
DESCRIPTION = (
    "A Python package to request and process seismic waveform data from NIED Hi-net"
)
KEYWORDS = "Seismology, NIED, Hi-net, Waveform"
# The long description shown on PyPI is the full README.
with open("README.rst", "r", encoding="utf8") as f:
    LONG_DESCRIPTION = "".join(f.readlines())
PACKAGES = find_packages(exclude=["docs", "tests"])
SCRIPTS = []
CLASSIFIERS = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Science/Research",
    "Intended Audience :: Education",
    f"License :: OSI Approved :: {LICENSE}",
    "Operating System :: POSIX :: Linux",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3 :: Only",
    "Topic :: Internet :: WWW/HTTP",
    "Topic :: Scientific/Engineering :: Physics",
    "Topic :: Utilities",
]
# Runtime dependency.
INSTALL_REQUIRES = ["requests"]
# Configuration for setuptools-scm
SETUP_REQUIRES = ["setuptools_scm"]
# Version is derived from git metadata at build time.
USE_SCM_VERSION = {"local_scheme": "node-and-date", "fallback_version": "unknown"}

if __name__ == "__main__":
    setup(
        name=NAME,
        description=DESCRIPTION,
        long_description=LONG_DESCRIPTION,
        use_scm_version=USE_SCM_VERSION,
        author=AUTHOR,
        author_email=AUTHOR_EMAIL,
        license=LICENSE,
        url=URL,
        scripts=SCRIPTS,
        packages=PACKAGES,
        classifiers=CLASSIFIERS,
        keywords=KEYWORDS,
        install_requires=INSTALL_REQUIRES,
        setup_requires=SETUP_REQUIRES,
    )
|
seisman/HinetPy
|
setup.py
|
Python
|
mit
| 1,864
|
import logging
from kitnirc.client import Channel
from kitnirc.modular import Module
from kitnirc.user import User
# Module-level logger for this plugin.
_log = logging.getLogger(__name__)
def is_admin(controller, client, actor):
    """Used to determine whether someone issuing a command is an admin.

    By default, checks to see if there's a line of the type nick=host that
    matches the command's actor in the [admins] section of the config file,
    or a key that matches the entire mask (e.g. "foo@bar" or "foo@bar=1").
    """
    config = controller.config
    if not config.has_section("admins"):
        # Use the module logger defined above instead of the root logger so
        # these messages are attributed to this module.
        _log.debug("Ignoring is_admin check - no [admins] config found.")
        return False
    for key, val in config.items("admins"):
        # Full-mask key (value, if any, is ignored).
        if actor == User(key):
            _log.debug("is_admin: %r matches admin %r", actor, key)
            return True
        # nick=host style entry.
        if actor.nick.lower() == key.lower() and actor.host.lower() == val.lower():
            _log.debug("is_admin: %r matches admin %r=%r", actor, key, val)
            return True
    _log.debug("is_admin: %r is not an admin.", actor)
    return False
class AdminModule(Module):
    """A KitnIRC module which provides admin functionality.

    Customization of what an "admin" is can be done by overriding the
    is_admin global function in this file.
    """

    @Module.handle("PRIVMSG")
    def privmsg(self, client, actor, recipient, message):
        """Parse and dispatch admin commands received via PRIVMSG."""
        if isinstance(recipient, Channel):
            # Only pay attention if addressed directly in channels
            if not message.startswith("%s:" % client.user.nick):
                return
            message = message.split(":", 1)[1]
        message = message.strip()
        args = message.split()
        # Ignore empty messages
        if not args:
            return
        command, args = args[0], args[1:]
        command = command.lower()
        # Dispatch table: command word -> bound handler method.
        available_commands = {
            'join': self.join,
            'part': self.part,
            'quit': self.quit,
            'reload': self.reload,
            'reloadall': self.reloadall,
            'load': self.load,
            'unload': self.unload,
        }
        # Only pay attention to valid commands
        func = available_commands.get(command)
        if not func:
            return
        # Only pay attention to admins
        actor = User(actor)
        if not is_admin(self.controller, client, actor):
            client.reply(recipient, actor, "You are not allowed to do that.")
            return
        # Handlers return True/False for success/failure; None means no
        # acknowledgement should be sent (e.g. quit).
        result = func(client, args)
        if result is True:
            client.reply(recipient, actor, "Okay.")
        elif result is False:
            client.reply(recipient, actor, "Sorry, try again.")
        # Suppress further handling of the PRIVMSG event.
        return True

    def join(self, client, args):
        """Join the channel in args[0], with optional key in args[1]."""
        if not args:
            return False
        if client.join(args[0], args[1] if len(args) > 1 else None):
            return True
        else:
            return False

    def part(self, client, args):
        """Part the channel in args[0]."""
        if not args:
            return False
        if client.part(args[0]):
            return True
        else:
            return False

    def quit(self, client, args):
        # We immediately disconnect, so no reply
        client.quit()

    def reload(self, client, args):
        """Reload each named module; succeed only if all reloads succeed."""
        if not args:
            return False
        return all(self.controller.reload_module(mod) for mod in args)

    def reloadall(self, client, args):
        """Reload every loaded module."""
        return self.controller.reload_modules()

    def load(self, client, args):
        """Load the module named in args[0]."""
        if not args:
            return False
        return self.controller.load_module(args[0])

    def unload(self, client, args):
        """Unload the module named in args[0]."""
        if not args:
            return False
        return self.controller.unload_module(args[0])
# Export under the name "module" -- presumably the hook kitnirc's module
# loader looks for; TODO confirm against kitnirc.modular.
module = AdminModule

# vim: set ts=4 sts=4 sw=4 et:
|
ayust/kitnirc
|
kitnirc/contrib/admintools.py
|
Python
|
mit
| 3,850
|
# -*- coding: utf-8 -*-
# gthnk (c) Ian Dennis Miller
import os
import flask
import logging
from flaskext.markdown import Markdown
from mdx_linkify.mdx_linkify import LinkifyExtension
from mdx_journal import JournalExtension
from . import db, login_manager, bcrypt
from .models.day import Day
from .models.entry import Entry
from .models.page import Page
from .models.user import User
def create_app():
    """Create and wire up the gthnk Flask application.

    Configuration is read from the file named by the SETTINGS environment
    variable, falling back to ~/.gthnk/gthnk.conf when SETTINGS is unset.
    """
    app = flask.Flask(__name__)
    try:
        app.config.from_envvar('SETTINGS')
    except RuntimeError:
        # SETTINGS not set: fall back to the per-user default config file.
        default_filename = os.path.expanduser('~/.gthnk/gthnk.conf')
        if os.path.isfile(default_filename):
            print("WARN: using default configuration file ~/.gthnk/gthnk.conf")
            app.config.from_pyfile(default_filename)
    logging.basicConfig(
        format='%(asctime)s %(module)-16s %(levelname)-8s %(message)s',
        filename=app.config["LOG"],
        level=logging.INFO,
        datefmt='%Y-%m-%d %H:%M:%S'
    )
    logging.info("Server: Start")
    logging.info("Database: {}".format(app.config['SQLALCHEMY_DATABASE_URI']))
    # Blueprints are imported inside the factory (not at module top) --
    # presumably to avoid import cycles with the package init; confirm.
    from .blueprints.root import root
    app.register_blueprint(root)
    from .blueprints.auth import auth
    app.register_blueprint(auth)
    from .blueprints.day import day
    app.register_blueprint(day)
    # from .blueprints.attachments import attachments
    # app.register_blueprint(attachments)
    db.init_app(app)
    login_manager.init_app(app)
    bcrypt.init_app(app)
    # Markdown rendering with auto-linking and journal-specific markup.
    app.markdown = Markdown(app, extensions=[
        LinkifyExtension(),
        JournalExtension()
    ])
    return app

# Module-level application instance (used directly by WSGI servers).
app = create_app()
|
iandennismiller/gthnk
|
src/gthnk/server.py
|
Python
|
mit
| 1,615
|
import logging
import os
from settings import CONVERSIONS, LOGGING_KWARGS, XSL_PATH
logger = logging.getLogger(__name__)
def print_xsl_files():
    """Print a 1-based numbered list of the configured XSL file paths."""
    for number, parts in enumerate(CONVERSIONS, start=1):
        file_path = os.path.join(XSL_PATH, parts[0])
        print('{}: {}'.format(number, file_path))
if '__main__' == __name__:
    # Configure logging per the project settings, then print the list.
    logging.basicConfig(**LOGGING_KWARGS)
    print_xsl_files()
|
AustralianAntarcticDataCentre/metadata_xml_convert
|
print_xsl.py
|
Python
|
mit
| 376
|
"""Tests for issue previously raised and fixed, so we can be alerted if they start failing again."""
import pytest
from geomeppy.geom.polygons import Polygon3D
from geomeppy.geom.surfaces import set_coords
@pytest.fixture
def shadow_matching():
    """Regression-test data: four shading blocks and four single-storey
    zones (real-world footprints with shared edges) returned as
    {"zones": [...], "shadows": [...]}."""
    shadow_blocks = [
        {
            "name": "PN1001_Bld1000",
            "coordinates": [
                (-83637.73039999977, -100993.7087999992),
                (-83639.28569999989, -101015.82459999993),
                (-83653.77890000027, -101007.15670000017),
                (-83652.75889999978, -100992.65210000053),
                (-83637.73039999977, -100993.7087999992),
            ],
            "height": 21.0,
        },
        {
            "name": "PN1001_Bld1001",
            "coordinates": [
                (-83636.50970000029, -100976.35019999929),
                (-83637.73039999977, -100993.7087999992),
                (-83652.75889999978, -100992.65210000053),
                (-83651.5382000003, -100975.29350000061),
                (-83636.50970000029, -100976.35019999929),
            ],
            "height": 21.0,
        },
        {
            "name": "PN1001_Bld1004_EL23",
            "coordinates": [
                (-83635.2890999997, -100958.99369999953),
                (-83650.31759999972, -100957.93679999933),
                (-83648.50050000008, -100932.0979999993),
                (-83634.0064000003, -100940.75280000083),
                (-83635.2890999997, -100958.99369999953),
            ],
            "height": 21.0,
        },
        {
            "name": "PN1001_Bld1004_EL24",
            "coordinates": [
                (-83635.2890999997, -100958.99369999953),
                (-83636.50970000029, -100976.35019999929),
                (-83651.5382000003, -100975.29350000061),
                (-83650.31759999972, -100957.93679999933),
                (-83635.2890999997, -100958.99369999953),
            ],
            "height": 21.0,
        },
    ]
    zones = [
        {
            "name": "PN1001_Bld1003 Zone1",
            "coordinates": [
                (-83637.86197158082, -100995.57970000058),
                (-83623.76818996808, -100995.57970000058),
                (-83629.44400000013, -101021.71050000004),
                (-83639.28569999989, -101015.82459999993),
                (-83637.86197158082, -100995.57970000058),
            ],
            "height": 3.0,
            "num_stories": 1,
        },
        {
            "name": "PN1001_Bld1003 Zone2",
            "coordinates": [
                (-83623.76818996808, -100995.57970000058),
                (-83637.86197158082, -100995.57970000058),
                (-83637.73039999977, -100993.7087999992),
                (-83636.55229433342, -100976.95590000041),
                (-83619.72295787116, -100976.95590000041),
                (-83623.76818996808, -100995.57970000058),
            ],
            "height": 3.0,
            "num_stories": 1,
        },
        {
            "name": "PN1001_Bld1003 Zone3",
            "coordinates": [
                (-83614.40199999977, -100952.4587999992),
                (-83616.24896021019, -100960.96199999936),
                (-83635.42752116646, -100960.96199999936),
                (-83635.2890999997, -100958.99369999953),
                (-83634.0064000003, -100940.75280000083),
                (-83614.40199999977, -100952.4587999992),
            ],
            "height": 3.0,
            "num_stories": 1,
        },
        {
            "name": "PN1001_Bld1003 Zone4",
            "coordinates": [
                (-83616.24896021019, -100960.96199999936),
                (-83619.72295787116, -100976.95590000041),
                (-83636.55229433342, -100976.95590000041),
                (-83636.50970000029, -100976.35019999929),
                (-83635.42752116646, -100960.96199999936),
                (-83616.24896021019, -100960.96199999936),
            ],
            "height": 3.0,
            "num_stories": 1,
        },
    ]
    return {"zones": zones, "shadows": shadow_blocks}
def test_basic_shadow_matching(new_idf):
    """
    Test with all x-axis at 0

    This should avoid any issues with rounding/almost_equals.
    """
    try:
        ggr = new_idf.idfobjects["GLOBALGEOMETRYRULES"][0]
    except IndexError:
        # No geometry rules object in the fresh IDF; set_coords accepts None.
        ggr = None
    wall = new_idf.newidfobject(
        "BUILDINGSURFACE:DETAILED", Name="A Wall", Surface_Type="wall"
    )
    # 1x1 wall in the x=0 plane, fully covered by the larger shadow below.
    set_coords(wall, [(0, 0, 0), (0, 1, 0), (0, 1, 1), (0, 0, 1)], ggr)
    shadow = new_idf.newidfobject("SHADING:SITE:DETAILED", Name="A Shadow")
    set_coords(shadow, [(0, 0, 2), (0, 2, 2), (0, 2, 0), (0, 0, 0)], ggr)
    new_idf.intersect_match()
    # new_idf.view_model()
    # The wall overlapped by the shadow should be marked adiabatic.
    walls = [
        Polygon3D(w.coords)
        for w in new_idf.getsurfaces("wall")
        if w.Outside_Boundary_Condition == "adiabatic"
    ]
    expected_adiabatic = 1
    assert len(walls) == expected_adiabatic
def test_simple_shadow_matching(new_idf):
    """Test in a single plane, but angled."""
    try:
        ggr = new_idf.idfobjects["GLOBALGEOMETRYRULES"][0]
    except IndexError:
        # No geometry rules object in the fresh IDF; set_coords accepts None.
        ggr = None
    wall1 = new_idf.newidfobject(
        "BUILDINGSURFACE:DETAILED", Name="Wall 1", Surface_Type="wall"
    )
    # Wall and shadow share the same (angled) plane; the shadow is taller
    # and fully covers the wall footprint.
    set_coords(
        wall1,
        [
            (1.5553000001236796, 28.001700000837445, 3.0),
            (1.5553000001236796, 28.001700000837445, -1.0),
            (2.7759999996051192, 45.36030000075698, -1.0),
            (2.7759999996051192, 45.36030000075698, 3.0),
        ],
        ggr,
    )
    shadow = new_idf.newidfobject("SHADING:SITE:DETAILED", Name="A Shadow")
    set_coords(
        shadow,
        [
            (2.7759999996051192, 45.36030000075698, 21.0),
            (2.7759999996051192, 45.36030000075698, 0.0),
            (1.5553000001236796, 28.001700000837445, 0.0),
            (1.5553000001236796, 28.001700000837445, 21.0),
        ],
        ggr,
    )
    new_idf.intersect_match()
    # new_idf.view_model()
    # The wall overlapped by the shadow should be marked adiabatic.
    walls = [
        Polygon3D(w.coords)
        for w in new_idf.getsurfaces("wall")
        if w.Outside_Boundary_Condition == "adiabatic"
    ]
    expected_adiabatic = 1
    assert len(walls) == expected_adiabatic
def test_shadow_matching(new_idf, shadow_matching):
    """Test with a full model."""
    for block in shadow_matching["shadows"]:
        new_idf.add_shading_block(**block)
    for block in shadow_matching["zones"]:
        new_idf.add_block(**block)
    new_idf.translate_to_origin()
    new_idf.intersect_match()
    # Count the wall surfaces that were matched against shading and so
    # marked adiabatic.
    adiabatic = [
        Polygon3D(w.coords)
        for w in new_idf.getsurfaces("wall")
        if w.Outside_Boundary_Condition == "adiabatic"
    ]
    expected_adiabatic = 7
    assert len(adiabatic) == expected_adiabatic
def test_shadow_intersecting(new_idf, shadow_matching):
    """Test with a full model."""
    for block in shadow_matching["shadows"]:
        new_idf.add_shading_block(**block)
    for block in shadow_matching["zones"]:
        new_idf.add_block(**block)
    new_idf.translate_to_origin()
    # intersect() only splits surfaces; it does not set boundary conditions.
    new_idf.intersect()
    shadows = [Polygon3D(s.coords) for s in new_idf.getshadingsurfaces()]
    assert len(shadows) == 23
|
jamiebull1/geomeppy
|
tests/test_regressions.py
|
Python
|
mit
| 7,185
|
from rllab.envs.base import Env
from rllab.envs.base import Step
from rllab.spaces import Box
import numpy as np
class MultiMod2DEnv(Env):
    """
    This is a single time-step MDP where the action taken corresponds to the next state (in a 2D plane).
    The reward has a multi-modal gaussian shape, with the mode means set in a circle around the origin.
    """
    def __init__(self, mu=(1, 0), sigma=0.01, n=2, rand_init=False):
        """
        :param mu: mean of the first Gaussian mode; the other n-1 modes are
            mu rotated around the origin by multiples of 2*pi/n.
        :param sigma: common scalar variance of all modes (symmetric Gaussians).
        :param n: number of Gaussian modes.
        :param rand_init: if True, reset() samples the start state uniformly in
            a box of half-width 2.5*||mu||; otherwise it starts at the origin.
        """
        self.mu = np.array(mu)
        self.sigma = sigma  # we suppose symmetric Gaussians
        self.n = n
        self.rand_init = rand_init

    @property
    def observation_space(self):
        return Box(low=-np.inf, high=np.inf, shape=(2,))

    @property
    def action_space(self):
        # BUG FIX: the lower bound was previously +5*||mu||, identical to the
        # upper bound, which makes a degenerate zero-volume action space.
        # The bounds are now symmetric around zero.
        return Box(low=-5.0 * np.linalg.norm(self.mu), high=5.0 * np.linalg.norm(self.mu), shape=(2,))

    def reset(self):
        """Reset to the origin (or a random state when rand_init) and return the observation."""
        self._state = np.zeros(shape=(2,)) \
                      + int(self.rand_init) * (
            (np.random.rand(2, ) - 0.5) * 5 * np.linalg.norm(self.mu) )  ##mu is taken as largest
        observation = np.copy(self._state)
        return observation

    def reward_state(self, state):
        """Return the multi-modal Gaussian reward at *state*.

        A constant baseline of -0.5 plus one Gaussian bump per mode; the mode
        means are self.mu rotated around the origin in steps of 2*pi/n.
        """
        x = state
        mu = self.mu
        A = np.array([[np.cos(2. * np.pi / self.n), -np.sin(2. * np.pi / self.n)],
                      [np.sin(2. * np.pi / self.n), np.cos(2. * np.pi / self.n)]])  ##rotation matrix
        reward = -0.5 + 1. / (2 * np.sqrt(np.power(2. * np.pi, 2.) * self.sigma)) * (
            np.exp(-0.5 / self.sigma * np.linalg.norm(x - mu) ** 2))
        for i in range(1, self.n):
            mu = np.dot(A, mu)  # rotate to the next mode mean
            reward += 1. / (2 * np.sqrt(np.power(2. * np.pi, 2.) * self.sigma)) * (
                np.exp(-0.5 / self.sigma * np.linalg.norm(x - mu) ** 2))
        return reward

    def step(self, action):
        """Apply the action (state <- state + action) and end the episode (single-step MDP)."""
        self._state += action
        done = True
        next_observation = np.copy(self._state)
        reward = self.reward_state(self._state)
        return Step(observation=next_observation, reward=reward, done=done)

    def render(self):
        print('current state:', self._state)

    def log_diagnostics(self, paths):
        # to count the modes I need the current policy!
        pass
|
florensacc/snn4hrl
|
envs/point/multiMod2D_env.py
|
Python
|
mit
| 2,207
|
import getopt
import os
import sys
from updatewrapper.host import Host
from updatewrapper.utils.display import ask_yes_no, print_banner, print_info, print_notice, print_success, print_warning
from updatewrapper.utils.file import get_config_file, get_hosts, get_logfile, save_output
from updatewrapper.flavor import detect_flavor, get_flavor_wrapper
def wrap(hosts, out_dir, dist_upgrade):
    """Run the interactive package-update workflow for each host.

    Args:
        hosts: list of Host objects; hosts with enabled == False are skipped.
        out_dir: directory where per-host log files are written.
        dist_upgrade: passed to the flavor wrapper to select dist-upgrade.
    """
    print_banner()
    logfiles = []
    print('Wrapping updates for the following hosts:')
    for host in hosts:
        print(' * %s' % host.name)
    print()
    for host in hosts:
        try:
            if not host.enabled:
                print_warning('SKIPPING host %s' % host.addr)
                continue
            print_info('BEGIN host %s' % host.addr)
            host.ask_passwords()
            if host.flavor is None:
                print_success('Detecting host flavor')
                host.flavor = detect_flavor(host)
                print()
            wrapper = get_flavor_wrapper(host, dist_upgrade)
            print_success('Updating index cache')
            wrapper.update_cache()
            print_success('Listing available package upgrades')
            # check_update() returns a tuple which is unpacked into has_update()
            returns = wrapper.check_update()
            if wrapper.has_update(*returns):
                print_warning('Some packages need to be upgraded')
                if ask_yes_no('Do you want to continue?'):
                    print_success('Installing available package upgrades')
                    # NOTE(review): returncode and stderr are ignored; only
                    # stdout is saved to the log -- confirm failures should
                    # not be surfaced here.
                    returncode, stdout, stderr = wrapper.perform_update()
                    logfile = get_logfile(host.name, out_dir)
                    save_output(logfile, stdout)
                    logfiles.append(logfile)
            else:
                print_success('All packages are up-to-date')
            print_notice('END host %s' % host.addr)
        except KeyboardInterrupt:
            # Ctrl-C aborts the whole run, not just the current host.
            print()
            print()
            print('bye')
            break
    if logfiles:
        print('The following logfiles were created:')
        for logfile in logfiles:
            print(' * %s' % os.path.basename(logfile))
def main():
    """Parse command-line options and run the update wrapper.

    Options:
        -c / --config FILE   configuration file (default from get_config_file())
        --dist-upgrade       perform a dist-upgrade instead of a plain upgrade
        -h / --host ADDR     operate on a single host instead of the config list
        -o / --out-dir DIR   directory for log files (default: current directory)
    """
    # BUG FIX: '-o' takes a value (--out-dir=DIR) so it needs a trailing ':'
    # in the short-option spec.  The previous spec 'c:h:o' made "-o DIR"
    # yield an empty option value and leak DIR into the positional args.
    opts, args = getopt.getopt(sys.argv[1:], 'c:h:o:', ['config=', 'dist-upgrade', 'host=', 'out-dir='])
    config_file = get_config_file()
    hosts = []
    host = None
    out_dir = os.getcwd()
    dist_upgrade = False
    for opt in opts:
        if opt[0] in ('-c', '--config'):
            config_file = opt[1]
        elif opt[0] == '--dist-upgrade':
            dist_upgrade = True
        elif opt[0] in ('-h', '--host'):
            addr = opt[1]
            host = Host(addr=addr)  # TODO: Should allow to input other parameters or search from config
        elif opt[0] in ('-o', '--out-dir'):
            out_dir = opt[1]
    if host:
        # A host given on the command line overrides the configuration file.
        hosts.append(host)
    else:
        hosts = get_hosts(config_file)
    wrap(hosts, out_dir, dist_upgrade)

if __name__ == "__main__":
    main()
|
EpicScriptTime/update-wrapper
|
updatewrapper/__init__.py
|
Python
|
mit
| 2,934
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from powerpages.models import Page
from powerpages.sync import PageFileDumper
from powerpages.admin import website_link, sync_status, save_page
from powerpages.signals import page_edited
from .test_sync import BaseSyncTestCase
class WebsiteLinkTestCase(TestCase):
    """Tests for the ``website_link`` admin column helper.

    ``website_link`` renders an anchor to the page URL with the last path
    segment wrapped in a bold ``<span>``; it returns None for a missing page.
    """
    maxDiff = None

    def test_no_object(self):
        # No page instance -> no link.
        self.assertIsNone(website_link(None))

    def test_empty_url(self):
        self.assertEqual(
            website_link(Page(url='')),
            '<a href="" style="font-weight: normal;"> &raquo;</a>'
        )

    def test_root_url(self):
        # Root URL has no segment to embolden.
        self.assertEqual(
            website_link(Page(url='/')),
            '<a href="/" style="font-weight: normal;">/ &raquo;</a>'
        )

    def test_first_level_url(self):
        self.assertEqual(
            website_link(Page(url='/test/')),
            '<a href="/test/" style="font-weight: normal;">'
            '/<span style="font-weight: bold">test</span>/'
            ' &raquo;</a>'
        )

    def test_second_level_url(self):
        # Only the last segment is bold; parents stay plain.
        self.assertEqual(
            website_link(Page(url='/nested/test/')),
            '<a href="/nested/test/" style="font-weight: normal;">'
            '/nested/<span style="font-weight: bold">test</span>/'
            ' &raquo;</a>'
        )

    def test_file(self):
        self.assertEqual(
            website_link(Page(url='/robots.txt')),
            '<a href="/robots.txt" style="font-weight: normal;">'
            '/<span style="font-weight: bold">robots.txt</span>'
            ' &raquo;</a>'
        )

    def test_nested_file(self):
        self.assertEqual(
            website_link(Page(url='/nested/robots.txt')),
            '<a href="/nested/robots.txt" style="font-weight: normal;">'
            '/nested/<span style="font-weight: bold">robots.txt</span>'
            ' &raquo;</a>'
        )
class SyncStatusTestCase(BaseSyncTestCase):
    """Tests for the ``sync_status`` admin column helper.

    ``sync_status`` compares the database Page with its dumped file and
    renders a colour-coded status label (green/orange/red).
    """
    maxDiff = None

    def test_no_object(self):
        self.assertIsNone(sync_status(None))

    def test_file_synced(self):
        page = Page.objects.create(
            url='/test-page/', template='<h1>Test Page</h1>'
        )
        # Dumping right after creation leaves DB and file identical.
        PageFileDumper(page).save()
        self.assertEqual(
            sync_status(page),
            '<span style="color: green">File is synced</span>'
        )

    def test_file_content_differs(self):
        page = Page.objects.create(
            url='/test-page/', template='<h1>Test Page</h1>'
        )
        PageFileDumper(page).save()
        # Changing the DB copy after the dump makes the file stale.
        page.title = 'Lorem Ipsum'
        page.save()
        self.assertEqual(
            sync_status(page),
            '<span style="color: orange">File content differs</span>'
        )

    def test_file_is_missing(self):
        # No dump was ever made for this page.
        page = Page.objects.create(
            url='/test-page/', template='<h1>Test Page</h1>'
        )
        self.assertEqual(
            sync_status(page),
            '<span style="color: red">File is missing</span>'
        )

    def test_file_content_differs_modified_in_admin(self):
        page = Page.objects.create(
            url='/test-page/', template='<h1>Test Page</h1>'
        )
        PageFileDumper(page).save()
        page.title = 'Lorem Ipsum'
        page.is_dirty = True  # modified in Admin
        page.save()
        # The dirty flag adds an extra "Changed in Admin!" line to the label.
        self.assertEqual(
            sync_status(page),
            '<span style="color:black; font-weight:bold">'
            'Changed in Admin!</span><br>'
            '<span style="color: orange">File content differs</span>'
        )
class SavePageTestCase(TestCase):
    """Tests for the ``save_page`` admin helper.

    ``save_page`` persists the page, marks it dirty, and emits the
    ``page_edited`` signal; the signal kwargs are captured via a test handler.
    """
    maxDiff = None

    def setUp(self):
        # Capture the kwargs of the last page_edited emission.
        def page_edited_test_handler(sender, **kwargs):
            self.page_edited_kwargs = kwargs
        self.page_edited_kwargs = None
        # weak=False keeps the closure alive for the duration of the test.
        page_edited.connect(
            page_edited_test_handler, dispatch_uid='test_page_edited',
            weak=False
        )

    def tearDown(self):
        page_edited.disconnect(dispatch_uid='test_page_edited')
        self.page_edited_kwargs = None

    def test_create_page(self):
        page = Page(url='/test-page/')
        user = User.objects.create_user('admin-user')
        save_page(page=page, user=user, created=True)
        # Page was persisted and flagged as changed in Admin.
        self.assertIsNotNone(page.pk)
        self.assertTrue(page.is_dirty)
        self.assertDictContainsSubset(
            {'page': page, 'user': user, 'created': True},
            self.page_edited_kwargs
        )

    def test_modify_page(self):
        page = Page.objects.create(url='/test-page/', title='Lorem')
        page.title = 'Ipsum'
        user = User.objects.create_user('admin-user')
        save_page(page=page, user=user, created=False)
        self.assertEqual(Page.objects.get(pk=page.pk).title, 'Ipsum')
        self.assertTrue(page.is_dirty)
        self.assertDictContainsSubset(
            {'page': page, 'user': user, 'created': False},
            self.page_edited_kwargs
        )
class SwitchEditModeViewTestCase(TestCase):
    """Tests for the ``switch_edit_mode`` view.

    The view toggles the WEBSITE_EDIT_MODE session flag for staff members and
    superusers, then redirects to the HTTP referrer (or '/' without one);
    other users are bounced to the login page.
    """
    maxDiff = None

    def setUp(self):
        self.url = reverse('switch_edit_mode')
        self.staff_member = User.objects.create_user(
            'staff_member', password='letmein123', is_staff=True
        )
        self.super_user = User.objects.create_user(
            'super_user', password='letmein123', is_superuser=True
        )
        self.regular_user = User.objects.create_user(
            'regular_user', password='letmein123'
        )
        # Pages backing the redirect targets used below.
        Page.objects.create(url='/')
        Page.objects.create(url='/test-page/')

    def test_enable_edit_mode_staff_member_referrer(self):
        self.client.login(username='staff_member', password='letmein123')
        response = self.client.get(self.url, HTTP_REFERER='/test-page/')
        self.assertTrue(self.client.session.get('WEBSITE_EDIT_MODE'))
        self.assertRedirects(response, '/test-page/')

    def test_disable_edit_mode_staff_member_no_referrer(self):
        self.client.login(username='staff_member', password='letmein123')
        # Pre-set the flag so the view has something to clear.
        session = self.client.session
        session['WEBSITE_EDIT_MODE'] = True
        session.save()
        response = self.client.get(self.url)
        self.assertNotIn('WEBSITE_EDIT_MODE', self.client.session)
        self.assertRedirects(response, '/')

    def test_enable_edit_mode_super_user_no_referrer(self):
        self.client.login(username='super_user', password='letmein123')
        response = self.client.get(self.url)
        self.assertTrue(self.client.session.get('WEBSITE_EDIT_MODE'))
        self.assertRedirects(response, '/')

    def test_disable_edit_mode_super_user_referrer(self):
        self.client.login(username='super_user', password='letmein123')
        session = self.client.session
        session['WEBSITE_EDIT_MODE'] = True
        session.save()
        response = self.client.get(self.url, HTTP_REFERER='/test-page/')
        self.assertNotIn('WEBSITE_EDIT_MODE', self.client.session)
        self.assertRedirects(response, '/test-page/')

    def test_access_forbidden_regular_user(self):
        # Non-staff users are redirected to login with ?next= back-reference.
        self.client.login(username='regular_user', password='letmein123')
        response = self.client.get(self.url)
        self.assertRedirects(
            response, '{0}?next={1}'.format(settings.LOGIN_URL, self.url),
            fetch_redirect_response=False
        )

    def test_access_forbidden_anonmous(self):
        response = self.client.get(self.url)
        self.assertRedirects(
            response, '{0}?next={1}'.format(settings.LOGIN_URL, self.url),
            fetch_redirect_response=False
        )
|
Open-E-WEB/django-powerpages
|
powerpages/tests/test_admin.py
|
Python
|
mit
| 7,705
|
import os
import signal
import time
import unittest
import stomp
from stomp import exception
from stomp.backward import monotonic
from stomp.listener import TestListener
from stomp.test.testutils import *
class TestBasicSend(unittest.TestCase):
    """Integration tests for basic STOMP send/subscribe behaviour.

    These tests connect to the broker described by ``get_default_host()``
    and therefore require a reachable STOMP broker.  Several tests use
    fixed ``time.sleep`` delays to let the broker process frames --
    NOTE(review): timing-sensitive; may be flaky on slow brokers.
    """

    def setUp(self):
        # One shared connection + listener per test; receipt id '123' is
        # used throughout to synchronise on broker acknowledgements.
        conn = stomp.Connection(get_default_host())
        listener = TestListener('123')
        conn.set_listener('', listener)
        conn.start()
        conn.connect(get_default_user(), get_default_password(), wait=True)
        self.conn = conn
        self.listener = listener
        # Timestamp suffix keeps queue names unique across runs.
        self.timestamp = time.strftime('%Y%m%d%H%M%S')

    def tearDown(self):
        if self.conn:
            self.conn.disconnect(receipt=None)

    def test_basic(self):
        queuename = '/queue/test1-%s' % self.timestamp
        self.conn.subscribe(destination=queuename, id=1, ack='auto')
        self.conn.send(body='this is a test', destination=queuename, receipt='123')
        self.listener.wait_for_message()
        self.assertTrue(self.listener.connections == 1, 'should have received 1 connection acknowledgement')
        self.assertTrue(self.listener.messages == 1, 'should have received 1 message')
        self.assertTrue(self.listener.errors == 0, 'should not have received any errors')

    def test_commit(self):
        # Messages in an uncommitted transaction must not be delivered.
        queuename = '/queue/test2-%s' % self.timestamp
        self.conn.subscribe(destination=queuename, id=1, ack='auto')
        trans_id = self.conn.begin()
        self.conn.send(body='this is a test1', destination=queuename, transaction=trans_id)
        self.conn.send(body='this is a test2', destination=queuename, transaction=trans_id)
        self.conn.send(body='this is a test3', destination=queuename, transaction=trans_id, receipt='123')
        time.sleep(3)
        self.assertTrue(self.listener.connections == 1, 'should have received 1 connection acknowledgement')
        self.assertTrue(self.listener.messages == 0, 'should not have received any messages')
        # After commit all three messages are released at once.
        self.conn.commit(transaction=trans_id)
        self.listener.wait_for_message()
        time.sleep(3)
        self.assertTrue(self.listener.messages == 3, 'should have received 3 messages')
        self.assertTrue(self.listener.errors == 0, 'should not have received any errors')

    def test_abort(self):
        # Aborting a transaction discards all messages sent within it.
        queuename = '/queue/test3-%s' % self.timestamp
        self.conn.subscribe(destination=queuename, id=1, ack='auto')
        trans_id = self.conn.begin()
        self.conn.send(body='this is a test1', destination=queuename, transaction=trans_id)
        self.conn.send(body='this is a test2', destination=queuename, transaction=trans_id)
        self.conn.send(body='this is a test3', destination=queuename, transaction=trans_id)
        time.sleep(3)
        self.assertTrue(self.listener.connections == 1, 'should have received 1 connection acknowledgement')
        self.assertTrue(self.listener.messages == 0, 'should not have received any messages')
        self.conn.abort(transaction=trans_id)
        time.sleep(3)
        self.assertTrue(self.listener.messages == 0, 'should not have received any messages')
        self.assertTrue(self.listener.errors == 0, 'should not have received any errors')

    def test_timeout(self):
        # 192.0.2.0 is a TEST-NET address, so the connect attempt must fail.
        conn = stomp.Connection([('192.0.2.0', 60000)], timeout=5, reconnect_attempts_max=1)
        conn.set_listener('', self.listener)
        try:
            ms = monotonic()
            conn.start()
            self.fail("shouldn't happen")
        except exception.ConnectFailedException:
            pass  # success!
        ms = monotonic() - ms
        self.assertTrue(ms > 5.0, 'connection timeout should have been at least 5 seconds')

    def test_childinterrupt(self):
        # A SIGCHLD during a blocking receive must not break the connection.
        def childhandler(signum, frame):
            print("received child signal")
        oldhandler = signal.signal(signal.SIGCHLD, childhandler)
        queuename = '/queue/test5-%s' % self.timestamp
        self.conn.subscribe(destination=queuename, id=1, ack='auto', receipt='123')
        self.listener.wait_on_receipt()
        self.conn.send(body='this is an interrupt test 1', destination=queuename)
        print("causing signal by starting child process")
        os.system("sleep 1")
        time.sleep(1)
        signal.signal(signal.SIGCHLD, oldhandler)
        print("completed signal section")
        self.conn.send(body='this is an interrupt test 2', destination=queuename, receipt='123')
        self.listener.wait_for_message()
        self.assertTrue(self.listener.connections == 1, 'should have received 1 connection acknowledgment')
        self.assertTrue(self.listener.errors == 0, 'should not have received any errors')
        self.assertTrue(self.conn.is_connected(), 'should still be connected to STOMP provider')

    def test_clientack(self):
        queuename = '/queue/testclientack-%s' % self.timestamp
        self.conn.subscribe(destination=queuename, id=1, ack='client')
        self.conn.send(body='this is a test', destination=queuename, receipt='123')
        self.listener.wait_for_message()
        (headers, _) = self.listener.get_latest_message()
        message_id = headers['message-id']
        subscription = headers['subscription']
        self.conn.ack(message_id, subscription)

    def test_clientnack(self):
        queuename = '/queue/testclientnack-%s' % self.timestamp
        self.conn.subscribe(destination=queuename, id=1, ack='client')
        self.conn.send(body='this is a test', destination=queuename, receipt='123')
        self.listener.wait_for_message()
        (headers, _) = self.listener.get_latest_message()
        message_id = headers['message-id']
        subscription = headers['subscription']
        self.conn.nack(message_id, subscription)

    def test_specialchars(self):
        # Colon, backslash and newline must survive STOMP header escaping.
        queuename = '/queue/testspecialchars-%s' % self.timestamp
        self.conn.subscribe(destination=queuename, id=1, ack='client')
        hdrs = {
            'special-1': 'test with colon : test',
            'special-2': 'test with backslash \\ test',
            'special-3': 'test with newline \n'
        }
        self.conn.send(body='this is a test', headers=hdrs, destination=queuename, receipt='123')
        self.listener.wait_for_message()
        (headers, _) = self.listener.get_latest_message()
        _ = headers['message-id']
        _ = headers['subscription']
        self.assertTrue('special-1' in headers)
        self.assertEqual('test with colon : test', headers['special-1'])
        self.assertTrue('special-2' in headers)
        self.assertEqual('test with backslash \\ test', headers['special-2'])
        self.assertTrue('special-3' in headers)
        self.assertEqual('test with newline \n', headers['special-3'])
class TestConnectionErrors(unittest.TestCase):
    """Connecting with invalid credentials must fail (or leave us disconnected)."""

    def test_connect_wait_error(self):
        # With wait=True, connect() should raise for invalid credentials.
        conn = stomp.Connection(get_default_host())
        conn.start()
        try:
            conn.connect('invalid', 'user', True)
            self.fail("Shouldn't happen")
        except AssertionError:
            # BUG FIX: the previous bare `except:` also swallowed the
            # AssertionError raised by self.fail() above, so the test could
            # never fail.  Let the test failure propagate.
            raise
        except Exception:
            pass  # expected: the broker rejected the credentials

    def test_connect_nowait_error(self):
        # With wait=False, connect() should not raise; the connection just
        # never becomes established.
        conn = stomp.Connection(get_default_host())
        conn.start()
        try:
            conn.connect('invalid', 'user', False)
        except Exception:
            self.fail("Shouldn't happen")
        # Assert outside the try so a failure here is reported as itself
        # rather than being rewritten by the except clause.
        self.assertFalse(conn.is_connected(), 'Should not be connected')
|
GeneralizedLearningUtilities/SuperGLU
|
python_module/stomp/test/basic_test.py
|
Python
|
mit
| 7,571
|
"""
WSGI config for ccbc_library project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ccbc_library.deploy_settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = Cling(get_wsgi_application())
|
comsaint/ccbc-library
|
ccbc_library/wsgi.py
|
Python
|
mit
| 441
|
from main import GuessManager
# --- GuessManager construction and read-only properties ---------------------
# The mask marks positions that are revealed up front and excluded from
# guessing; words are normalised to upper case on construction.
def test_init_uppercase():
    g = GuessManager('SOMEWORD')
    assert g.word == 'SOMEWORD'
    assert g.mask == [False]*8
def test_init_mask():
    mask = [True, False, True, False, True, False, True, False]
    g = GuessManager('SOMEWORD', mask=mask)
    assert g.word == 'SOMEWORD'
    assert g.mask == mask
def test_init_mword():
    # _mword pairs each letter with its masked flag.
    g = GuessManager('ABC', mask=[False, True, False])
    assert g._mword == [('A', False), ('B', True), ('C', False)]
def test_init_lowercase():
    g = GuessManager('someword')
    assert g.word == 'SOMEWORD'
    assert g.mask == [False]*8
def test_init_guessed_letters():
    g = GuessManager('someword')
    assert g.guessed_letters == set()
def test_init_tried_letters():
    g = GuessManager('someword')
    assert g.tried_letters == set()
def test_init_hidden_letters():
    # hidden_letters is a set, so the duplicate 'O' appears once.
    g = GuessManager('someword')
    assert g.hidden_letters == set(['S', 'O', 'M', 'E', 'W', 'R', 'D'])
def test_init_hidden_letters_checks_mask():
    # Masked (pre-revealed) letters are not hidden.
    g = GuessManager('some', mask=[True, False, False, False])
    assert g.hidden_letters == set(['O', 'M', 'E'])
def test_len():
    g = GuessManager('someword')
    assert g.len == 8
def test_len_checks_mask():
    # len counts only unmasked positions.
    g = GuessManager('a b', mask=[False, True, False])
    assert g.len == 2
def test_guessed():
    g = GuessManager('a b', mask=[False, True, False])
    assert g.guessed == 0
def test_missing():
    g = GuessManager('someword')
    assert g.missing == 8
def test_missing_checks_mask():
    g = GuessManager('a b', mask=[False, True, False])
    assert g.missing == 2
def test_status():
    # status shows revealed letters and None for still-hidden positions.
    g = GuessManager('someword')
    assert g.status == [None] * 8
def test_status_check_mask():
    g = GuessManager('some', [True, False, True, False])
    assert g.status == ['S', None, 'M', None]
# --- guess(): single-letter guesses ------------------------------------------
# guess() returns the number of NEW positions revealed by this call.
def test_guess_letter():
    g = GuessManager('someword')
    res = g.guess('m')
    assert g.guessed_letters == set(['M'])
    assert g.tried_letters == set(['M'])
    assert g.guessed == 1
    assert res == 1
    assert g.missing == 7
    assert g.status == [None, None, 'M', None, None, None, None, None]
def test_guess_letter_with_mask():
    g = GuessManager('a bc', mask=[False, True, False, False])
    res = g.guess('a')
    assert g.guessed_letters == set(['A'])
    assert g.tried_letters == set(['A'])
    assert g.guessed == 1
    assert res == 1
    assert g.missing == 2
    assert g.status == ['A', ' ', None, None]
def test_guess_more_than_one_letter():
    # A letter occurring twice reveals both positions in one guess.
    g = GuessManager('someword')
    res = g.guess('o')
    assert g.guessed_letters == set(['O'])
    assert g.tried_letters == set(['O'])
    assert g.guessed == 2
    assert res == 2
    assert g.missing == 6
    assert g.status == [None, 'O', None, None, None, 'O', None, None]
def test_guess_multiple_calls_same_letter():
    # Re-guessing an already revealed letter yields 0 new positions.
    g = GuessManager('someword')
    res = g.guess('o')
    res = g.guess('o')
    assert g.guessed_letters == set(['O'])
    assert g.tried_letters == set(['O'])
    assert g.guessed == 2
    assert res == 0
    assert g.missing == 6
    assert g.status == [None, 'O', None, None, None, 'O', None, None]
def test_guess_multiple_calls_different_letters():
    g = GuessManager('someword')
    g.guess('o')
    g.guess('m')
    assert g.guessed_letters == set(['O', 'M'])
    assert g.tried_letters == set(['O', 'M'])
    assert g.guessed == 3
    assert g.missing == 5
    assert g.status == [None, 'O', 'M', None, None, 'O', None, None]
def test_wrong_guess():
    # A miss is recorded in tried_letters but not guessed_letters.
    g = GuessManager('someword')
    res = g.guess('x')
    assert g.guessed_letters == set()
    assert g.tried_letters == set(['X'])
    assert g.guessed == 0
    assert res == 0
    assert g.missing == 8
    assert g.status == [None, None, None, None, None, None, None, None]
# --- guess_word(): whole-word guesses ----------------------------------------
# guess_word() returns the number of NEW positions revealed; a wrong word
# reveals nothing and leaves tried_letters untouched.
def test_guess_word_successful():
    g = GuessManager('someword')
    res = g.guess_word('someword')
    assert g.guessed_letters == set(['S', 'O', 'M', 'E', 'W', 'O', 'R', 'D'])
    assert g.tried_letters == set()
    assert g.guessed == 8
    assert res == 8
    assert g.missing == 0
    assert g.status == list('someword'.upper())
def test_guess_word_checks_mask():
    # Masked positions are already revealed; only 'A' counts as new.
    g = GuessManager('a (19)', mask=[False, True, True, True, True, True])
    res = g.guess_word('a (19)')
    assert g.guessed_letters == set(['A'])
    assert g.tried_letters == set()
    assert g.guessed == 1
    assert res == 1
    assert g.missing == 0
    assert g.status == list('a (19)'.upper())
def test_guess_word_successful_after_guessed_letters():
    # Letters already revealed by guess() are not counted again.
    g = GuessManager('someword')
    g.guess('s')
    g.guess('o')
    res = g.guess_word('someword')
    assert g.guessed_letters == set(['S', 'O', 'M', 'E', 'W', 'O', 'R', 'D'])
    assert g.tried_letters == set(['S', 'O'])
    assert g.guessed == 8
    assert res == 5
    assert g.missing == 0
    assert g.status == list('someword'.upper())
def test_guess_word_unsuccessful():
    g = GuessManager('someword')
    res = g.guess_word('sameward')
    assert g.guessed_letters == set()
    assert g.tried_letters == set()
    assert res == 0
    assert g.missing == 8
    assert g.status == [None, None, None, None, None, None, None, None]
def test_guess_word_unsuccessful_after_guessed_letters():
    # A wrong word guess leaves previously revealed letters intact.
    g = GuessManager('someword')
    g.guess('s')
    g.guess('o')
    res = g.guess_word('somelord')
    assert g.guessed_letters == set(['S', 'O'])
    assert g.tried_letters == set(['S', 'O'])
    assert res == 0
    assert g.missing == 5
    assert g.status == ['S', 'O', None, None, None, 'O', None, None]
|
lgiordani/slack_hangman
|
tests/test_guess_manager.py
|
Python
|
mit
| 5,541
|
import pathlib
import numpy as np
import pytest
import meshio
from . import helpers
@pytest.mark.parametrize(
    "mesh",
    [
        # helpers.empty_mesh,
        helpers.tri_mesh,
        helpers.quad_mesh,
        helpers.tri_quad_mesh,
        helpers.add_point_data(helpers.tri_mesh, 1, dtype=int),
        helpers.add_point_data(helpers.tri_mesh, 1, dtype=float),
        helpers.line_mesh,
        helpers.polygon_mesh,
        # helpers.add_cell_data(helpers.tri_mesh, [("a", (), np.float64)]),
        # helpers.add_cell_data(helpers.tri_mesh, [("a", (2,), np.float64)]),
        # helpers.add_cell_data(helpers.tri_mesh, [("a", (3,), np.float64)]),
    ],
)
@pytest.mark.parametrize("binary", [False, True])
def test_ply(mesh, binary, tmp_path):
    """Round-trip each helper mesh through the PLY writer/reader."""
    def writer(*args, **kwargs):
        return meshio.ply.write(*args, binary=binary, **kwargs)
    # Cast cell indices to int32 before writing.
    # NOTE(review): this rebinds cells on the shared module-level helper
    # meshes, so later parametrized cases see the int32 cells -- confirm
    # that is intended.
    for k, c in enumerate(mesh.cells):
        mesh.cells[k] = meshio.CellBlock(c.type, c.data.astype(np.int32))
    helpers.write_read(tmp_path, writer, meshio.ply.read, mesh, 1.0e-12)
@pytest.mark.parametrize(
    "filename, ref_sum, ref_num_cells",
    [
        ("bun_zipper_res4.ply", 3.414583969116211e01, 948),
        ("tet.ply", 6, 4),
    ],
)
def test_reference_file(filename, ref_sum, ref_num_cells):
    """Read a reference PLY from disk and check point sum and triangle count."""
    this_dir = pathlib.Path(__file__).resolve().parent
    filename = this_dir / "meshes" / "ply" / filename
    mesh = meshio.read(filename)
    # Coarse checksum: total of all point coordinates, within 1%.
    tol = 1.0e-2
    s = np.sum(mesh.points)
    assert abs(s - ref_sum) < tol * abs(ref_sum)
    assert len(mesh.get_cells_type("triangle")) == ref_num_cells
@pytest.mark.parametrize("binary", [False, True])
def test_no_cells(binary):
import io
vertices = np.random.random((30, 3))
mesh = meshio.Mesh(vertices, [])
file = io.BytesIO()
mesh.write(file, "ply", binary=binary)
mesh2 = meshio.read(io.BytesIO(file.getvalue()), "ply")
assert np.array_equal(mesh.points, mesh2.points)
assert len(mesh2.cells) == 0
|
nschloe/meshio
|
tests/test_ply.py
|
Python
|
mit
| 1,964
|
import os
import imp
import sys
import six
class DotImportHook:
    """Meta-path hook importing packages whose directory name contains dots.

    Allows ``import a.b`` to resolve a folder literally named ``a.b`` found
    either directly on sys.path or inside an already-imported parent package.
    NOTE(review): uses the legacy find_module/load_module finder protocol and
    the deprecated ``imp`` module -- fine on older Pythons, confirm target
    version before modernising.
    """

    def find_module(self, fullname, path=None):
        """Return self if a folder named after the dotted module exists."""
        bits = fullname.split('.')
        if len(bits) <= 1:
            # Single-component names are handled by the normal import system.
            return
        # A folder literally named e.g. "a.b.c" directly on sys.path.
        for folder in sys.path:
            if os.path.exists(os.path.join(folder, fullname)):
                return self
        # Or a dotted folder (e.g. "b.c") inside an imported parent package.
        for i in range(1, len(bits) - 1):
            package, mod = '.'.join(bits[:i]), '.'.join(bits[i:])
            path = sys.modules[package].__path__
            for folder in path:
                if os.path.exists(os.path.join(folder, mod)):
                    return self

    def _load_from(self, module, folder, dotted):
        """Initialise *module* from ``folder/dotted/__init__.py`` and return it."""
        pathfunc = lambda *args: os.path.join(folder, dotted, *args)
        module.__path__ = [pathfunc()]
        module.__file__ = pathfunc('__init__.pyc')
        # BUG FIX: close the source file instead of leaking the handle
        # (previously `open(...).read()` with no close).
        with open(pathfunc('__init__.py')) as fp:
            source = fp.read()
        six.exec_(source, module.__dict__)
        return module

    def load_module(self, fullname):
        """Create, register and execute the module for *fullname*."""
        if fullname in sys.modules:
            return sys.modules[fullname]
        # Register before executing so circular imports see the module.
        sys.modules[fullname] = module = imp.new_module(fullname)
        if '.' in fullname:
            pkg, name = fullname.rsplit('.', 1)
            path = sys.modules[pkg].__path__
        else:
            pkg, name = '', fullname
            path = sys.path
        module.__package__ = pkg
        module.__loader__ = self
        bits = fullname.split('.')
        if len(bits) <= 1:
            return module
        # Mirror the search order of find_module().
        for folder in sys.path:
            if os.path.exists(os.path.join(folder, fullname)):
                return self._load_from(module, folder, fullname)
        for i in range(1, len(bits) - 1):
            package, mod = '.'.join(bits[:i]), '.'.join(bits[i:])
            path = sys.modules[package].__path__
            for folder in path:
                if os.path.exists(os.path.join(folder, mod)):
                    return self._load_from(module, folder, mod)
        # somehow not found, delete from sys.modules
        del sys.modules[fullname]
# support reload()ing this module
# If a previous instance of this module installed a hook, remove it before
# installing a fresh one, so reload() does not stack duplicate hooks.
try:
    hook
except NameError:
    pass
else:
    try:
        sys.meta_path.remove(hook)
    except ValueError:
        # not found, skip removing
        pass
# automatically install hook
# Prepend so this hook gets first chance at dotted-directory imports.
hook = DotImportHook()
sys.meta_path.insert(0, hook)
|
sciyoshi/dotmod
|
dotmod.py
|
Python
|
mit
| 1,987
|
# Use default debug configuration or local configuration
# Prefer a developer-provided config_local module; fall back to the
# checked-in defaults when it does not exist.
# NOTE(review): an ImportError raised *inside* config_local is also
# silently masked by this fallback.
try:
    from .config_local import *
except ImportError:
    from .config_default import *
|
steelart/ask-navalny
|
django-backend/config/config.py
|
Python
|
mit
| 148
|
# coding=utf-8
"""
The SNMPRawCollector is designed for collecting data from SNMP-enables devices,
using a set of specified OIDs
#### Configuration
Below is an example configuration for the SNMPRawCollector. The collector
can collect data any number of devices by adding configuration sections
under the *devices* header. By default the collector will collect every 60
seconds. This might be a bit excessive and put unnecessary load on the
devices being polled. You may wish to change this to every 300 seconds. However
you need modify your graphite data retentions to handle this properly.
```
# Options for SNMPRawCollector
enabled = True
interval = 60
[devices]
# Start the device configuration
# Note: this name will be used in the metric path.
[[my-identification-for-this-host]]
host = localhost
port = 161
community = public
# Start the OID list for this device
# Note: the value part will be used in the metric path.
[[[oids]]]
1.3.6.1.4.1.2021.10.1.3.1 = cpu.load.1min
1.3.6.1.4.1.2021.10.1.3.2 = cpu.load.5min
1.3.6.1.4.1.2021.10.1.3.3 = cpu.load.15min
# If you want another host, you can. But you probably won't need it.
[[another-identification]]
host = router1.example.com
port = 161
community = public
[[[oids]]]
oid = metric.path
oid = metric.path
```
Note: If you modify the SNMPRawCollector configuration, you will need to
restart diamond.
#### Dependencies
* pysmnp (which depends on pyasn1 0.1.7 and pycrypto)
"""
import os
import sys
import time
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
'snmp'))
from snmp import SNMPCollector as parent_SNMPCollector
from diamond.metric import Metric
class SNMPRawCollector(parent_SNMPCollector):
    """Collects values for explicitly configured OIDs from SNMP devices.

    Device/OID configuration lives under self.config['devices']; see the
    module docstring for the expected layout.
    """

    def __init__(self, *args, **kwargs):
        super(SNMPRawCollector, self).__init__(*args, **kwargs)
        # list to save non-existing oid's per device, to avoid repetition of
        # errors in logging. restart diamond/collector to flush this
        self.skip_list = []

    def get_default_config(self):
        """
        Override SNMPCollector.get_default_config method to provide
        default_config for the SNMPInterfaceCollector
        """
        default_config = super(SNMPRawCollector,
                               self).get_default_config()
        default_config.update({
            'oids': {},
            'path_prefix': 'servers',
            'path_suffix': 'snmp',
        })
        return default_config

    def _precision(self, value):
        """
        Return the precision of the number (digits after the decimal point).

        NOTE(review): values in scientific notation (e.g. '1e-05') are not
        handled specially -- confirm upstream values are plain decimals.
        """
        value = str(value)
        decimal = value.rfind('.')
        if decimal == -1:
            return 0
        return len(value) - decimal - 1

    def _skip(self, device, oid, reason=None):
        """Mute (device, oid) for the lifetime of the process, logging why."""
        self.skip_list.append((device, oid))
        if reason is not None:
            self.log.warn('Muted \'{0}\' on \'{1}\', because: {2}'.format(
                oid, device, reason))

    def _get_value_walk(self, device, oid, host, port, community):
        """Fetch a value via SNMP WALK; expects exactly one returned entry."""
        data = self.walk(oid, host, port, community)
        if data is None:
            self._skip(device, oid, 'device down (#2)')
            return
        self.log.debug('Data received from WALK \'{0}\': [{1}]'.format(
            device, data))
        if len(data) != 1:
            self._skip(device, oid,
                       'unexpected response, data has {0} entries'.format(
                           len(data)))
            return
        # because we only allow 1-key dicts, we can pick the single value.
        # FIX: data.items()[0][1] is Python-2-only (dict views are not
        # indexable on Python 3); next(iter(...)) works on both.
        value = next(iter(data.values()))
        return value

    def _get_value(self, device, oid, host, port, community):
        """Fetch a value via SNMP GET, falling back to WALK for non-instances."""
        data = self.get(oid, host, port, community)
        if data is None:
            self._skip(device, oid, 'device down (#1)')
            return
        self.log.debug('Data received from GET \'{0}\': [{1}]'.format(
            device, data))
        if len(data) == 0:
            self._skip(device, oid, 'empty response, device down?')
            return
        if oid not in data:
            # oid is not even in hierarchy, happens when using 9.9.9.9
            # but not when using 1.9.9.9
            self._skip(device, oid, 'no object at OID (#1)')
            return
        value = data[oid]
        if value == 'No Such Object currently exists at this OID':
            self._skip(device, oid, 'no object at OID (#2)')
            return
        if value == 'No Such Instance currently exists at this OID':
            # The OID names a branch rather than a scalar instance: retry
            # with a WALK.
            return self._get_value_walk(device, oid, host, port, community)
        return value

    def collect_snmp(self, device, host, port, community):
        """
        Collect SNMP interface data from device
        """
        self.log.debug(
            'Collecting raw SNMP statistics from device \'{0}\''.format(device))
        # NOTE(review): the 'device' parameter is immediately shadowed by the
        # loop below, which iterates *all* configured devices while reusing
        # the single host/port/community passed in -- confirm this matches
        # how the parent collector invokes collect_snmp.
        for device in self.config['devices']:
            dev_config = self.config['devices'][device]
            if 'oids' not in dev_config:
                continue
            for oid, metricName in dev_config['oids'].items():
                if (device, oid) in self.skip_list:
                    self.log.debug(
                        'Skipping OID \'{0}\' ({1}) on device \'{2}\''.format(
                            oid, metricName, device))
                    continue
                timestamp = time.time()
                value = self._get_value(device, oid, host, port, community)
                if value is None:
                    continue
                self.log.debug(
                    '\'{0}\' ({1}) on device \'{2}\' - value=[{3}]'.format(
                        oid, metricName, device, value))
                path = '.'.join([self.config['path_prefix'], device,
                                 self.config['path_suffix'], metricName])
                metric = Metric(path, value, timestamp, self._precision(value),
                                None, 'GAUGE')
                self.publish_metric(metric)
|
datafiniti/Diamond
|
src/collectors/snmpraw/snmpraw.py
|
Python
|
mit
| 6,082
|
from distutils.core import setup

# Read the long description up front so the file handle is closed promptly
# (the previous bare open('README.md').read() leaked the handle).
with open('README.md') as readme:
    long_description = readme.read()

setup(
    name='ave',
    version='0.8.0',
    author='Sasha Matijasic',
    author_email='[email protected]',
    scripts=['ave.sh', ],
    url='https://github.com/selectnull/ave',
    # NOTE(review): 'license' usually carries a license *name* (e.g. 'MIT'),
    # not a file name -- confirm before changing the published metadata.
    license='LICENSE',
    description='ave Activates Virtual Environment',
    long_description=long_description,
)
|
selectnull/ave
|
setup.py
|
Python
|
mit
| 345
|
from setuptools import setup

# Note: subprocess and tkinter from the standard library are also required.

setup(
    name='PLOD',
    version='1.0',
    description='Matplotlib plot designer',
    author='David Kleiven',
    # BUG FIX: the keyword was misspelled 'licence', which setuptools does
    # not recognise, so the license metadata was silently dropped.
    license='MIT',
    author_email='[email protected]',
    install_requires=['numpy', 'matplotlib'],
    url='https://github.com/davidkleiven/PLOD',
    classifiers=[
        'Programming Language :: Python :: 3',
    ],
    #py_modules=['plotHandler', 'controlGUI'],
    packages=['PLOD']
)
|
davidkleiven/PLOD
|
setup.py
|
Python
|
mit
| 534
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import tensorflow as tf
class BaseConverter(object):
    """Static helpers that wrap raw Python values in TF-Feature protos."""

    @staticmethod
    def to_int64_feature(values):
        """Returns a TF-Feature of int64s.

        Args:
            values: A scalar or list of values.

        Returns:
            a TF-Feature.
        """
        wrapped = values if isinstance(values, list) else [values]
        return tf.train.Feature(int64_list=tf.train.Int64List(value=wrapped))

    @staticmethod
    def to_bytes_feature(values):
        """Returns a TF-Feature of bytes.

        Args:
            values: A string or list of strings.

        Returns:
            a TF-Feature.
        """
        wrapped = values if isinstance(values, list) else [values]
        return tf.train.Feature(bytes_list=tf.train.BytesList(value=wrapped))

    @staticmethod
    def to_float_feature(values):
        """Returns a TF-Feature of floats.

        Args:
            values: A scalar or list of floats.

        Returns:
            a TF-Feature.
        """
        wrapped = values if isinstance(values, list) else [values]
        return tf.train.Feature(float_list=tf.train.FloatList(value=wrapped))

    @classmethod
    def to_feature(cls, value, value_type):
        """Dispatch *value* to the converter registered for *value_type*."""
        dispatch = {
            'int': cls.to_int64_feature,
            'float': cls.to_float_feature,
            'bytes': cls.to_bytes_feature,
        }
        converter = dispatch.get(value_type)
        if converter is None:
            raise TypeError("value type: `{}` is not supported.".format(value_type))
        return converter(value)

    @classmethod
    def to_sequence_feature(cls, sequence, sequence_type):
        """Returns a FeatureList built from a list of features of type sequence_type.

        Args:
            sequence: list of values
            sequence_type: type of the sequence.

        Returns:
            a TF-FeatureList.
        """
        dispatch = {
            'int': cls.to_int64_feature,
            'float': cls.to_float_feature,
            'bytes': cls.to_bytes_feature,
        }
        if sequence_type not in dispatch:
            raise TypeError("sequence type: `{}` is not supported.".format(sequence_type))
        make_feature = dispatch[sequence_type]
        return tf.train.FeatureList(feature=[make_feature(item) for item in sequence])
|
polyaxon/polyaxon-api
|
polyaxon_lib/datasets/converters/base.py
|
Python
|
mit
| 2,397
|
from selvbetjening.settings_base import *

ROOT_URLCONF = 'sdemo.urls'

# installed applications
INSTALLED_APPS.extend([
    'selvbetjening.frontend.base',
    'selvbetjening.frontend.auth',
    'selvbetjening.frontend.userportal',
    'selvbetjening.frontend.eventportal',
    'selvbetjening.frontend.eventsingle',
    'selvbetjening.api.rest',
    'sdemo'
])

# The sdemo fixtures directory is missing from the default fixtures list,
# so add it manually here.
import os
FIXTURE_DIRS = (
    os.path.join(os.path.abspath(os.path.dirname(__file__)), 'fixtures'),
)

# Authentication redirect targets for Django's auth views.
LOGIN_REDIRECT_URL = '/'
LOGIN_URL = '/auth/log-in/'

# import localsettings, a per deployment configuration file
try:
    from settings_local import *
except ImportError:
    pass
|
animekita/selvbetjening
|
sdemo/settings.py
|
Python
|
mit
| 738
|
import os
from setuptools import setup
from twtxtcli import __version__, __project_name__, __project_link__
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's dir.

    Uses a context manager so the handle is closed promptly; the original
    bare open(...).read() leaked the handle until garbage collection.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as handle:
        return handle.read()
# Package metadata for twtxt-cli.  Name, version and homepage are imported
# from the twtxtcli package itself so they are defined in exactly one place.
# NOTE(review): license='BSD' disagrees with the MIT classifier below —
# confirm which one is correct.
setup(
    name=__project_name__,
    version=__version__,
    author='Myles Braithwaite',
    author_email='[email protected]',
    description='',
    license='BSD',
    keywords='twtxt',
    url=__project_link__,
    packages=['twtxtcli'],
    long_description=read('README'),
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Topic :: Utilities",
        "License :: OSI Approved :: MIT License",
    ],
    install_requires=[
        'requests',
        'clint',
        'humanize',
        'iso8601'
    ],
    entry_points={
        'console_scripts': [
            'twtxt-cli = twtxtcli.cli:main'
        ]
    }
)
|
myles/twtxt-cli
|
setup.py
|
Python
|
mit
| 850
|
from AboutWindow import AboutWindow as AboutWindow
from ActionListWidget import ActionListWidget as ActionListWidget
from ActionPushButton import ActionPushButton as ActionPushButton
from CameraWidget import CameraWidget as CameraWidget
from ConnectDialog import ConnectDialog as ConnectDialog
from MainWindow import MainWindow as MainWindow
from MovementWidget import MovementWidget as MovementWidget
from SpeechWidget import SpeechWidget as SpeechWidget
from SubmittableTextEdit import SubmittableTextEdit as SubmittableTextEdit
from SudokuBoard import SudokuBoard as SudokuBoard
from TimerWidget import TimerWidget as TimerWidget
|
mattBrzezinski/Hydrogen
|
robot-controller/UI/__init__.py
|
Python
|
mit
| 632
|
import os
def NormalizedJoin(*args):
    """Join the given path components and return the normalized result."""
    joined = os.path.join(*args)
    return os.path.normpath(joined)
|
DavidYen/YEngine
|
ypy/path_help.py
|
Python
|
mit
| 129
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-28 22:05
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Country and State tables.
    # State.country uses on_delete=PROTECT, so a Country cannot be deleted
    # while any State still references it.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Country',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=10)),
                ('name', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='State',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=10)),
                ('name', models.CharField(max_length=100)),
                ('country', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='api.Country')),
            ],
        ),
    ]
|
mchughmk/react-django
|
backend/api/migrations/0001_initial.py
|
Python
|
mit
| 1,092
|
# -*- coding: utf-8 -*-
"""
@author: Tobias Krauss
"""
from lib.Instruction import Instruction
import lib.PseudoInstruction as PI
import lib.StartVal as SV
from lib.PseudoInstruction import (PseudoInstruction,
PseudoOperand)
from lib.Register import (get_reg_class,
get_size_by_reg,
get_reg_by_size)
def add_ret_pop(inst_lst):
    """
    @brief converts the 'pop' instructions of 'vret'
    to 'vpop' / 'vpopf' PseudoInstructions
    @param inst_lst List of VmInstructions
    @return List of PseudoInstructions
    """
    result = []
    for vinst in inst_lst:
        if vinst.Pseudocode.inst_type != PI.RET_T:
            result.append(vinst.Pseudocode)
            continue
        # A vret first pops the saved state; emit an explicit vpop/vpopf for
        # every pop executed by the handler, then the vret itself.
        for native in vinst.all_instructions:
            if not native.is_pop():
                continue
            if len(native) != 1:
                result.append(PseudoInstruction('vpop', vinst.addr,
                                                [make_op(native, 1, -1)]))
            else:
                flags_op = PseudoOperand(PI.REGISTER_T,
                                         'flags',
                                         SV.dissassm_type,
                                         'flags')
                result.append(PseudoInstruction('vpopf', vinst.addr,
                                                [flags_op]))
        result.append(vinst.Pseudocode)
    return result
def to_vpush(p_lst, start_addr):
    """
    @brief Converts the 'push' instructions at the beginning
    of the virtual machine function to 'vpush' PseudoInstructions
    @param p_lst List of instructions
    @param start_addr Address where the PseudoInstruction must be
    placed
    @return List of PseudoInstructions
    """
    ret = []
    wrote_values = {}
    for inst in p_lst:
        if not inst.is_push():
            # Remember values established by plain movs so a later push of
            # that register can be folded into an immediate vpush below.
            if inst.is_mov():
                wrote_values[inst.get_op_str(1)] = inst.get_op_str(2)
            continue
        # NOTE(review): debug print left in library code — consider removing.
        print inst
        if len(inst) != 1:
            if inst.op_is_mem(1):
                if inst.is_rip_rel():
                    # Rebase the RIP-relative displacement onto the absolute
                    # address of the instruction that follows it.
                    disp = inst.get_op_disp(1)
                    disp += inst.addr + inst.opcode_len
                    new_op = PseudoOperand(PI.MEMORY_T,
                                           '[{0:#x}]'.format(disp),
                                           inst.get_op_size(1),
                                           '', None)
                else:
                    new_op = PseudoOperand(PI.MEMORY_T,
                                           inst.get_op_str(1),
                                           inst.get_op_size(1),
                                           '', None)
                ret.append(PseudoInstruction('vpush',
                                             start_addr,
                                             [new_op]))
            elif inst.op_is_mem_abs(1):
                new_op = PseudoOperand(PI.MEMORY_T,
                                       inst.get_op_str(1),
                                       inst.get_op_size(1),
                                       '', None)
                ret.append(PseudoInstruction('vpush',
                                             start_addr,
                                             [new_op]))
            elif inst.op_is_reg(1):
                wrote_value = False
                # If a previous mov wrote a known constant into this
                # register, push that constant instead of the register.
                if inst.get_op_str(1) in wrote_values:
                    new_op = PseudoOperand(PI.IMMEDIATE_T,
                                           wrote_values[inst.get_op_str(1)],
                                           inst.get_op_size(1),
                                           int(wrote_values[inst.get_op_str(1)], 16))
                    ret.append(PseudoInstruction('vpush',
                                                 start_addr,
                                                 [new_op]))
                else:
                    new_op = PseudoOperand(PI.REGISTER_T,
                                           inst.get_op_str(1),
                                           inst.get_op_size(1),
                                           inst.get_reg_name(1))
                    ret.append(PseudoInstruction('vpush',
                                                 start_addr,
                                                 [new_op]))
            elif inst.op_is_imm(1):
                new_op = PseudoOperand(PI.IMMEDIATE_T,
                                       inst.get_op_str(1),
                                       inst.get_op_size(1), '')
                ret.append(PseudoInstruction('vpush',
                                             start_addr,
                                             [new_op]))
        else:
            # Single-byte push == pushf: model it as vpushf of the flags.
            new_op = PseudoOperand(PI.REGISTER_T, 'flags',
                                   SV.dissassm_type, 'flags')
            p_inst = PseudoInstruction('vpushf', start_addr, [new_op])
            ret.append(p_inst)
    return ret
def make_op(inst, op, catch_value):
    """
    @brief convert operand *op* (1-based) of *inst* into a PseudoOperand
    @param inst Instruction with the Operand
    @param op number of op; op = 1 for first operand
    @param catch_value Value from the obfuscated code
    @return PseudoOperand, or None for absent/unsupported operands
    """
    if inst.get_op_str(op) is None:
        return None
    if inst.op_is_mem(op):
        return PseudoOperand(PI.MEMORY_T, inst.get_op_str(op),
                             inst.get_op_size(op), inst.get_reg_name(op),
                             catch_value)
    if inst.op_is_reg(op):
        return PseudoOperand(PI.REGISTER_T, inst.get_op_str(op),
                             inst.get_op_size(op), inst.get_reg_name(op))
    if inst.op_is_imm(op):
        return PseudoOperand(PI.IMMEDIATE_T, inst.get_op_str(op),
                             inst.get_op_size(op), inst.get_op_value(op))
    return None
def extend_signed_catch_val(reg, catch_value):
    """
    @brief Sign-extends catch_value to the disassembler's native width
    @param reg Register which contained the catch_value; its size selects
           the source width of the extension
    @param catch_value Value catched from obfuscated code
    @return Sign extended catch_value

    BUGFIX: the sign-bit thresholds were ``> 0x79`` / ``> 0x7900`` /
    ``> 0x79000000``, which both extended positive values (e.g. 0x7a)
    and do not match any two's-complement boundary.  A value of width N
    is negative exactly when its top bit is set, i.e. when it exceeds
    0x7f / 0x7fff / 0x7fffffff respectively.
    """
    reg_size = get_size_by_reg(reg)
    if reg_size == 8 and catch_value > 0x7f:
        if SV.dissassm_type == SV.ASSEMBLER_32:
            catch_value = 0xffffff00 + catch_value
        elif SV.dissassm_type == SV.ASSEMBLER_64:
            catch_value = 0xffffffffffffff00 + catch_value
    elif reg_size == 16 and catch_value > 0x7fff:
        if SV.dissassm_type == SV.ASSEMBLER_32:
            catch_value = 0xffff0000 + catch_value
        elif SV.dissassm_type == SV.ASSEMBLER_64:
            catch_value = 0xffffffffffff0000 + catch_value
    elif reg_size == 32 and catch_value > 0x7fffffff:
        # there is nothing to do for 32bit
        if SV.dissassm_type == SV.ASSEMBLER_64:
            catch_value = 0xffffffff00000000 + catch_value
    # there is nothing to do for reg_size == 64
    return catch_value
class VmInstruction(object):
"""
@brief Converts the exectued x86 code to the corresponding PseudoInstruction
"""
    def __init__(self, instr_lst, catch_value, catch_reg, inst_addr):
        """
        @param instr_lst List of x86 instructions
        @param catch_value Value that is catched from the virtual code
        or None if there is no value catched
        @param catch_reg Register in which the catch_value is moved
        @param inst_addr Address of the VmInstruction
        """
        self.all_instructions = instr_lst
        # Partition the stream: VM bookkeeping instructions go into
        # Vinstructions, the handler's semantic instructions into Instructions.
        self.Vinstructions = []
        self.Instructions = []
        self.is_signed = False
        for inst in instr_lst:
            if inst.is_vinst():
                self.Vinstructions.append(inst)
            else:
                self.Instructions.append(inst)
        self.Pseudocode = None
        self.catch_value = catch_value
        self.catch_reg = catch_reg
        self.addr = inst_addr
        # get_pseudo_code() assigns self.Pseudocode when a known handler
        # pattern matches; otherwise fall back to an UNDEF_T instruction
        # carrying the concatenated mnemonics for diagnostics.
        if not self.get_pseudo_code():
            mnem_str = ''
            for inst in self.all_instructions:
                mnem_str += str(inst)
            self.Pseudocode= PI.PseudoInstruction(mnem_str, inst_addr, [], 0, PI.UNDEF_T)
            print 'Did not find pseudocode at addr: {0:#x}'.format(inst_addr)
def __str__(self):
if self.Pseudocode is not None:
return str(self.Pseudocode)
else:
inst_str = ''
for item in self.all_instructions:
inst_str = inst_str + str(item) + '\n'
return inst_str
    def replace_catch_reg(self):
        """
        @brief replace the catch_register with its catch_value
        """
        # No operand was caught for this instruction -> nothing to substitute.
        if (self.catch_reg == ''):
            return
        # Values caught in a signed context must be sign-extended to the
        # disassembler's native width before substitution.
        if self.is_signed:
            self.catch_value = extend_signed_catch_val(self.catch_reg, self.catch_value)
        self.Pseudocode.replace_reg_class(self.catch_reg, self.catch_value)
def get_pseudo_code(self):
"""
@brief tests if its a known VmInstruction
@remark Those tests set the Pseudocode variable with the
corresponding PseudoInstruction
"""
if (self.is_push() or
self.is_pop()):
self.replace_catch_reg()
return True
elif (self.is_nor() or
self.is_add() or
self.is_jmp() or
self.is_write() or
self.is_read() or
self.is_shift_right() or
self.is_shift_left() or
self.is_shld() or
self.is_shrd() or
self.is_vcall() or
self.is_mov_ebp() or
self.is_vret() or
self.is_imul() or
self.is_idiv()):
return True
else:
return False
###########################
# helper functions #
###########################
def get_previous(self, method, pos):
"""
@brief Find previous instruction for which method evaluates True
@param method Evaluation method
@param pos Last position
"""
pos_lst = []
for prev_pos, inst in enumerate(self.Instructions):
if (prev_pos < pos) and method(inst):
pos_lst.append(prev_pos)
return pos_lst
def get_subsequent(self, method, pos):
"""
@brief Find subsequent instruction for which method evaluates True
@param method Evaluation method
@param pos First position
"""
pos_lst = []
for subs_pos, inst in enumerate(self.Instructions):
if (subs_pos > pos) and method(inst):
pos_lst.append(subs_pos)
return pos_lst
########################
# decision functions #
########################
    def is_push(self):
        """
        @brief Tests if the VmInstruction is a 'vpush'.
        If True sets the PseudoInstruction
        """
        # Find the stack-pointer decrement that makes room for the push; a
        # sign-extension of eax before it marks the pushed value as signed.
        for pos, inst in enumerate(self.Instructions):
            if(inst.is_sub_basepointer()):
                break
            if(get_reg_class(self.catch_reg) == get_reg_class('eax') and
               (inst.is_cwde() or inst.is_cbw() or inst.is_cdqe())):
                self.is_signed = True
        else : # no break
            return False
        # Exactly one write to the stack after the sub realises the push.
        pos_pmov_lst = self.get_subsequent(Instruction.is_write_stack, pos)
        if len(pos_pmov_lst) != 1:
            return False
        push_inst = self.Instructions[pos_pmov_lst[0]]
        pos_mov_lst = self.get_previous(Instruction.is_mov, pos)
        push_op = make_op(push_inst, 2, self.catch_value)
        # Trace the pushed register back to the mov that produced its value.
        for pos_mov in pos_mov_lst:
            pos_mov_inst = self.Instructions[pos_mov]
            if pos_mov_inst.is_read_stack():
                return False
            if((get_reg_class(push_inst.get_op_str(2)) ==
                get_reg_class(pos_mov_inst.get_op_str(1))) and
               get_reg_class(push_inst.get_op_str(2)) != None): # too strong condition
                push_op = make_op(pos_mov_inst, 2, self.catch_value)
        sub_value = self.Instructions[pos].get_op_value(2)
        self.Pseudocode = PseudoInstruction('vpush', self.addr, [push_op], sub_value)
        return True
    # control in comp.vmp loc4041c8
    # are the fetch size and the add/sub size equal?
    def is_pop(self):
        """
        @brief Tests if the VmInstruction is a 'vpop'.
        If True sets the PseudoInstruction
        """
        # A pop is recognised by the stack-pointer increment that releases
        # the slot after the value has been read.
        for pos, inst in enumerate(self.Instructions):
            if(inst.is_add_basepointer()):
                break
        else : # no break
            return False
        pos_pmov_lst = self.get_previous(Instruction.is_read_stack, pos)
        if len(pos_pmov_lst) == 0:
            return False
        for ppos in pos_pmov_lst:
            pop_inst = self.Instructions[ppos] # get last pop_mov inst in case there are more
        pop_op = make_op(pop_inst, 1, self.catch_value)
        pos_mov_lst = self.get_subsequent(Instruction.is_mov, pos)
        op_pos = ppos
        # Follow the popped value forward to the mov that stores it.
        for pos_mov in pos_mov_lst:
            pos_mov_inst = self.Instructions[pos_mov]
            if(pos_mov_inst.is_write_stack()):
                return False
            if((get_reg_class(pop_inst.get_op_str(1)) ==
                get_reg_class(pos_mov_inst.get_op_str(2))) and
               get_reg_class(pop_inst.get_op_str(1))): #maybe too weak
                pop_op = make_op(pos_mov_inst, 1, self.catch_value)
                op_pos = pos_mov
        # The destination of a vpop must be a memory operand.
        if(not self.Instructions[op_pos].op_is_mem(1)):
            return False
        add_value = self.Instructions[pos].get_op_value(2)
        self.Pseudocode = PseudoInstruction('vpop', self.addr,
                                            [pop_op], add_value)
        #print 'vpop'
        return True
#TODO add with two regs
    def is_add(self):
        """
        @brief Tests if the VmInstruction is a 'vadd'.
        If True sets the PseudoInstruction
        """
        # TODO: add with two regs
        for pos, inst in enumerate(self.Instructions):
            if(inst.is_add() and not inst.op_is_imm(2)):
                break
        else: # no break
            return False
        pos_mov = self.get_previous(Instruction.is_mov, pos)
        # use opstr? (original note: "mit opstr?")
        opstr = self.Instructions[pos].get_op_str(2)
        # Find the mov that loaded the add's second operand register.
        for pos0 in pos_mov:
            if opstr == self.Instructions[pos0].get_op_str(1):
                self.Pseudocode = PseudoInstruction('vadd', self.addr,
                    [make_op(self.Instructions[pos], 1, self.catch_value),
                     make_op(self.Instructions[pos0], 2, self.catch_value)], SV.dissassm_type / 8)
                break
        else:
            return False
        return True
    def is_nor(self):
        """
        @brief Tests if the VmInstruction is a 'vnor'.
        If True sets the PseudoInstruction
        """
        # 1. search for and with 2 different registers
        and_found = False
        reg0 = ''
        reg1 = ''
        and_size = 0
        for pos, inst in enumerate(self.Instructions):
            if inst.is_and():
                reg0 = inst.get_reg_name(1)
                reg1 = inst.get_reg_name(2)
                and_size = inst.get_mov_size()
                if reg0 != reg1:
                    and_found = True
                    break
        if not and_found:
            return False
        # NOR = AND of the two NOTed inputs; require that the NOTs preceding
        # the AND together cover both operands (sizes must sum to 2x).
        pos_not = self.get_previous(Instruction.is_not, pos)
        #if len(pos_not) < 1 or len(pos_not) > 2:
        #    return False
        not_size = 0
        for posn in pos_not:
            not_size += (self.Instructions[posn].Instruction.operands[0].size / 8)
        if(not_size != 2 * and_size):
            return False
        pos_mov = self.get_previous(Instruction.is_mov, pos)
        #if len(pos_mov) != 2:
        #    return False
        mov_r0 = False
        mov_r1 = False
        op1 = make_op(self.Instructions[pos], 1, self.catch_value)
        op2 = make_op(self.Instructions[pos], 2, self.catch_value)
        for pos_reg0 in pos_mov:
            if (get_reg_class(reg0) ==
                    get_reg_class(self.Instructions[pos_reg0].get_reg_name(1))):
                mov_r0 = True
                break
        for pos_reg1 in pos_mov:
            if (get_reg_class(reg1) ==
                    get_reg_class(self.Instructions[pos_reg1].get_reg_name(1))):
                mov_r1 = True
                break
        if mov_r0:
            op1 = make_op(self.Instructions[pos_reg0], 2, self.catch_value)
        if mov_r1:
            op2 = make_op(self.Instructions[pos_reg1], 2, self.catch_value)
        #quick fix correct !!!
        # NOTE(review): op1 is a PseudoOperand here, yet .replace() is called
        # on it like a string — confirm PseudoOperand implements replace().
        if(op1.register == 'ebp') and (and_size == 2):
            op1 = op1.replace('+0x4', '+0x2')
        self.Pseudocode = PseudoInstruction('vnor', self.addr, [op1, op2], and_size)
        return True
    def is_jmp(self):
        """
        @brief Tests if the VmInstruction is a 'vjmp'.
        If True sets the PseudoInstruction
        """
        for pos, inst in enumerate(self.all_instructions):
            if(inst.is_add_basepointer()):
                break
        else : # no break
            return False
        # A vjmp must also reload the virtual instruction pointer before
        # the base-pointer adjustment.
        prev_pos = 0
        while prev_pos < pos:
            if self.all_instructions[prev_pos].is_isp_mov():
                break
            prev_pos = prev_pos + 1
        else: # no break (loop condition became false without a break)
            return False
        add_value = self.all_instructions[pos].get_op_value(2)
        self.Pseudocode = PseudoInstruction(
            'vjmp', self.addr,
            [make_op(self.all_instructions[prev_pos], 2, self.catch_value)], add_value)
        return True
    def is_write(self):
        """
        @brief Tests if the VmInstruction is a 'vwrite'.
        If True sets the PseudoInstruction
        """
        reg0 = ''
        reg1 = ''
        mov_size = 0
        sub_size = 0
        # A vwrite stores through a non-stack memory operand.
        for pos, inst in enumerate(self.all_instructions):
            if inst.op_is_mem(1) and not inst.is_write_stack():
                reg0 = inst.get_reg_name(1)
                reg1 = inst.get_reg_name(2)
                mov_size = inst.get_mov_size()
                break
        else: # no break
            return False
        for subpos, inst in enumerate(self.Instructions):
            if(inst.is_add_basepointer()):
                sub_size = inst.get_op_value(2)
                break
        else : # no break
            return False
        # NOTE(review): pos indexes all_instructions but get_previous walks
        # self.Instructions — confirm the two index spaces line up here.
        pos_mov = self.get_previous(Instruction.is_mov, pos)
        mov_r0 = False
        mov_r1 = False
        for pos_reg0 in pos_mov:
            if (get_reg_class(reg0) ==
                    get_reg_class(self.Instructions[pos_reg0].get_reg_name(1))):
                mov_r0 = True
                break
        for pos_reg1 in pos_mov:
            if (get_reg_class(reg1) ==
                    get_reg_class(self.Instructions[pos_reg1].get_reg_name(1))):
                mov_r1 = True
                break
        if mov_r0 and mov_r1:
            op1_inst = self.Instructions[pos_reg0]
            op1 = PseudoOperand(PI.REFERENCE_T, op1_inst.get_op_str(2),
                                op1_inst.get_op_size(2), op1_inst.get_reg_name(2))
            op2 = make_op(self.Instructions[pos_reg1], 2, self.catch_value)
            self.Pseudocode = PseudoInstruction('vwrite', self.addr,
                [op1, op2], mov_size, PI.WRITE_T, PI.IN2_OUT0, sub_size)
            return True
        else:
            return False
    def is_read(self):
        """
        @brief Tests if the VmInstruction is a 'vread'.
        If True sets the PseudoInstruction
        """
        reg0 = ''
        reg1 = ''
        mov_size = 0
        # A vread loads through a non-stack memory operand.
        for pos, inst in enumerate(self.all_instructions):
            if inst.op_is_mem(2) and not inst.is_read_stack():
                reg0 = inst.get_reg_name(1)
                reg1 = inst.get_reg_name(2)
                mov_size = inst.get_mov_size()
                break
        else: # no break
            return False
        prev_mov = self.get_previous(Instruction.is_mov, pos)
        post_mov = self.get_subsequent(Instruction.is_mov, pos)
        # The address register must have been loaded before the read ...
        for prev_pos in prev_mov:
            if(get_reg_class(reg1) ==
                    get_reg_class(self.Instructions[prev_pos].get_reg_name(1))):
                break
        else: # no break
            return False
        # ... and the destination register must be stored afterwards.
        for post_pos in post_mov:
            if(get_reg_class(reg0) ==
                    get_reg_class(self.Instructions[post_pos].get_reg_name(2))):
                push_size = self.Instructions[post_pos].get_mov_size()
                break
        else: # no break
            return False
        # wta = write to address
        #if mov_size == 1:
        op1 = make_op(self.Instructions[post_pos], 1, self.catch_value)
        op2_inst = self.Instructions[prev_pos]
        op2 = PseudoOperand(PI.REFERENCE_T, op2_inst.get_op_str(2),
                            op2_inst.get_op_size(2), op2_inst.get_reg_name(2))
        self.Pseudocode = PseudoInstruction('vread', self.addr,
            [op1, op2], mov_size, PI.READ_T, PI.IN1_OUT1 , push_size)
        return True
    def is_shift_right(self):
        """
        @brief Tests if the VmInstruction is a 'vshr'.
        If True sets the PseudoInstruction
        """
        # 1. search for a shr with 2 different registers
        and_found = False
        reg0 = ''
        reg1 = ''
        for pos, inst in enumerate(self.Instructions):
            if inst.is_shr() and inst.op_is_reg(1) and inst.op_is_reg(2):
                reg0 = inst.get_reg_name(1)
                reg1 = inst.get_reg_name(2)
                if reg0 != reg1:
                    and_found = True
                    break
        if not and_found:
            return False
        # Both shift operands must come from exactly two preceding movs.
        pos_mov = self.get_previous(Instruction.is_mov, pos)
        if len(pos_mov) != 2:
            return False
        mov_r0 = False
        mov_r1 = False
        for pos_reg0 in pos_mov:
            if (get_reg_class(reg0) ==
                    get_reg_class(self.Instructions[pos_reg0].get_reg_name(1))):
                mov_r0 = True
                break
        for pos_reg1 in pos_mov:
            if (get_reg_class(reg1) ==
                    get_reg_class(self.Instructions[pos_reg1].get_reg_name(1))):
                mov_r1 = True
                break
        # The result register must be stored somewhere afterwards; its mov
        # size becomes the result size of the vshr.
        post_mov = self.get_subsequent(Instruction.is_mov, pos)
        for save_mov in post_mov:
            if (get_reg_class(reg0) ==
                    get_reg_class(self.Instructions[save_mov].get_reg_name(2))):
                ret_size = self.Instructions[save_mov].get_mov_size()
                break
        else: # no break
            return False
        if mov_r0 and mov_r1:
            # TODO: handle byte/word/etc. operand sizes
            self.Pseudocode = PseudoInstruction('vshr', self.addr,
                [make_op(self.Instructions[pos_reg0], 2, self.catch_value),
                 make_op(self.Instructions[pos_reg1], 2, self.catch_value)],
                ret_size)
            return True
        else:
            return False
    def is_shift_left(self):
        """
        @brief Tests if the VmInstruction is a 'vshl'.
        If True sets the PseudoInstruction
        """
        # 1. search for a shl with 2 different registers
        and_found = False
        reg0 = ''
        reg1 = ''
        for pos, inst in enumerate(self.Instructions):
            if inst.is_shl() and inst.op_is_reg(1) and inst.op_is_reg(2):
                reg0 = inst.get_reg_name(1)
                reg1 = inst.get_reg_name(2)
                if reg0 != reg1:
                    and_found = True
                    break
        if not and_found:
            return False
        # Both shift operands must come from exactly two preceding movs.
        pos_mov = self.get_previous(Instruction.is_mov, pos)
        if len(pos_mov) != 2:
            return False
        mov_r0 = False
        mov_r1 = False
        for pos_reg0 in pos_mov:
            if (get_reg_class(reg0) ==
                    get_reg_class(self.Instructions[pos_reg0].get_reg_name(1))):
                mov_r0 = True
                break
        for pos_reg1 in pos_mov:
            if (get_reg_class(reg1) ==
                    get_reg_class(self.Instructions[pos_reg1].get_reg_name(1))):
                mov_r1 = True
                break
        # The result register must be stored somewhere afterwards; its mov
        # size becomes the result size of the vshl.
        post_mov = self.get_subsequent(Instruction.is_mov, pos)
        for save_mov in post_mov:
            if (get_reg_class(reg0) ==
                    get_reg_class(self.Instructions[save_mov].get_reg_name(2))):
                ret_size = self.Instructions[save_mov].get_mov_size()
                break
        else: # no break
            return False
        if mov_r0 and mov_r1:
            # TODO: handle byte/word/etc. operand sizes
            self.Pseudocode = PseudoInstruction('vshl', self.addr,
                [make_op(self.Instructions[pos_reg0], 2, self.catch_value),
                 make_op(self.Instructions[pos_reg1], 2, self.catch_value)],
                ret_size)
            return True
        else:
            return False
    def is_shrd(self):
        """
        @brief Tests if the VmInstruction is a 'vshrd'.
        If True sets the PseudoInstruction
        """
        # Look for a three-register shrd (double-precision shift right).
        and_found = False
        reg0 = ''
        reg1 = ''
        reg2 = ''
        for pos, inst in enumerate(self.Instructions):
            if (inst.is_shrd() and inst.op_is_reg(1) and inst.op_is_reg(2)
                    and inst.op_is_reg(3)):
                reg0 = inst.get_reg_name(1)
                reg1 = inst.get_reg_name(2)
                reg2 = inst.get_reg_name(3)
                if reg0 != reg1:
                    and_found = True
                    break
        if not and_found:
            return False
        # Each of the three registers must have been loaded by an earlier mov.
        prev_mov = self.get_previous(Instruction.is_mov, pos)
        for prev_pos0 in prev_mov:
            if (get_reg_class(reg0) ==
                    get_reg_class(self.Instructions[prev_pos0].get_reg_name(1))):
                break
        else: # no break
            return False
        for prev_pos1 in prev_mov:
            if (get_reg_class(reg1) ==
                    get_reg_class(self.Instructions[prev_pos1].get_reg_name(1))):
                break
        else: # no break
            return False
        for prev_pos2 in prev_mov:
            if (get_reg_class(reg2) ==
                    get_reg_class(self.Instructions[prev_pos2].get_reg_name(1))):
                break
        else: # no break
            return False
        self.Pseudocode = PseudoInstruction('vshrd', self.addr,
            [make_op(self.Instructions[prev_pos0], 2, self.catch_value),
             make_op(self.Instructions[prev_pos1], 2, self.catch_value),
             make_op(self.Instructions[prev_pos2], 2, self.catch_value)])
        return True
    def is_shld(self):
        """
        @brief Tests if the VmInstruction is a 'vshld'.
        If True sets the PseudoInstruction
        """
        # Look for a three-register shld (double-precision shift left).
        and_found = False
        reg0 = ''
        reg1 = ''
        reg2 = ''
        for pos, inst in enumerate(self.Instructions):
            if (inst.is_shld() and inst.op_is_reg(1) and inst.op_is_reg(2)
                    and inst.op_is_reg(3)):
                reg0 = inst.get_reg_name(1)
                reg1 = inst.get_reg_name(2)
                reg2 = inst.get_reg_name(3)
                if reg0 != reg1:
                    and_found = True
                    break
        if not and_found:
            return False
        # Each of the three registers must have been loaded by an earlier mov.
        prev_mov = self.get_previous(Instruction.is_mov, pos)
        for prev_pos0 in prev_mov:
            if (get_reg_class(reg0) ==
                    get_reg_class(self.Instructions[prev_pos0].get_reg_name(1))):
                break
        else: # no break
            return False
        for prev_pos1 in prev_mov:
            if (get_reg_class(reg1) ==
                    get_reg_class(self.Instructions[prev_pos1].get_reg_name(1))):
                break
        else: # no break
            return False
        for prev_pos2 in prev_mov:
            if (get_reg_class(reg2) ==
                    get_reg_class(self.Instructions[prev_pos2].get_reg_name(1))):
                break
        else: # no break
            return False
        self.Pseudocode = PseudoInstruction('vshld', self.addr,
            [make_op(self.Instructions[prev_pos0], 2, self.catch_value),
             make_op(self.Instructions[prev_pos1], 2, self.catch_value),
             make_op(self.Instructions[prev_pos2], 2, self.catch_value)])
        return True
    def is_vcall(self):
        """
        @brief Tests if the VmInstruction is a 'vcall'.
        If True sets the PseudoInstruction
        """
        for pos, inst in enumerate(self.Instructions):
            if(inst.is_call()):
                break
        else : # no break
            return False
        # Default operand is the call's own target string; prefer the value
        # moved into the called register beforehand when such a mov exists.
        # NOTE(review): when no mov matches, op1 stays a plain string rather
        # than a PseudoOperand — confirm downstream consumers accept both.
        op1 = self.Instructions[pos].get_op_str(1)
        prev_mov = self.get_previous(Instruction.is_mov, pos)
        for prev_pos in prev_mov:
            if (get_reg_class(self.Instructions[pos].get_reg_name(1)) ==
                    get_reg_class(self.Instructions[prev_pos].get_reg_name(1))):
                op1 = make_op(self.Instructions[prev_pos], 2, self.catch_value)
        self.Pseudocode = PseudoInstruction('vcall', self.addr, [op1])
        return True
def is_vret(self):
"""
@brief Tests if the VmInstruction is a 'vret'.
If True sets the PseudoInstruction
"""
for pos, inst in enumerate(self.Instructions):
if(inst.is_ret()):
break
else : # no break
return False
self.Pseudocode = PseudoInstruction('vret', self.addr)
return True
    def is_mov_ebp(self):
        """
        @brief Tests if the VmInstruction is a 'vebp_mov'.
        If True sets the PseudoInstruction
        """
        op1 = ''
        op2 = ''
        # Look for a mov whose source and destination both belong to the
        # ebp register class (virtual stack pointer manipulation).
        for pos, inst in enumerate(self.Instructions):
            if(inst.is_mov() and
               get_reg_class(inst.get_reg_name(1)) == get_reg_class('ebp') and
               get_reg_class(inst.get_reg_name(2)) == get_reg_class('ebp')):
                op1 = make_op(inst, 1, self.catch_value)
                op2 = make_op(inst, 2, self.catch_value)
                break
        else : # no break
            return False
        self.Pseudocode = PseudoInstruction('vebp_mov', self.addr, [op1, op2])
        return True
def is_imul(self):
"""
@brief Tests if the VmInstruction is a 'vimul'.
If True sets the PseudoInstruction
"""
reg0 = ''
reg1 = ''
mul_found = False
for pos, inst in enumerate(self.Instructions):
if (inst.is_imul() and inst.op_is_reg(1)):
reg0 = inst.get_reg_name(1)
if inst.get_reg_name(2) == None:
reg1 = get_reg_by_size(get_reg_class('eax'), SV.dissassm_type)
else:
reg1 = inst.get_reg_name(2)
if reg0 != reg1:
mul_found = True
break
if not mul_found:
return False
pos_mov = self.get_previous(Instruction.is_mov, pos)
for pos_reg0 in pos_mov:
if (get_reg_class(reg0) ==
get_reg_class(self.Instructions[pos_reg0].get_reg_name(1))):
mov_r0 = True
break
for pos_reg1 in pos_mov:
if (get_reg_class(reg1) ==
get_reg_class(self.Instructions[pos_reg1].get_reg_name(1))):
mov_r1 = True
break
if mov_r0 and mov_r1:
self.Pseudocode = PseudoInstruction('vimul', self.addr,
[make_op(self.Instructions[pos_reg0], 2, self.catch_value),
make_op(self.Instructions[pos_reg1], 2, self.catch_value)],
SV.dissassm_type / 8, PI.IMUL_T, PI.IN2_OUT3)
return True
else:
return False
def is_idiv(self):
"""
@brief Tests if the VmInstruction is a 'vimul'.
If True sets the PseudoInstruction
"""
reg0 = ''
reg1 = ''
op_name = ''
div_found = False
for pos, inst in enumerate(self.Instructions):
if (inst.is_idiv()):
reg0 = get_reg_by_size(get_reg_class('eax'), SV.dissassm_type)
reg1 = get_reg_by_size(get_reg_class('edx'), SV.dissassm_type)
op_name = inst.get_op_str(1)
div_found = True
if not div_found:
return False
pos_mov = self.get_previous(Instruction.is_mov, pos)
for pos_reg0 in pos_mov:
if (get_reg_class(reg0) ==
get_reg_class(self.Instructions[pos_reg0].get_reg_name(1))):
mov_r0 = True
break
for pos_reg1 in pos_mov:
if (get_reg_class(reg1) ==
get_reg_class(self.Instructions[pos_reg1].get_reg_name(1))):
mov_r1 = True
break
if mov_r0 and mov_r1:
self.Pseudocode = PseudoInstruction('vidiv', self.addr,
[make_op(self.Instructions[pos_reg0], 2, self.catch_value),
make_op(self.Instructions[pos_reg1], 2, self.catch_value),
make_op(self.Instructions[pos], 1, self.catch_value)],
SV.dissassm_type / 8, PI.DIV_T, PI.IN3_OUT3)
return True
else:
return False
|
anatolikalysch/VMAttack
|
lib/VmInstruction.py
|
Python
|
mit
| 33,455
|
## -*- coding: utf-8 -*-
# Copyright © 2011-2012 Mike Fled <[email protected]>
import codecs, os, re, sys

# encoding used for the text files
_file_encoding = "utf-8"


def _read_text(path):
    """Return the decoded contents of *path*, closing the handle promptly.

    The original codecs.open(...).read() one-liners leaked their file
    handles until garbage collection; a context manager avoids that.
    """
    with codecs.open(path, "r", _file_encoding) as handle:
        return handle.read()

# get current working directory
_cur_dir = os.path.dirname(os.path.realpath(__file__))

# get version string from the yarest package
_ver_file = os.path.join(_cur_dir, "yarest", "__init__.py")
_ver_pattern = r"^__version__ = ['\"]([^'\"]*)['\"]"
_ver_data = _read_text(_ver_file)
_ver_matches = re.search(_ver_pattern, _ver_data, re.M)

if _ver_matches:
    _ver_string = _ver_matches.group(1)
else:
    raise RuntimeError("Couldn't find version info in '%s'" % (_ver_file))

# create the long description from the README plus the changelog
_readme_data = _read_text(os.path.join(_cur_dir, "README.txt"))
_change_data = _read_text(os.path.join(_cur_dir, "CHANGELOG.txt"))
_long_description = _readme_data + "\n\n" + _change_data

# embed the gui resource files if creating a source distribution
if len(sys.argv) >= 2 and sys.argv[1] == "sdist":
    sys.path.append(os.path.join(_cur_dir, "resources"))
    import embed_docs
    embed_docs.embed()
    import embed_images
    embed_images.embed()
    sys.path.remove(os.path.join(_cur_dir, "resources"))

# in the event we want to package just the core API and
# can already guarantee dependencies are met, then the
# following should allow for distutils to do the setup
#
# from distutils.core import setup
# _packages = [ "yarest" ]
# _kw = {}
#
# the gui however depends on entry_points being available
# so we import setuptools if installed or use distribute
#
# we could also in theory use distutils with the gui, to
# do so we would have to supply our own launcher script;
# see the "yarest_ui.py" testing script for the example.
try:
    from setuptools import setup
except ImportError:
    from distribute_setup import use_setuptools
    use_setuptools()
    from setuptools import setup

_packages = [ "yarest", "yarest.gui" ]
_kw = { "entry_points": { "gui_scripts": ["yarest=yarest.gui:run"] },
        "install_requires": [ "ssh", "configobj", "psutil" ],
        "zip_safe": False}

setup(name = "yarest",
      version = _ver_string,
      description = "Yet Another REmote Support Tool",
      long_description = _long_description,
      author = "Mike Fled",
      author_email = "[email protected]",
      url = "http://code.google.com/p/yarest/",
      packages = _packages,
      license = "MIT",
      platforms = "Posix; MacOS X; Windows",
      classifiers = [ "Development Status :: 4 - Beta",
                      "Intended Audience :: Information Technology",
                      "Intended Audience :: Developers",
                      "License :: OSI Approved :: MIT License",
                      "Operating System :: OS Independent",
                      "Programming Language :: Python :: 2.6",
                      "Programming Language :: Python :: 2.7",
                      "Topic :: Internet",
                      "Topic :: Security",
                      "Topic :: System :: Networking" ],
      **_kw
      )
|
tectronics/yarest
|
setup.py
|
Python
|
mit
| 3,188
|
from django.apps import AppConfig
class CreatorConfig(AppConfig):
    """Django AppConfig for the ``creator`` application."""
    name = 'creator'
|
HenryCorse/Project_Southstar
|
CharacterApp/creator/apps.py
|
Python
|
mit
| 89
|
#!/usr/bin/env python3
# transpiled with BefunCompile v1.3.0 (c) 2017
import gzip, base64
_g = ("AR+LCAAAAAAABACdUDGOAyEM/AoHW7FBYna5XIKQdQ9B3BUr0VJZKXj8mZAUKXMuzGA8nsHsse3h8/x1uaq3g/RxHNpa8PtcxQ3btQEu/YP8NMA0pWdODzAm0sSU4TLf"
+ "qw1hRUVItKFGrJ36QD5ThIum/DDZPM4ldiHuaApBkqAaUC1Qfz/6Q3l59bFAFZFs54tluRSpdadvWlUfc8pIojt9jfge7p5hijfJsDenVZk05/L9nbDmYQWzscjCnHxg"
+ "G0uzA4WKvQIqlSxa2WmvRY+MUwbKLDJOWJP8B/NXo/XoAQAA")
# Decode the embedded Befunge playfield: the first byte of the base64 blob
# is the number of gzip layers; the remainder is the (possibly multiply-)
# gzipped 61x8 program grid, expanded into a flat mutable list of bytes.
g = base64.b64decode(_g)[1:]
for i in range(base64.b64decode(_g)[0]):
    g = gzip.decompress(g)
g=list(g)
def gr(x,y):
    # Bounds-checked read from the 61x8 playfield; reads outside the grid
    # yield 0 (the transpiler's convention for out-of-range fetches).
    if(x>=0 and y>=0 and x<61 and y<8):
        return g[y*61 + x];
    return 0;
def gw(x,y,v):
    # Bounds-checked write to the playfield; writes outside the grid are
    # silently ignored.
    if(x>=0 and y>=0 and x<61 and y<8):
        g[y*61 + x]=v;
def td(a, b):
    """Befunge integer division: floor-divide ``a`` by ``b``.

    Division by zero is defined to yield 0, matching the Befunge-93
    convention used by the transpiler.
    """
    if b == 0:
        return 0
    return a // b
def tm(a, b):
    """Befunge modulo: ``a`` mod ``b``, with modulo-by-zero defined as 0."""
    if b == 0:
        return 0
    return a % b
# Befunge data stack. Popping or peeking an empty stack yields 0, per the
# Befunge-93 specification.
s=[]
def sp():
    # Pop: remove and return the top of the stack (0 when empty).
    global s
    if (len(s) == 0):
        return 0
    return s.pop()
def sa(v):
    # Push v onto the stack.
    global s
    s.append(v)
def sr():
    # Peek: return the top of the stack without removing it (0 when empty).
    global s
    if (len(s) == 0):
        return 0
    return s[-1]
# --- Compiled Befunge basic blocks ------------------------------------------
# Each _N function is one basic block of the transpiled Befunge program
# (Project Euler 24: digits of the millionth lexicographic permutation of
# 0-9). Every handler mutates the playfield (gr/gw), the stack (sp/sa/sr)
# and the scratch register t0, then returns the index of the next block to
# run. Machine-generated: do not hand-edit the logic.
def _0():
    # Initialise: remaining-permutation counter and digit count.
    gw(1,1,999999)
    gw(2,1,9)
    return 1
def _1():
    global t0
    t0=gr(2,1)
    return (3)if(gr(2,1)!=-1)else(2)
def _2():
    # Returning 24 steps outside the jump table and halts the program.
    return 24
def _3():
    global t0
    return (4)if((t0)!=0)else(23)
def _4():
    sa(0)
    sa(gr(2,1))
    sa(gr(2,1)-1)
    sa(gr(2,1)-1)
    return 5
def _5():
    return (22)if(sp()!=0)else(6)
def _6():
    sp();
    sa(sp()*1)
    return 7
def _7():
    v0=sp()
    v1=sp()
    sa(v0)
    sa(v1)
    sa(sr());
    return (21)if(sp()!=0)else(8)
def _8():
    sp();
    sa(sr());
    return (9)if(sp()!=0)else(20)
def _9():
    gw(3,1,sp())
    return 10
def _10():
    gw(4,1,1)
    return 11
def _11():
    return (12)if((gr(3,1)*gr(4,1))>gr(1,1))else(19)
def _12():
    sa(gr(4,1))
    return 13
def _13():
    sa(1)
    sa(gr(1,0)-120)
    return 14
def _14():
    return (18)if(sp()!=0)else(15)
def _15():
    sa(sp()+1)
    v0=sp()
    v1=sp()
    sa(v0)
    sa(v1)
    sa(sr());
    return (17)if(sp()!=0)else(16)
def _16():
    # Emits one digit of the answer (as a character) and updates the
    # remaining-permutations counter before looping back to block 1.
    global t0
    sp();
    sa(sp()-1)
    sa(sr());
    sa(0)
    v0=sp()
    t0=gr(sp(),v0)
    t0=t0-48
    sa(120)
    v0=sp()
    v1=sp()
    sa(v0)
    sa(v1)
    sa(0)
    v0=sp()
    v1=sp()
    gw(v1,v0,sp())
    gw(1,1,gr(1,1)-(gr(3,1)*(gr(4,1)-1)))
    gw(2,1,gr(2,1)-1)
    t0=t0+48
    print(chr(t0),end="",flush=True)
    return 1
def _17():
    v0=sp()
    v1=sp()
    sa(v0)
    sa(v1)
    sa(sr());
    sa(0)
    v0=sp()
    sa(gr(sp(),v0))
    sa(sp()-120)
    return 14
def _18():
    v0=sp()
    v1=sp()
    sa(v0)
    sa(v1)
    sa(sp()-1)
    v0=sp()
    v1=sp()
    sa(v0)
    sa(v1)
    return 15
def _19():
    gw(4,1,gr(4,1)+1)
    return 11
def _20():
    gw(3,1,1)
    sp();
    return 10
def _21():
    sa(sp()*sp());
    return 7
def _22():
    sa(sr()-1)
    sa(sr());
    return 5
def _23():
    global t0
    t0=0
    sa(1)
    return 13
# Jump table of compiled basic blocks. Execution starts at block 0; each
# handler returns the next block index, and a return value of 24 (from _2)
# falls outside the table and terminates the loop.
m=[_0,_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14,_15,_16,_17,_18,_19,_20,_21,_22,_23]
c=0
while c<24:
    c=m[c]()
|
Mikescher/Project-Euler_Befunge
|
compiled/Python3/Euler_Problem-024.py
|
Python
|
mit
| 2,977
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Tomato.tests
"""
import unittest
import msgpack
import time
try:
from PIL import Image
except ImportError:
import Image
from tomato.swf_processor import Swf
from tomato.exceptions_tomato import MovieClipDoesNotExist
from tomato.utils import bits_list2string, Bits, SignedBits as SB, FixedPointBits as FB, MATRIX
def test_matrix(scale=None, rotate=None, translate=(0,0)):
    """Build the same MATRIX record two ways and report whether the two
    generated bit values agree: once via ``generate()``, and once by
    setting each component field by hand and regenerating the bits.
    """
    generated = MATRIX().generate(
        scale=scale,
        translate=translate,
        rotate=rotate
    )
    manual = MATRIX()
    # Apply each truthy component pair to its two record fields, in the
    # same order as the generate() path: scale, rotate, translate.
    for pair, fields in (
            (scale, ('scale_x', 'scale_y')),
            (rotate, ('rotate_skew0', 'rotate_skew1')),
            (translate, ('translate_x', 'translate_y'))):
        if pair:
            manual.setattr_value(fields[0], pair[0])
            manual.setattr_value(fields[1], pair[1])
    manual.generate_bits()
    return generated.value == manual.value
class TestSwfProcessor(unittest.TestCase):
    """Tests for tomato's SWF processing: bit-level field codecs, MATRIX
    records and movie-clip manipulation on the bundled sample SWF files."""
    def setUp(self):
        # NOTE(review): sample files are opened in text mode and the
        # handles are never closed; "rb" + a context manager would be
        # safer — confirm before changing, Swf() may tolerate either.
        self.swf_bitmap = Swf(open('sample/bitmap/bitmap.swf').read())
        self.swf_tank = Swf(open('sample/mc/tank.swf').read())
    def test_bits(self):
        # Round-trip plain, signed and fixed-point bit fields.
        int_num = 31415
        signed_num = -27182
        float_num = 1.6180339
        self.assertEqual(int_num, int(Bits(int_num)))
        self.assertEqual(signed_num, int(SB(signed_num)))
        self.assertAlmostEqual(float_num, float(FB(float_num)), 4)
    def test_bits2string(self):
        spam_string = "This is a spam!"
        self.assertEqual(spam_string, bits_list2string([Bits(spam_string)]))
    def test_matrixes(self):
        # Each case must make test_matrix() report equal bit values.
        self.assertEqual(True, test_matrix())
        self.assertEqual(True, test_matrix(translate=(1250, 744)))
        # NOTE(review): "(2,4, 3.7)" is a 3-tuple; likely meant "(2.4, 3.7)".
        self.assertEqual(True, test_matrix(scale=(2,4, 3.7)))
        self.assertEqual(True, test_matrix(scale=(-55, -66), translate=(1250, 744)))
        self.assertEqual(True, test_matrix(rotate=(-2.4, -3.8)))
        self.assertEqual(True, test_matrix(rotate=(33, 66), translate=(1250, 744)))
        # NOTE(review): "(1,5, -3.7)" is a 3-tuple; likely meant "(1.5, -3.7)".
        self.assertEqual(True, test_matrix(scale=(77, 44), rotate=(1,5, -3.7)))
        self.assertEqual(True, test_matrix(translate=(1250, 744), rotate=(-1, -1), scale=(-3, -1)))
    def test_fields_io_serialize_and_deserialize(self):
        # serialize()/deserialize() must round-trip a MATRIX exactly.
        m1 = MATRIX().generate(
            scale=(2.4, 3.7),
            translate=(1500, 1500))
        tpl = m1.serialize()
        m2 = MATRIX().deserialize(tpl)
        self.assertEqual(m1.value, m2.value)
    def test_getting_movie_clip(self):
        self.assertNotEqual(None, self.swf_tank.get_movie_clip('kombu'))
        self.assertRaises(MovieClipDoesNotExist,
            self.swf_bitmap.get_movie_clip, 'this_is_not_spam')
    def test_delete_movie_clip(self):
        self.swf_tank.delete_movie_clip('kombu')
        self.swf_tank.write(open('sample/mc/tank_without_kombu.swf', 'w'))
    def test_copy_swf(self):
        # A copy must serialize byte-identically to its source.
        c_tank = self.swf_tank.copy()
        c_bitmap = self.swf_bitmap.copy()
        self.assertEqual(c_tank.write(), self.swf_tank.write())
        self.assertEqual(c_bitmap.write(), self.swf_bitmap.write())
        c_tank.write(open('sample/mc/copy_tank.swf', 'w'))
        c_bitmap.write(open('sample/mc/copy_bitmap.swf', 'w'))
# Allow the test suite to be run directly as a script.
if __name__ == '__main__':
    unittest.main()
|
buhii/tomato
|
tests.py
|
Python
|
mit
| 3,294
|
# Copyright (c) 2014 Pier Carlo Chiodi - http://www.pierky.com
# Licensed under The MIT License (MIT) - http://opensource.org/licenses/MIT
#
# The MIT License (MIT)
# =====================
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Part of this work is based on Google Python IP address manipulation library
# (https://code.google.com/p/ipaddr-py/).
"""A Python library to gather IP address details (ASN, prefix, resource holder, reverse DNS) using the
RIPEStat API, with a basic cache to avoid flood of requests and to enhance performance."""
__version__ = "0.1"
# Usage
# =====
#
# Import the library, then setup a cache object and use it to gather IP address details.
# The cache object will automatically load and save data to the local cache files.
#
# Optionally, the cache object may be instantiated with the following arguments:
# - IP_ADDRESSES_CACHE_FILE, path to the file where IP addresses cache will be stored (default: "ip_addr.cache");
# - IP_PREFIXES_CACHE_FILE, path to the file where IP prefixes cache will be stored (default: "ip_pref.cache");
# - MAX_CACHE, expiration time for cache entries, in seconds (default: 604800, 1 week);
# - Debug, set to True to enable some debug messages (default: False).
#
# Results are given in a dictionary containing the following keys: ASN, Holder, Prefix, HostName, TS (time stamp).
#
# Hostname is obtained using the local socket.getfqdn function.
#
# import ipdetailscache
# cache = ipdetailscache.IPDetailsCache( IP_ADDRESSES_CACHE_FILE = "ip_addr.cache", IP_PREFIXES_CACHE_FILE = "ip_pref.cache", MAX_CACHE = 604800, Debug = False );
# result = cache.GetIPInformation( "IP_ADDRESS" )
#
# Example
# =======
#
# :~# python
# Python 2.7.2+ (default, Jul 20 2012, 22:15:08)
# [GCC 4.6.1] on linux2
# Type "help", "copyright", "credits" or "license" for more information.
# >>> import ipdetailscache
# >>> cache = ipdetailscache.IPDetailsCache();
# >>> result = cache.GetIPInformation( "193.0.6.139" )
# >>> result
# {u'Prefix': u'193.0.0.0/21', u'HostName': u'www.ripe.net', u'Holder': u'RIPE-NCC-AS Reseaux IP Europeens Network Coordination Centre (RIPE NCC),NL', u'TS': 1401781240, u'ASN': u'3333'}
import os.path
import time
import json
import ipaddr # http://code.google.com/p/ipaddr-py/ - pip install ipaddr
import socket
import urllib2
class IPDetailsCache():
    """Cache of per-IP details (ASN, holder, prefix, reverse DNS) gathered
    from the RIPEStat "prefix-overview" API, persisted to JSON files.

    Two caches are kept: one keyed by (exploded) IP address and one keyed
    by IP prefix, so any address falling inside an already-cached prefix
    is answered without a new HTTP request.
    """

    def _Debug(self, s):
        # Emit debug messages only when the cache was built with Debug=True.
        if self.Debug:
            print("DEBUG - IPDetailsCache - %s" % s)

    # Cache layout:
    # IPPrefixesCache[<ip prefix>]["TS" | "ASN" | "Holder"]
    # IPAddressesCache[<ip>]["TS" | "ASN" | "Holder" | "Prefix" | "HostName"]

    def GetIPInformation( self, in_IP ):
        """Return a dict with keys TS, ASN, Holder, Prefix, HostName for
        ``in_IP``, answering from the address cache, then the prefix
        cache, and only then querying the RIPEStat API.

        ASN is "unknown" for private/reserved addresses or lookup
        failures and "not announced" for unannounced address space.
        """
        Result = {}
        Result["TS"] = 0
        Result["ASN"] = ""
        Result["Holder"] = ""
        Result["Prefix"] = ""
        Result["HostName"] = ""

        IP = in_IP

        if not IP in self.IPAddressObjects:
            self.IPAddressObjects[IP] = ipaddr.IPAddress(IP)

        # Short-circuit address ranges that RIPEStat cannot know about.
        if self.IPAddressObjects[IP].version == 4:
            if self.IPAddressObjects[IP].is_private:
                Result["ASN"] = "unknown"
                return Result
        if self.IPAddressObjects[IP].version == 6:
            if self.IPAddressObjects[IP].is_reserved or \
                self.IPAddressObjects[IP].is_link_local or \
                self.IPAddressObjects[IP].is_site_local or \
                self.IPAddressObjects[IP].is_private or \
                self.IPAddressObjects[IP].is_multicast or \
                self.IPAddressObjects[IP].is_unspecified:
                Result["ASN"] = "unknown"
                return Result

        # Normalise to the exploded textual form so cache keys are canonical.
        if IP != self.IPAddressObjects[IP].exploded:
            IP = self.IPAddressObjects[IP].exploded
            if not IP in self.IPAddressObjects:
                self.IPAddressObjects[IP] = ipaddr.IPAddress(IP)

        # 1) Exact-address cache.
        if IP in self.IPAddressesCache:
            if self.IPAddressesCache[IP]["TS"] >= int(time.time()) - self.MAX_CACHE:
                Result = self.IPAddressesCache[IP]
                self._Debug("IP address cache hit for %s" % IP)
                return Result
            else:
                self._Debug("Expired IP address cache hit for %s" % IP)

        # 2) Covering-prefix cache.
        for IPPrefix in self.IPPrefixesCache:
            if self.IPPrefixesCache[IPPrefix]["TS"] >= int(time.time()) - self.MAX_CACHE:
                if not IPPrefix in self.IPPrefixObjects:
                    self.IPPrefixObjects[IPPrefix] = ipaddr.IPNetwork( IPPrefix )
                if self.IPPrefixObjects[IPPrefix].Contains( self.IPAddressObjects[IP] ):
                    Result["TS"] = self.IPPrefixesCache[IPPrefix]["TS"]
                    Result["ASN"] = self.IPPrefixesCache[IPPrefix]["ASN"]
                    Result["Holder"] = self.IPPrefixesCache[IPPrefix].get("Holder","")
                    Result["Prefix"] = IPPrefix
                    self._Debug("IP prefix cache hit for %s (prefix %s)" % ( IP, IPPrefix ) )
                    break

        # 3) Fall through to the RIPEStat prefix-overview API.
        if Result["ASN"] == "":
            self._Debug("No cache hit for %s" % IP )
            URL = "https://stat.ripe.net/data/prefix-overview/data.json?resource=%s" % IP
            obj = json.loads( urllib2.urlopen(URL).read() )

            if obj["status"] == "ok":
                Result["TS"] = int(time.time())
                if obj["data"]["asns"] != []:
                    try:
                        Result["ASN"] = str(obj["data"]["asns"][0]["asn"])
                        Result["Holder"] = obj["data"]["asns"][0]["holder"]
                        Result["Prefix"] = obj["data"]["resource"]
                        self._Debug("Got data for %s: ASN %s, prefix %s" % ( IP, Result["ASN"], Result["Prefix"] ) )
                    except (KeyError, IndexError, TypeError):
                        # Malformed/partial API payload: degrade gracefully.
                        # (Was a bare "except:", which also swallowed
                        # KeyboardInterrupt and programming errors.)
                        Result["ASN"] = "unknown"
                        self._Debug("No data for %s" % IP )
                else:
                    Result["ASN"] = "not announced"
                    Result["Holder"] = ""
                    Result["Prefix"] = obj["data"]["resource"]

        # Resolve reverse DNS and cache only meaningful results.
        if Result["ASN"].isdigit() or Result["ASN"] == "not announced":
            HostName = socket.getfqdn(IP)
            if HostName == IP or HostName == "":
                Result["HostName"] = "unknown"
            else:
                Result["HostName"] = HostName

            if not IP in self.IPAddressesCache:
                self.IPAddressesCache[IP] = {}
                self._Debug("Adding %s to addresses cache" % IP)
            self.IPAddressesCache[IP]["TS"] = Result["TS"]
            self.IPAddressesCache[IP]["ASN"] = Result["ASN"]
            self.IPAddressesCache[IP]["Holder"] = Result["Holder"]
            self.IPAddressesCache[IP]["Prefix"] = Result["Prefix"]
            self.IPAddressesCache[IP]["HostName"] = Result["HostName"]

            if Result["Prefix"] != "":
                IPPrefix = Result["Prefix"]
                if not IPPrefix in self.IPPrefixesCache:
                    self.IPPrefixesCache[ IPPrefix ] = {}
                    self._Debug("Adding %s to prefixes cache" % IPPrefix)
                self.IPPrefixesCache[IPPrefix]["TS"] = Result["TS"]
                self.IPPrefixesCache[IPPrefix]["ASN"] = Result["ASN"]
                self.IPPrefixesCache[IPPrefix]["Holder"] = Result["Holder"]

        return Result

    def SaveCache( self ):
        """Persist both caches to their JSON files."""
        # Save IP addresses cache
        self._Debug("Saving IP addresses cache to %s" % self.IP_ADDRESSES_CACHE_FILE)
        with open( self.IP_ADDRESSES_CACHE_FILE, "w" ) as outfile:
            json.dump( self.IPAddressesCache, outfile )
        # Save IP prefixes cache
        self._Debug("Saving IP prefixes cache to %s" % self.IP_PREFIXES_CACHE_FILE)
        with open( self.IP_PREFIXES_CACHE_FILE, "w" ) as outfile:
            json.dump( self.IPPrefixesCache, outfile )

    def __init__( self, IP_ADDRESSES_CACHE_FILE = "ip_addr.cache", IP_PREFIXES_CACHE_FILE = "ip_pref.cache", MAX_CACHE = 604800, Debug = False ):
        """Create the cache and load any existing cache files.

        @param IP_ADDRESSES_CACHE_FILE: JSON file for the per-address cache.
        @param IP_PREFIXES_CACHE_FILE: JSON file for the per-prefix cache.
        @param MAX_CACHE: cache-entry lifetime in seconds (default 1 week).
        @param Debug: emit debug messages when True.
        """
        self.IPAddressesCache = {}
        self.IPPrefixesCache = {}
        # Parsed ipaddr objects, memoised so each textual IP / prefix is
        # parsed at most once per process.
        self.IPAddressObjects = {}
        self.IPPrefixObjects = {}
        self.IP_ADDRESSES_CACHE_FILE = IP_ADDRESSES_CACHE_FILE
        self.IP_PREFIXES_CACHE_FILE = IP_PREFIXES_CACHE_FILE
        self.MAX_CACHE = MAX_CACHE
        self.Debug = Debug

        # Load IP addresses cache
        if os.path.exists( self.IP_ADDRESSES_CACHE_FILE ):
            self._Debug("Loading IP addresses cache from %s" % self.IP_ADDRESSES_CACHE_FILE)
            json_data = open( self.IP_ADDRESSES_CACHE_FILE )
            self.IPAddressesCache = json.load( json_data )
            json_data.close()
        else:
            self._Debug("No IP addresses cache file found: %s" % self.IP_ADDRESSES_CACHE_FILE)

        # Load IP prefixes cache
        if os.path.exists( self.IP_PREFIXES_CACHE_FILE ):
            self._Debug("Loading IP prefixes cache from %s" % self.IP_PREFIXES_CACHE_FILE)
            json_data = open( self.IP_PREFIXES_CACHE_FILE )
            self.IPPrefixesCache = json.load( json_data )
            json_data.close()
        else:
            self._Debug("No IP prefixes cache file found: %s" % self.IP_PREFIXES_CACHE_FILE)

        # Test write access to both cache files.
        # BUGFIX: the original opened these files in "w" mode, which
        # truncated (wiped) the cache files that were just loaded; a crash
        # before SaveCache() then lost the on-disk caches. Append mode
        # verifies writability without destroying the contents.
        self._Debug("Testing write permissions on IP addresses cache file")
        with open( self.IP_ADDRESSES_CACHE_FILE, "a" ) as outfile:
            pass
        self._Debug("Write permissions on IP addresses cache file OK")

        self._Debug("Testing write permissions on IP prefixes cache file")
        with open( self.IP_PREFIXES_CACHE_FILE, "a" ) as outfile:
            pass
        self._Debug("Write permissions on IP prefixes cache file OK")

    def __del__( self ):
        # Best-effort persistence at teardown; NOTE: __del__ is not
        # guaranteed to run — call SaveCache() explicitly when it matters.
        self.SaveCache()
|
mathias4github/ripe-atlas-traceroute2kml
|
ipdetailscache.py
|
Python
|
mit
| 9,640
|
#!C:\Users\ponto frio\PycharmProjects\MarceloSandovalScript\backend\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'Unidecode==0.4.18','console_scripts','unidecode'
__requires__ = 'Unidecode==0.4.18'
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Delegate to the console-script entry point declared by Unidecode and
    # propagate its return value as the process exit status.
    sys.exit(
        load_entry_point('Unidecode==0.4.18', 'console_scripts', 'unidecode')()
    )
|
marcelosandoval/tekton
|
backend/venv/Scripts/unidecode-script.py
|
Python
|
mit
| 388
|
# postgresql/__init__.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from . import base
from . import pg8000 # noqa
from . import psycopg2 # noqa
from . import psycopg2cffi # noqa
from . import pygresql # noqa
from . import pypostgresql # noqa
from .array import All
from .array import Any
from .array import ARRAY
from .array import array
from .base import BIGINT
from .base import BIT
from .base import BOOLEAN
from .base import BYTEA
from .base import CHAR
from .base import CIDR
from .base import CreateEnumType
from .base import DATE
from .base import DOUBLE_PRECISION
from .base import DropEnumType
from .base import ENUM
from .base import FLOAT
from .base import INET
from .base import INTEGER
from .base import INTERVAL
from .base import MACADDR
from .base import MONEY
from .base import NUMERIC
from .base import OID
from .base import REAL
from .base import REGCLASS
from .base import SMALLINT
from .base import TEXT
from .base import TIME
from .base import TIMESTAMP
from .base import TSVECTOR
from .base import UUID
from .base import VARCHAR
from .dml import Insert
from .dml import insert
from .ext import aggregate_order_by
from .ext import array_agg
from .ext import ExcludeConstraint
from .hstore import HSTORE
from .hstore import hstore
from .json import JSON
from .json import JSONB
from .ranges import DATERANGE
from .ranges import INT4RANGE
from .ranges import INT8RANGE
from .ranges import NUMRANGE
from .ranges import TSRANGE
from .ranges import TSTZRANGE
# Make psycopg2 the default DBAPI for "postgresql://" URLs.
base.dialect = dialect = psycopg2.dialect

# Public re-export surface of the postgresql dialect package.
__all__ = (
    "INTEGER",
    "BIGINT",
    "SMALLINT",
    "VARCHAR",
    "CHAR",
    "TEXT",
    "NUMERIC",
    "FLOAT",
    "REAL",
    "INET",
    "CIDR",
    "UUID",
    "BIT",
    "MACADDR",
    "MONEY",
    "OID",
    "REGCLASS",
    "DOUBLE_PRECISION",
    "TIMESTAMP",
    "TIME",
    "DATE",
    "BYTEA",
    "BOOLEAN",
    "INTERVAL",
    "ARRAY",
    "ENUM",
    "dialect",
    "array",
    "HSTORE",
    "hstore",
    "INT4RANGE",
    "INT8RANGE",
    "NUMRANGE",
    "DATERANGE",
    "TSVECTOR",
    "TSRANGE",
    "TSTZRANGE",
    "JSON",
    "JSONB",
    "Any",
    "All",
    "DropEnumType",
    "CreateEnumType",
    "ExcludeConstraint",
    "aggregate_order_by",
    "array_agg",
    "insert",
    "Insert",
)
|
graingert/sqlalchemy
|
lib/sqlalchemy/dialects/postgresql/__init__.py
|
Python
|
mit
| 2,432
|
"""
@name: Modules/CXore/Drivers/USB/usb_open.py
@author: D. Brian Kimmel
@contact: [email protected]
@copyright: (c) 2011-2020 by D. Brian Kimmel
@license: MIT License
@note: Created on Mar 27, 2011
@summary: This module is for communicating with USB devices.
This will interface various PyHouse modules to a USB device.
This may be instanced as many times as there are USB devices to control.
Instead of using callLater timers, it would be better to use deferred callbacks when data arrives.
"""
__updated__ = '2019-12-30'
# Import system type stuff
import usb.core # type:ignore
import usb.util # type:ignore
# Import PyHouse modules
from Modules.Drivers.USB.Driver_USB_17DD_5500 import Api as usb5500Api # type:ignore
from Modules.Core import logging_pyh as Logger
from Modules.Core.Utilities.debug_tools import PrettyFormatAny
LOG = Logger.getLogger('PyHouse.USBDriver_Open ')
# Timeouts for send/receive delays
RECEIVE_TIMEOUT = 0.3
class lightingUtility(object):
    """Small helpers for describing USB devices."""

    @staticmethod
    def format_names(p_USB_obj):
        """Return a printable "vendor:product name" string.

        @param p_USB_obj: object carrying integer Vendor / Product ids
            and a Name string.
        """
        l_ret = "{:#04x}:{:#04x} {}".format(p_USB_obj.Vendor, p_USB_obj.Product, p_USB_obj.Name)
        return l_ret

    @staticmethod
    def is_hid(p_device):
        """Return True when the device reports the HID device class (3).

        Fixes two defects: the original returned None (not False) for
        non-HID devices, and read the non-existent attribute
        'bUsbDeviceClass' — pyusb Device objects expose 'bDeviceClass'.
        """
        return p_device.bDeviceClass == 3
class Api(object):
    """Open/close logic for a USB controller: locate the device with pyusb,
    detach any kernel driver, then select its configuration, interface and
    IN/OUT endpoints."""

    m_controller_obj = None

    @staticmethod
    def _save_find_device(p_USB_obj, p_device):
        # Record the located pyusb device and its configuration count on
        # the driver's USB object; returns the USB object itself.
        p_USB_obj.UsbDevice = p_device
        p_USB_obj.num_configs = p_device.bNumConfigurations
        p_USB_obj.hid_device = True  # lightingUtility.is_hid(p_device)
        p_USB_obj.configs = {}
        return p_USB_obj

    @staticmethod
    def _open_find_device(p_USB_obj):
        """First step in opening a USB device.
        @return: None if no such device or a pyusb device object
        """
        l_vpn = lightingUtility.format_names(p_USB_obj)
        l_device = None
        try:
            l_device = usb.core.find(idVendor=p_USB_obj.Vendor, idProduct=p_USB_obj.Product)
        except (usb.USBError, ValueError):
            LOG.error("ERROR no such USB device for {}".format(l_vpn))
            return None
        if l_device == None:
            LOG.error('ERROR - USB device not found {}'.format(l_vpn))
            return None
        LOG.debug(PrettyFormatAny.form(l_device, 'Device'))
        LOG.debug(PrettyFormatAny.form(p_USB_obj, 'pUSB_obj'))
        # NOTE(review): this stores the *USB object* (the return of
        # _save_find_device) in UsbDevice; open_device() immediately
        # overwrites it with the returned l_device — confirm intent.
        p_USB_obj.UsbDevice = Api._save_find_device(p_USB_obj, l_device)
        LOG.info('Found a device - HID: {}'.format(l_vpn))
        return l_device

    @staticmethod
    def _setup_detach_kernel(p_USB_obj):
        """Get rid of any kernel device driver that is in our way.
        On a restart of PyHouse we expect no such kernel driver to exist.
        """
        try:
            if not p_USB_obj.UsbDevice.is_kernel_driver_active(0):
                return
        except usb.USBError:
            pass
        try:
            p_USB_obj.UsbDevice.detach_kernel_driver(0)
        except Exception as e:
            LOG.error("ERROR in detaching_kernel_driver - {}".format(e))

    @staticmethod
    def _setup_configurations(p_USB_obj):
        """Now we deal with the USB configuration
        1. get all the configs
        2. use the 'proper' config.
        @param p_usb: is the 'found' device
        """
        # TODO don't do if not needed
        p_USB_obj.UsbDevice.set_configuration()
        p_USB_obj.configs = p_USB_obj.UsbDevice.get_active_configuration()
        p_USB_obj.num_interfaces = p_USB_obj.configs.bNumInterfaces
        p_USB_obj.interfaces = {}

    @staticmethod
    def _setup_interfaces(p_USB_obj):
        """Select interface (0, 0) of the active configuration, honouring
        any alternate setting the device reports.
        """
        l_interface_number = p_USB_obj.configs[(0, 0)].bInterfaceNumber
        l_interface_class = p_USB_obj.configs[(0, 0)].bInterfaceClass
        try:
            l_alternate_setting = usb.control.get_interface(p_USB_obj.UsbDevice, l_interface_number)
        except Exception as e:
            LOG.error(" -- Error in alt setting {}".format(e))
            l_alternate_setting = 0
        l_interface = usb.util.find_descriptor(
            p_USB_obj.configs,
            bInterfaceNumber=l_interface_number,
            bAlternateSetting=l_alternate_setting)
        p_USB_obj.num_endpoints = l_interface.bNumEndpoints
        p_USB_obj.interface_num = l_interface.bInterfaceNumber
        p_USB_obj.interface = l_interface
        # Interface class 3 == HID; fetch its report descriptors.
        if l_interface_class == 3:
            p_USB_obj.hid_device = True
            Api._setup_reports(p_USB_obj)

    @staticmethod
    def _setup_endpoints(p_USB_obj):
        """We will deal with 2 endpoints here - as that is what I expect a controller to have.
        No use in be too general if no device exists that is more complex.
        """
        LOG.debug("_setup_endpoints() - Name: {}, endpoint count: {}".format(p_USB_obj.Name, p_USB_obj.num_endpoints))
        p_USB_obj.ep_out = usb.util.find_descriptor(
            p_USB_obj.interface,
            custom_match=lambda e: usb.util.endpoint_direction(e.bEndpointAddress) == usb.util.ENDPOINT_OUT)
        LOG.debug("  Ep_Out: {}".format(p_USB_obj.ep_out.__dict__))
        p_USB_obj.epo_addr = p_USB_obj.ep_out.bEndpointAddress
        # Low 2 bits of bmAttributes are the transfer type.
        p_USB_obj.epo_type = p_USB_obj.ep_out.bmAttributes & 0x03
        p_USB_obj.epo_packet_size = p_USB_obj.ep_out.wMaxPacketSize
        p_USB_obj.ep_in = usb.util.find_descriptor(
            p_USB_obj.interface,
            custom_match=lambda e: usb.util.endpoint_direction(e.bEndpointAddress) == usb.util.ENDPOINT_IN
        )
        LOG.debug("  Ep_In: {}".format(p_USB_obj.ep_in.__dict__))
        p_USB_obj.epi_addr = p_USB_obj.ep_in.bEndpointAddress
        p_USB_obj.epi_type = p_USB_obj.ep_in.bmAttributes & 0x03
        p_USB_obj.epi_packet_size = p_USB_obj.ep_in.wMaxPacketSize

    @staticmethod
    def _setup_reports(p_USB_obj):
        # Locate the HID IN report endpoint; result currently unused.
        _l_reports = usb.util.find_descriptor(
            p_USB_obj.interface,
            custom_match=lambda e: usb.util.endpoint_direction(e.bEndpointAddress) == usb.util.ENDPOINT_IN)

    @staticmethod
    def open_device(p_USB_obj):
        # Full open sequence: find, detach kernel driver, configure,
        # select interface and endpoints. Returns True on success.
        p_USB_obj.message = bytearray()
        l_vpn = lightingUtility.format_names(p_USB_obj)
        LOG.info("Opening USB device - {}".format(l_vpn))
        p_USB_obj.UsbDevice = Api._open_find_device(p_USB_obj)
        if p_USB_obj.UsbDevice == None:
            LOG.error('ERROR - Setup Failed')
            return False
        Api._setup_detach_kernel(p_USB_obj)
        Api._setup_configurations(p_USB_obj)
        Api._setup_interfaces(p_USB_obj)
        Api._setup_endpoints(p_USB_obj)
        _l_control = usb5500Api.Setup()
        # _l_msg = lightingUtility.setup_hid_17DD_5500(p_USB_obj)
        return True

    @staticmethod
    def Setup(p_USB_obj):
        # Delegate device-specific setup to the 17DD:5500 driver.
        l_control = usb5500Api.Setup()
        return l_control

    @staticmethod
    def close_device(p_USB_obj):
        # Reset rather than release: returns the device to a known state.
        p_USB_obj.UsbDevice.reset()
# ## END DBK
|
DBrianKimmel/PyHouse
|
Project/src/Modules/Core/Drivers/Usb/Usb_open.py
|
Python
|
mit
| 6,996
|
from django.db import models
from . import managers
class VTM(models.Model):
    """dm+d Virtual Therapeutic Moiety (e.g. an abstract drug substance)."""

    class Meta:
        verbose_name = "Virtual Therapeutic Moiety"
        verbose_name_plural = "Virtual Therapeutic Moieties"
        ordering = ["nm"]

    objects = managers.VTMManager()

    # Hooks used by generic search/display code elsewhere in the project.
    obj_type = "vtm"
    name_field = "nm"

    def __str__(self):
        return str(self.id)

    id = models.BigIntegerField(
        primary_key=True,
        db_column="vtmid",
        help_text="Identifier",
    )
    invalid = models.BooleanField(
        help_text="Invalid",
    )
    nm = models.CharField(
        max_length=255,
        help_text="Name",
    )
    abbrevnm = models.CharField(
        max_length=60,
        null=True,
        help_text="Abbreviated name",
    )
    vtmidprev = models.BigIntegerField(
        null=True,
        help_text="Previous identifier",
    )
    vtmiddt = models.DateField(
        null=True,
        help_text="VTM identifier date",
    )

    def title(self):
        # Human-readable title used by list/search views.
        return self.nm
class VMP(models.Model):
    """dm+d Virtual Medicinal Product: a generic product (VTM + strength +
    form), the parent of the branded AMP records."""

    class Meta:
        verbose_name = "Virtual Medicinal Product"
        ordering = ["nm"]

    objects = managers.VMPManager()

    # Hooks used by generic search/display code elsewhere in the project.
    obj_type = "vmp"
    name_field = "nm"

    def __str__(self):
        return str(self.id)

    id = models.BigIntegerField(
        primary_key=True,
        db_column="vpid",
        help_text="Identifier",
    )
    vpiddt = models.DateField(
        null=True,
        help_text="Date identifier became valid",
    )
    vpidprev = models.BigIntegerField(
        null=True,
        help_text="Previous product identifier",
    )
    vtm = models.ForeignKey(
        db_column="vtmid",
        to="VTM",
        on_delete=models.CASCADE,
        null=True,
        help_text="VTM",
    )
    invalid = models.BooleanField(
        help_text="Invalid",
    )
    nm = models.CharField(
        max_length=255,
        help_text="Name",
    )
    abbrevnm = models.CharField(
        max_length=60,
        null=True,
        help_text="Abbreviated name",
    )
    basis = models.ForeignKey(
        db_column="basiscd",
        to="BasisOfName",
        on_delete=models.CASCADE,
        help_text="Basis of preferred name",
    )
    nmdt = models.DateField(
        null=True,
        help_text="Date of name applicability",
    )
    nmprev = models.CharField(
        max_length=255,
        null=True,
        help_text="Previous name",
    )
    basis_prev = models.ForeignKey(
        db_column="basis_prevcd",
        to="BasisOfName",
        on_delete=models.CASCADE,
        related_name="+",
        null=True,
        help_text="Basis of previous name",
    )
    nmchange = models.ForeignKey(
        db_column="nmchangecd",
        to="NamechangeReason",
        on_delete=models.CASCADE,
        null=True,
        help_text="Reason for name change",
    )
    combprod = models.ForeignKey(
        db_column="combprodcd",
        to="CombinationProdInd",
        on_delete=models.CASCADE,
        null=True,
        help_text="Combination product",
    )
    pres_stat = models.ForeignKey(
        db_column="pres_statcd",
        to="VirtualProductPresStatus",
        on_delete=models.CASCADE,
        help_text="Prescribing status",
    )
    sug_f = models.BooleanField(
        help_text="Sugar free",
    )
    glu_f = models.BooleanField(
        help_text="Gluten free",
    )
    pres_f = models.BooleanField(
        help_text="Preservative free",
    )
    cfc_f = models.BooleanField(
        help_text="CFC free",
    )
    non_avail = models.ForeignKey(
        db_column="non_availcd",
        to="VirtualProductNonAvail",
        on_delete=models.CASCADE,
        null=True,
        help_text="Non-availability",
    )
    non_availdt = models.DateField(
        null=True,
        help_text="Non-availability status date",
    )
    df_ind = models.ForeignKey(
        db_column="df_indcd",
        to="DfIndicator",
        on_delete=models.CASCADE,
        null=True,
        help_text="Dose form",
    )
    udfs = models.DecimalField(
        max_digits=10,
        decimal_places=3,
        null=True,
        help_text="Unit dose form size",
    )
    udfs_uom = models.ForeignKey(
        db_column="udfs_uomcd",
        to="UnitOfMeasure",
        on_delete=models.CASCADE,
        related_name="+",
        null=True,
        help_text="Unit dose form units",
    )
    unit_dose_uom = models.ForeignKey(
        db_column="unit_dose_uomcd",
        to="UnitOfMeasure",
        on_delete=models.CASCADE,
        related_name="+",
        null=True,
        help_text="Unit dose unit of measure",
    )
    # Not part of dm+d itself; populated by a separate BNF-mapping import.
    bnf_code = models.CharField(max_length=15, null=True)

    def title(self):
        # Human-readable title used by list/search views.
        return self.nm
class VPI(models.Model):
    """dm+d Virtual Product Ingredient: links a VMP to an ingredient
    substance with its strength (numerator/denominator)."""

    class Meta:
        verbose_name = "Virtual Product Ingredient"

    vmp = models.ForeignKey(
        db_column="vpid",
        to="VMP",
        on_delete=models.CASCADE,
        help_text="VMP",
    )
    ing = models.ForeignKey(
        db_column="isid",
        to="Ing",
        on_delete=models.CASCADE,
        help_text="Ingredient",
    )
    basis_strnt = models.ForeignKey(
        db_column="basis_strntcd",
        to="BasisOfStrnth",
        on_delete=models.CASCADE,
        null=True,
        help_text="Basis of pharmaceutical strength",
    )
    bs_subid = models.BigIntegerField(
        null=True,
        help_text="Basis of strength substance identifier",
    )
    strnt_nmrtr_val = models.DecimalField(
        max_digits=10,
        decimal_places=3,
        null=True,
        help_text="Strength value numerator",
    )
    strnt_nmrtr_uom = models.ForeignKey(
        db_column="strnt_nmrtr_uomcd",
        to="UnitOfMeasure",
        on_delete=models.CASCADE,
        related_name="+",
        null=True,
        help_text="Strength value numerator unit",
    )
    strnt_dnmtr_val = models.DecimalField(
        max_digits=10,
        decimal_places=3,
        null=True,
        help_text="Strength value denominator",
    )
    strnt_dnmtr_uom = models.ForeignKey(
        db_column="strnt_dnmtr_uomcd",
        to="UnitOfMeasure",
        on_delete=models.CASCADE,
        related_name="+",
        null=True,
        help_text="Strength value denominator unit",
    )
class Ont(models.Model):
    """dm+d ontology link: associates a VMP with a form-and-route code."""

    class Meta:
        verbose_name = "Ontology Drug Form & Route"

    vmp = models.ForeignKey(
        db_column="vpid",
        to="VMP",
        on_delete=models.CASCADE,
        help_text="VMP",
    )
    form = models.ForeignKey(
        db_column="formcd",
        to="OntFormRoute",
        on_delete=models.CASCADE,
        help_text="Form & Route",
    )
class Dform(models.Model):
    """dm+d dose form: the single formulation recorded against a VMP."""

    class Meta:
        verbose_name = "Dose Form"

    # One-to-one: a VMP has at most one dose-form record.
    vmp = models.OneToOneField(
        db_column="vpid",
        to="VMP",
        on_delete=models.CASCADE,
        help_text="VMP",
    )
    form = models.ForeignKey(
        db_column="formcd",
        to="Form",
        on_delete=models.CASCADE,
        help_text="Formulation",
    )
class Droute(models.Model):
    """dm+d drug route: one administration route recorded against a VMP."""

    class Meta:
        verbose_name = "Drug Route"

    vmp = models.ForeignKey(
        db_column="vpid",
        to="VMP",
        on_delete=models.CASCADE,
        help_text="VMP",
    )
    route = models.ForeignKey(
        db_column="routecd",
        to="Route",
        on_delete=models.CASCADE,
        help_text="Route",
    )
class ControlInfo(models.Model):
    """dm+d controlled-drug prescribing information for a VMP."""

    class Meta:
        verbose_name = "Controlled Drug Prescribing Information"

    # One-to-one: a VMP has at most one controlled-drug record.
    vmp = models.OneToOneField(
        db_column="vpid",
        to="VMP",
        on_delete=models.CASCADE,
        help_text="VMP",
    )
    cat = models.ForeignKey(
        db_column="catcd",
        to="ControlDrugCategory",
        on_delete=models.CASCADE,
        help_text="Controlled Drug category",
    )
    catdt = models.DateField(
        null=True,
        help_text="Date of applicability",
    )
    cat_prev = models.ForeignKey(
        db_column="cat_prevcd",
        to="ControlDrugCategory",
        on_delete=models.CASCADE,
        related_name="+",
        null=True,
        help_text="Previous Controlled Drug information",
    )
class AMP(models.Model):
    """dm+d Actual Medicinal Product: a specific branded/supplier product
    realising a VMP."""

    class Meta:
        verbose_name = "Actual Medicinal Product"
        ordering = ["descr"]

    objects = managers.AMPManager()

    # Hooks used by generic search/display code elsewhere in the project.
    obj_type = "amp"
    name_field = "descr"

    def __str__(self):
        return str(self.id)

    id = models.BigIntegerField(
        primary_key=True,
        db_column="apid",
        help_text="Identifier",
    )
    invalid = models.BooleanField(
        help_text="Invalid",
    )
    vmp = models.ForeignKey(
        db_column="vpid",
        to="VMP",
        on_delete=models.CASCADE,
        help_text="VMP",
    )
    nm = models.CharField(
        max_length=255,
        help_text="Name",
    )
    abbrevnm = models.CharField(
        max_length=60,
        null=True,
        help_text="Abbreviated name",
    )
    descr = models.CharField(
        max_length=700,
        help_text="Description",
    )
    nmdt = models.DateField(
        null=True,
        help_text="Date of name applicability",
    )
    nm_prev = models.CharField(
        max_length=255,
        null=True,
        help_text="Previous name",
    )
    supp = models.ForeignKey(
        db_column="suppcd",
        to="Supplier",
        on_delete=models.CASCADE,
        help_text="Supplier",
    )
    lic_auth = models.ForeignKey(
        db_column="lic_authcd",
        to="LicensingAuthority",
        on_delete=models.CASCADE,
        help_text="Current licensing authority",
    )
    lic_auth_prev = models.ForeignKey(
        db_column="lic_auth_prevcd",
        to="LicensingAuthority",
        on_delete=models.CASCADE,
        related_name="+",
        null=True,
        help_text="Previous licensing authority",
    )
    lic_authchange = models.ForeignKey(
        db_column="lic_authchangecd",
        to="LicensingAuthorityChangeReason",
        on_delete=models.CASCADE,
        null=True,
        help_text="Reason for change of licensing authority",
    )
    lic_authchangedt = models.DateField(
        null=True,
        help_text="Date of change of licensing authority",
    )
    combprod = models.ForeignKey(
        db_column="combprodcd",
        to="CombinationProdInd",
        on_delete=models.CASCADE,
        null=True,
        help_text="Combination product",
    )
    flavour = models.ForeignKey(
        db_column="flavourcd",
        to="Flavour",
        on_delete=models.CASCADE,
        null=True,
        help_text="Flavour",
    )
    ema = models.BooleanField(
        help_text="EMA additional monitoring",
    )
    parallel_import = models.BooleanField(
        help_text="Parallel import",
    )
    avail_restrict = models.ForeignKey(
        db_column="avail_restrictcd",
        to="AvailabilityRestriction",
        on_delete=models.CASCADE,
        help_text="Restrictions on availability",
    )
    # Not part of dm+d itself; populated by a separate BNF-mapping import.
    bnf_code = models.CharField(max_length=15, null=True)

    def title(self):
        # Human-readable title used by list/search views.
        return self.descr
class ApIng(models.Model):
    """dm+d Actual Product Ingredient: links an AMP to an ingredient
    substance with its pharmaceutical strength."""

    class Meta:
        # Was the placeholder "TODO"; named after the dm+d AP_ING entity,
        # matching the naming style of the sibling models.
        verbose_name = "Actual Product Ingredient"

    amp = models.ForeignKey(
        db_column="apid",
        to="AMP",
        on_delete=models.CASCADE,
        help_text="AMP",
    )
    ing = models.ForeignKey(
        db_column="isid",
        to="Ing",
        on_delete=models.CASCADE,
        help_text="Ingredient",
    )
    strnth = models.DecimalField(
        max_digits=10,
        decimal_places=3,
        null=True,
        help_text="Pharmaceutical strength numerical value",
    )
    uom = models.ForeignKey(
        db_column="uomcd",
        to="UnitOfMeasure",
        on_delete=models.CASCADE,
        related_name="+",
        null=True,
        help_text="Pharmaceutical Strength Unit of Measure",
    )
class LicRoute(models.Model):
    """Licensed route of administration for an AMP."""
    class Meta:
        verbose_name = "Licensed Route"
    amp = models.ForeignKey(
        db_column="apid",
        to="AMP",
        on_delete=models.CASCADE,
        help_text="AMP",
    )
    route = models.ForeignKey(
        db_column="routecd",
        to="Route",
        on_delete=models.CASCADE,
        help_text="Licenced route",
    )
class ApInfo(models.Model):
    """Appliance-specific product information attached one-to-one to an AMP."""
    class Meta:
        verbose_name = "Appliance Product Information"
    amp = models.OneToOneField(
        db_column="apid",
        to="AMP",
        on_delete=models.CASCADE,
        help_text="AMP",
    )
    sz_weight = models.CharField(
        max_length=100,
        null=True,
        help_text="Size / weight",
    )
    colour = models.ForeignKey(
        db_column="colourcd",
        to="Colour",
        on_delete=models.CASCADE,
        null=True,
        help_text="Colour",
    )
    prod_order_no = models.CharField(
        max_length=20,
        null=True,
        help_text="Product order number",
    )
class VMPP(models.Model):
    """Virtual Medicinal Product Pack: a VMP plus a pack quantity."""
    class Meta:
        verbose_name = "Virtual Medicinal Product Pack"
        ordering = ["nm"]
    objects = managers.VMPPManager()
    obj_type = "vmpp"
    name_field = "nm"
    def __str__(self):
        return str(self.id)
    id = models.BigIntegerField(
        primary_key=True,
        db_column="vppid",
        help_text="Identifier",
    )
    invalid = models.BooleanField(
        help_text="Invalid",
    )
    nm = models.CharField(
        max_length=420,
        help_text="Description",
    )
    vmp = models.ForeignKey(
        db_column="vpid",
        to="VMP",
        on_delete=models.CASCADE,
        help_text="VMP",
    )
    qtyval = models.DecimalField(
        max_digits=10,
        decimal_places=2,
        null=True,
        help_text="Quantity value",
    )
    qty_uom = models.ForeignKey(
        db_column="qty_uomcd",
        to="UnitOfMeasure",
        on_delete=models.CASCADE,
        related_name="+",
        null=True,
        help_text="Quantity unit of measure",
    )
    combpack = models.ForeignKey(
        db_column="combpackcd",
        to="CombinationPackInd",
        on_delete=models.CASCADE,
        null=True,
        help_text="Combination pack",
    )
    bnf_code = models.CharField(max_length=15, null=True)
    def title(self):
        """Return the display title for this pack (its description)."""
        return self.nm
class Dtinfo(models.Model):
    """Drug Tariff category / pricing information for a VMPP."""
    class Meta:
        verbose_name = "Drug Tariff Category Information"
    vmpp = models.OneToOneField(
        db_column="vppid",
        to="VMPP",
        on_delete=models.CASCADE,
        help_text="VMPP",
    )
    pay_cat = models.ForeignKey(
        db_column="pay_catcd",
        to="DtPaymentCategory",
        on_delete=models.CASCADE,
        help_text="Drug Tariff payment category",
    )
    price = models.IntegerField(
        null=True,
        help_text="Drug Tariff price",
    )
    dt = models.DateField(
        null=True,
        help_text="Date from which applicable",
    )
    prevprice = models.IntegerField(
        null=True,
        help_text="Previous price",
    )
class AMPP(models.Model):
    """Actual Medicinal Product Pack: a concrete AMP in a concrete VMPP."""
    class Meta:
        verbose_name = "Actual Medicinal Product Pack"
        ordering = ["nm"]
    objects = managers.AMPPManager()
    obj_type = "ampp"
    name_field = "nm"
    def __str__(self):
        return str(self.id)
    id = models.BigIntegerField(
        primary_key=True,
        db_column="appid",
        help_text="Identifier",
    )
    invalid = models.BooleanField(
        help_text="Invalid",
    )
    nm = models.CharField(
        max_length=774,
        help_text="Description",
    )
    abbrevnm = models.CharField(
        max_length=60,
        null=True,
        help_text="Abbreviated name",
    )
    vmpp = models.ForeignKey(
        db_column="vppid",
        to="VMPP",
        on_delete=models.CASCADE,
        help_text="VMPP",
    )
    amp = models.ForeignKey(
        db_column="apid",
        to="AMP",
        on_delete=models.CASCADE,
        help_text="AMP",
    )
    combpack = models.ForeignKey(
        db_column="combpackcd",
        to="CombinationPackInd",
        on_delete=models.CASCADE,
        null=True,
        help_text="Combination pack",
    )
    legal_cat = models.ForeignKey(
        db_column="legal_catcd",
        to="LegalCategory",
        on_delete=models.CASCADE,
        help_text="Legal category",
    )
    subp = models.CharField(
        max_length=30,
        null=True,
        help_text="Sub pack info",
    )
    disc = models.ForeignKey(
        db_column="disccd",
        to="DiscontinuedInd",
        on_delete=models.CASCADE,
        null=True,
        help_text="Discontinued",
    )
    discdt = models.DateField(
        null=True,
        help_text="Discontinued change date",
    )
    bnf_code = models.CharField(max_length=15, null=True)
    def title(self):
        """Return the display title for this pack (its description)."""
        return self.nm
class PackInfo(models.Model):
    """Appliance pack (reimbursement/ordering) information for an AMPP."""
    class Meta:
        # NOTE(review): placeholder verbose_name — needs a real value
        verbose_name = "TODO"
    ampp = models.OneToOneField(
        db_column="appid",
        to="AMPP",
        on_delete=models.CASCADE,
        help_text="AMPP",
    )
    reimb_stat = models.ForeignKey(
        db_column="reimb_statcd",
        to="ReimbursementStatus",
        on_delete=models.CASCADE,
        help_text="Appliance reimbursement status",
    )
    reimb_statdt = models.DateField(
        null=True,
        help_text="Date appliance reimbursement status became effective",
    )
    reimb_statprev = models.ForeignKey(
        db_column="reimb_statprevcd",
        to="ReimbursementStatus",
        on_delete=models.CASCADE,
        related_name="+",
        null=True,
        help_text="Appliance reimbursement previous status",
    )
    pack_order_no = models.CharField(
        max_length=20,
        null=True,
        help_text="Pack order number",
    )
class PrescribInfo(models.Model):
    """Prescribing flags (schedules, formularies, etc.) for an AMPP."""
    class Meta:
        verbose_name = "Product Prescribing Information"
    ampp = models.OneToOneField(
        db_column="appid",
        to="AMPP",
        on_delete=models.CASCADE,
        help_text="AMPP",
    )
    sched_2 = models.BooleanField(
        help_text="Schedule 2",
    )
    acbs = models.BooleanField(
        help_text="ACBS",
    )
    padm = models.BooleanField(
        help_text="Personally administered",
    )
    fp10_mda = models.BooleanField(
        help_text="FP10 MDA Prescription",
    )
    sched_1 = models.BooleanField(
        help_text="Schedule 1",
    )
    hosp = models.BooleanField(
        help_text="Hospital",
    )
    nurse_f = models.BooleanField(
        help_text="Nurse formulary",
    )
    enurse_f = models.BooleanField(
        help_text="Nurse extended formulary",
    )
    dent_f = models.BooleanField(
        help_text="Dental formulary",
    )
class PriceInfo(models.Model):
    """Price (current and previous) for an AMPP."""
    class Meta:
        verbose_name = "Medicinal Product Price"
    ampp = models.OneToOneField(
        db_column="appid",
        to="AMPP",
        on_delete=models.CASCADE,
        help_text="AMPP",
    )
    price = models.IntegerField(
        null=True,
        help_text="Price",
    )
    pricedt = models.DateField(
        null=True,
        help_text="Date of price validity",
    )
    price_prev = models.IntegerField(
        null=True,
        help_text="Price prior to change date",
    )
    price_basis = models.ForeignKey(
        db_column="price_basiscd",
        to="PriceBasis",
        on_delete=models.CASCADE,
        help_text="Price basis",
    )
class ReimbInfo(models.Model):
    """Reimbursement information (fees, charges, flags) for an AMPP."""
    class Meta:
        verbose_name = "Reimbursement Information"
    ampp = models.OneToOneField(
        db_column="appid",
        to="AMPP",
        on_delete=models.CASCADE,
        help_text="AMPP",
    )
    px_chrgs = models.IntegerField(
        null=True,
        help_text="Prescription charges",
    )
    disp_fees = models.IntegerField(
        null=True,
        help_text="Dispensing fees",
    )
    bb = models.BooleanField(
        help_text="Broken bulk",
    )
    cal_pack = models.BooleanField(
        help_text="Calendar pack",
    )
    spec_cont = models.ForeignKey(
        db_column="spec_contcd",
        to="SpecCont",
        on_delete=models.CASCADE,
        null=True,
        help_text="Special container",
    )
    dnd = models.ForeignKey(
        db_column="dndcd",
        to="Dnd",
        on_delete=models.CASCADE,
        null=True,
        help_text="Discount not deducted",
    )
    fp34d = models.BooleanField(
        help_text="FP34D prescription item",
    )
class Ing(models.Model):
    """Ingredient substance, identified by ``isid``."""
    class Meta:
        # NOTE(review): placeholder verbose_name — needs a real value
        verbose_name = "TODO"
    id = models.BigIntegerField(
        primary_key=True,
        db_column="isid",
        help_text="Identifier",
    )
    isiddt = models.DateField(
        null=True,
        help_text="Date identifier became valid",
    )
    isidprev = models.BigIntegerField(
        null=True,
        help_text="Previous identifier",
    )
    invalid = models.BooleanField(
        help_text="Invalid",
    )
    nm = models.CharField(
        max_length=255,
        help_text="Name",
    )
class CombinationPackInd(models.Model):
    """Lookup table: combination pack indicator codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class CombinationProdInd(models.Model):
    """Lookup table: combination product indicator codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class BasisOfName(models.Model):
    """Lookup table: basis-of-name codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=150,
        help_text="Description",
    )
class NamechangeReason(models.Model):
    """Lookup table: reason-for-name-change codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=150,
        help_text="Description",
    )
class VirtualProductPresStatus(models.Model):
    """Lookup table: virtual product prescribing status codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class ControlDrugCategory(models.Model):
    """Lookup table: controlled drug category codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class LicensingAuthority(models.Model):
    """Lookup table: licensing authority codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class UnitOfMeasure(models.Model):
    """Lookup table: units of measure, with code-history fields."""
    cd = models.BigIntegerField(
        primary_key=True,
        help_text="Code",
    )
    cddt = models.DateField(
        null=True,
        help_text="Date code is applicable from",
    )
    cdprev = models.BigIntegerField(
        null=True,
        help_text="Previous code",
    )
    descr = models.CharField(
        max_length=150,
        help_text="Description",
    )
class Form(models.Model):
    """Lookup table: dose forms, with code-history fields."""
    cd = models.BigIntegerField(
        primary_key=True,
        help_text="Code",
    )
    cddt = models.DateField(
        null=True,
        help_text="Date code is applicable from",
    )
    cdprev = models.BigIntegerField(
        null=True,
        help_text="Previous code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class OntFormRoute(models.Model):
    """Lookup table: ontology form & route codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class Route(models.Model):
    """Lookup table: routes of administration, with code-history fields."""
    cd = models.BigIntegerField(
        primary_key=True,
        help_text="Code",
    )
    cddt = models.DateField(
        null=True,
        help_text="Date code is applicable from",
    )
    cdprev = models.BigIntegerField(
        null=True,
        help_text="Previous code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class DtPaymentCategory(models.Model):
    """Lookup table: Drug Tariff payment category codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class Supplier(models.Model):
    """Lookup table: suppliers, with code history and validity flag."""
    cd = models.BigIntegerField(
        primary_key=True,
        help_text="Code",
    )
    cddt = models.DateField(
        null=True,
        help_text="Date code is applicable from",
    )
    cdprev = models.BigIntegerField(
        null=True,
        help_text="Previous code",
    )
    invalid = models.BooleanField(
        help_text="Invalid",
    )
    descr = models.CharField(
        max_length=80,
        help_text="Description",
    )
class Flavour(models.Model):
    """Lookup table: flavour codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class Colour(models.Model):
    """Lookup table: colour codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class BasisOfStrnth(models.Model):
    """Lookup table: basis-of-strength codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=150,
        help_text="Description",
    )
class ReimbursementStatus(models.Model):
    """Lookup table: appliance reimbursement status codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class SpecCont(models.Model):
    """Lookup table: special container codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class Dnd(models.Model):
    """Lookup table: discount-not-deducted codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class VirtualProductNonAvail(models.Model):
    """Lookup table: virtual product non-availability codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class DiscontinuedInd(models.Model):
    """Lookup table: discontinued indicator codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class DfIndicator(models.Model):
    """Lookup table: dose form indicator codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=20,
        help_text="Description",
    )
class PriceBasis(models.Model):
    """Lookup table: price basis codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class LegalCategory(models.Model):
    """Lookup table: legal category codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class AvailabilityRestriction(models.Model):
    """Lookup table: availability restriction codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class LicensingAuthorityChangeReason(models.Model):
    """Lookup table: reason-for-change-of-licensing-authority codes."""
    cd = models.IntegerField(
        primary_key=True,
        help_text="Code",
    )
    descr = models.CharField(
        max_length=60,
        help_text="Description",
    )
class GTIN(models.Model):
    """Global Trade Item Number assigned to an AMPP, with validity dates."""
    class Meta:
        verbose_name = "Global Trade Item Number"
    ampp = models.OneToOneField(
        db_column="appid",
        to="AMPP",
        on_delete=models.CASCADE,
        help_text="AMPP",
    )
    gtin = models.BigIntegerField(
        help_text="GTIN",
    )
    startdt = models.DateField(
        help_text="GTIN date",
    )
    enddt = models.DateField(
        null=True,
        help_text="The date the GTIN became invalid",
    )
|
annapowellsmith/openpresc
|
openprescribing/dmd2/models.py
|
Python
|
mit
| 28,012
|
import Differentiation as Diff
import numpy as np
import sys
# Geometry: periodic in x and y
# Arakawa C-grid
#
# | | | |
# h -- u -- h -- u -- h -- u -- h --
# | | | |
# | | | |
# v q v q v q v
# | | | |
# | | | |
# h -- u -- h -- u -- h -- u -- h --
# | | | |
# | | | |
# v q v q v q v
# | | | |
# | | | |
# h -- u -- h -- u -- h -- u -- h --
# | | | |
# | | | |
# v q v q v q v
# | | | |
# | | | |
# h -- u -- h -- u -- h -- u -- h --
#
# If Periodic X Periodic: u,v,h = Nx by Ny
# If Periodic X Walls: u,h: Nx by (Ny+1) and
# v: Nx by Ny
# If Walls X Periodic: v,h: (Nx+1) by Ny and
# u: Nx by Ny
#
# N,S rows:
# -> must advance u,h
# -> Must extend v to compute V_y
# -> If v = 0 then maybe (q*V^x) = 0 too?
# W,E columns:
# -> must advance v,h
# -> Must extend u to compute U_x
# -> If u = 0 then (q*U^y) = 0 too?
# ghost cells:
# u-eqn: need q*V^x
# v-eqn: need q*U_y
# h-eqn: need U left and V down
def sadourny_sw_flux(sim):
    """Compute the nonlinear Sadourny shallow-water tendencies for each layer.

    Writes the results into ``sim.curr_flux.{u,v,h}`` in place and returns
    nothing.  Uses the C-grid averaging (``avx_*``/``avy_*``) and
    differencing (``ddx_*``/``ddy_*``) operators installed on ``sim``.
    """
    Nx, Ny, Nz = sim.Nx, sim.Ny, sim.Nz
    dx, dy = sim.dx[0], sim.dx[1]
    # Loop through each layer and compute the flux
    for ii in range(Nz):
        # Assign nice names to primary variables
        h = sim.soln.h[:,:,ii]
        u = sim.soln.u[:,:,ii]
        v = sim.soln.v[:,:,ii]
        # Compute secondary variables: mass transports U, V; Bernoulli-like
        # term B; and potential-vorticity-like quantity q.
        # NOTE(review): sim.F is presumably the Coriolis parameter — confirm.
        U = sim.avx_h(h)*u
        V = sim.avy_h(h)*v
        B = sim.gs[ii]*h + 0.5*(sim.avx_u(u**2) + sim.avy_v(v**2))
        q = (sim.ddx_v(v,dx) - sim.ddy_u(u,dy) + sim.F)/(sim.avy_u(sim.avx_h(h)))
        # Flux (the commented lines are the alternative form that averages
        # the product q*V rather than averaging q and V separately)
        #sim.curr_flux.u[:,:,ii] = sim.avy_v(q*sim.avx_v(V)) - sim.ddx_h(B,dx)
        #sim.curr_flux.v[:,:,ii] = - sim.avx_u(q*sim.avy_u(U)) - sim.ddy_h(B,dy)
        sim.curr_flux.u[:,:,ii] = sim.avy_v(q)*sim.avy_v(sim.avx_v(V)) - sim.ddx_h(B,dx)
        sim.curr_flux.v[:,:,ii] = - sim.avx_u(q)*sim.avx_u(sim.avy_u(U)) - sim.ddy_h(B,dy)
        sim.curr_flux.h[:,:,ii] = - sim.ddx_u(U,dx) - sim.ddy_v(V,dy)
    return
def sadourny_sw_linear_flux(sim):
    """Compute the linearised Sadourny shallow-water tendencies per layer.

    Same output contract as :func:`sadourny_sw_flux` (writes into
    ``sim.curr_flux`` in place) but linearised about a state of rest with
    resting depth ``sim.Hs[0]``.
    """
    Nx, Ny, Nz = sim.Nx, sim.Ny, sim.Nz
    dx, dy = sim.dx[0], sim.dx[1]
    #ddx, ddy = sim.ddx, sim.ddy
    #avx, avy = sim.avx, sim.avy
    Hs = sim.Hs[0]
    # Loop through each layer and compute the flux
    for ii in range(sim.Nz):
        # Assign nice names to primary variables
        h = sim.soln.h[:,:,ii]
        u = sim.soln.u[:,:,ii]
        v = sim.soln.v[:,:,ii]
        # Compute secondary variables: linearised transports, PV and pressure
        U = Hs*u
        V = Hs*v
        q = sim.F/Hs
        B = sim.gs[ii]*h
        # Flux (commented lines: alternative averaging of the product q*V)
        #sim.curr_flux.u[:,:,ii] = sim.avy_v(q*sim.avx_v(V)) - sim.ddx_h(B,dx)
        #sim.curr_flux.v[:,:,ii] = - sim.avx_u(q*sim.avy_u(U)) - sim.ddy_h(B,dy)
        sim.curr_flux.u[:,:,ii] = sim.avy_v(q)*sim.avy_v(sim.avx_v(V)) - sim.ddx_h(B,dx)
        sim.curr_flux.v[:,:,ii] = - sim.avx_u(q)*sim.avx_u(sim.avy_u(U)) - sim.ddy_h(B,dy)
        sim.curr_flux.h[:,:,ii] = - sim.ddx_u(U,dx) - sim.ddy_v(V,dy)
    return
def sadourny_sw(sim):
    """Configure *sim* to use the Sadourny shallow-water scheme.

    Sets the spectral sizes ``sim.Nkx``/``sim.Nky`` from the grid size and
    geometry ('periodic' doubles nothing, 'walls' doubles the count),
    installs the Sadourny derivative operators, and selects the flux
    function according to ``sim.dynamics`` ('Nonlinear' or 'Linear').
    Exits the process on an unrecognised ``sim.dynamics`` value.
    """
    # FJP: work on BCs
    if sim.Nx == 1:
        sim.Nkx = 1
    else:
        if sim.geomx == 'periodic':
            sim.Nkx = sim.Nx
        elif sim.geomx == 'walls':
            sim.Nkx = 2*sim.Nx
    if sim.Ny == 1:
        sim.Nky = 1
    else:
        if sim.geomy == 'periodic':
            sim.Nky = sim.Ny
        elif sim.geomy == 'walls':
            sim.Nky = 2*sim.Ny
    sim.x_derivs = Diff.SADOURNY_x
    sim.y_derivs = Diff.SADOURNY_y
    if sim.dynamics == 'Nonlinear':
        sim.flux_function = sadourny_sw_flux
    elif sim.dynamics == 'Linear':
        sim.flux_function = sadourny_sw_linear_flux
    else:
        # Fixed: the original used a Python 2 print *statement*, which is a
        # SyntaxError under Python 3; the call form works on both.
        print("dynamics must be from the list: Nonlinear, Linear")
        sys.exit()
|
PyRsw/PyRsw
|
src/Fluxes/SADOURNY_SW.py
|
Python
|
mit
| 4,319
|
# -*- coding: utf-8 -*-
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('../../tools'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# NOTE(review): sphinx.ext.pngmath was removed in Sphinx 1.8 — switch to
# sphinx.ext.imgmath if the Sphinx version is ever upgraded.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.pngmath',
    'sphinx.ext.intersphinx',
    # Create links to Python source code for the module.
    # 'sphinx.ext.viewcode',
    'sphinx.ext.autosummary',
    'sphinx.ext.inheritance_diagram',
    ]
# Add any locations and names of other projects that should be linked to in this documentation.
intersphinx_mapping = {
    'python': ('http://docs.python.org', None),
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'MPipe'
copyright = u'2014, Velimir Mlaker'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# NOTE(review): the example below ('wm5') looks copied from another project.
#modindex_common_prefix = ['wm5.', 'wm5', ]
# Set this to 'both' to append the __init__(self) docstring to the class docstring.
autoclass_content = 'both'
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'mpipe'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = '{0} Documentation'.format(project)
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    '**' : [],
    # '**' : ['localtoc.html'],
    # '**' : ['globaltoc.html'],
    # '**' : ['searchbox.html', 'search.html'],
    # '**' : ['searchbox.html'],
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = { 'search' : 'search.html' }
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MPipedoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'MPipe.tex', u'MPipe Documentation',
   u'Velimir Mlaker', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
latex_domain_indices = False
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'mpipe', u'MPipe Documentation',
     [u'Velimir Mlaker'], 1)
]
rst_prolog = '''
.. |NAME| replace:: MPipe
'''
# End of file.
|
vmlaker/mpipe
|
doc/source/conf.py
|
Python
|
mit
| 7,581
|
##
# Copyright (c) 2008-2010 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
import collections
from importkit.yaml import validator
from importkit.yaml.validator.tests.base import SchemaTest, raises, result
class TestTypes(SchemaTest):
    """Schema-validation tests for the basic YAML types.

    IMPORTANT: each test method's docstring is NOT documentation — it is
    the YAML fixture that the ``@raises``/``@result`` decorators feed to
    the validator.  Do not edit docstring content or indentation.
    """
    def setUp(self):
        super().setUp()
        self.schema = self.get_schema('types.Schema')
    @raises(validator.SchemaValidationError, 'expected none')
    def test_validator_types_none_fail1(self):
        """
        none: '12'
        """
    @result(key='none', value=None)
    def test_validator_types_none_result(self):
        """
        none:
        """
    @raises(validator.SchemaValidationError, 'expected integer')
    def test_validator_types_int_fail1(self):
        """
        int: '12'
        """
    @raises(validator.SchemaValidationError, 'expected integer')
    def test_validator_types_int_fail2(self):
        """
        int: 123.2
        """
    @result(key='int', value=31415)
    def test_validator_types_int_result(self):
        """
        int: 31415
        """
    @raises(validator.SchemaValidationError, 'expected number (int or float)')
    def test_validator_types_number_fail1(self):
        """
        number: [123, 1]
        """
    @result(key='number', value=31415)
    def test_validator_types_number_int_result(self):
        """
        number: 31415
        """
    @result(key='number', value=31415.2)
    def test_validator_types_number_float_result(self):
        """
        number: 31415.2
        """
    @raises(validator.SchemaValidationError, 'expected text (number or str)')
    def test_validator_types_text_fail1(self):
        """
        text: [123, 1]
        """
    @result(key='text', value='31415')
    def test_validator_types_text_int_result(self):
        """
        text: 31415
        """
    @result(key='text', value='31415.123')
    def test_validator_types_text_float_result(self):
        """
        text: 31415.123
        """
    @result(key='bool', value=True)
    def test_validator_types_bool_yes_result(self):
        """
        bool: yes
        """
    @result(key='bool', value=True)
    def test_validator_types_bool_True_result(self):
        """
        bool: True
        """
    @result(key='bool', value=True)
    def test_validator_types_bool_true_result(self):
        """
        bool: true
        """
    @result(key='bool', value=False)
    def test_validator_types_bool_yes_result2(self):
        """
        bool: no
        """
    @result(key='bool', value=False)
    def test_validator_types_bool_True_result2(self):
        """
        bool: false
        """
    @raises(validator.SchemaValidationError, 'expected bool')
    def test_validator_types_bool_fail1(self):
        """
        bool: 1
        """
    @raises(validator.SchemaValidationError, 'expected bool')
    def test_validator_types_bool_fail2(self):
        """
        bool: 'yes'
        """
    @raises(validator.SchemaValidationError, 'mapping expected')
    def test_validator_types_map_fail1(self):
        """
        dict: 'WRONG'
        """
    @raises(validator.SchemaValidationError, "unexpected key 'wrongkey'")
    def test_validator_types_map_fail2(self):
        """
        dict:
            wrongkey: 1
        """
    @result(key='dict', value={'test1': 3, 'test2': 'a'})
    def test_validator_types_map_defaults(self):
        """
        dict:
        """
    @raises(validator.SchemaValidationError, 'the number of elements in mapping must not be less than 2')
    def test_validator_types_map_constraints1(self):
        """
        fdict:
            a: "1"
        """
    @raises(validator.SchemaValidationError, 'the number of elements in mapping must not exceed 3')
    def test_validator_types_map_constraints2(self):
        """
        fdict:
            a: "1"
            b: "2"
            c: "3"
            d: "4"
        """
    @result(key='fdict', value={'a': "1", 'b': "2"})
    def test_validator_types_map_constraints_ok(self):
        """
        fdict:
            a: "1"
            b: "2"
        """
    @raises(validator.SchemaValidationError, "duplicate mapping key 'A'")
    def test_validator_types_map_duplicate_key_check(self):
        """
        fdict:
            A: "1"
            A: "2"
        """
    @result(key='fdict', value={'a': "1", ('b', 'c'): "2"})
    def test_validator_types_map_nonscalar_key(self):
        """
        fdict:
            a: "1"
            [b, c]: "2"
        """
    @result(key='redict', value={'UPPERCASE': 10, 'lowercase': '10', '12345': True})
    def test_validator_type_map_pattern_key_ok(self):
        """
        redict:
            UPPERCASE: 10
            lowercase: '10'
        """
    @raises(validator.SchemaValidationError, "unexpected key '1'")
    def test_validator_type_map_pattern_key_fail(self):
        """
        redict:
            1: 10
        """
    @result(key='minmax', value=3)
    def test_validator_types_int_minmax(self):
        """
        minmax: 3
        """
    @raises(validator.SchemaValidationError, 'range-min validation failed')
    def test_validator_types_int_minmax_fail(self):
        """
        minmax: 2
        """
    @raises(validator.SchemaValidationError, 'range-max-ex validation failed')
    def test_validator_types_int_minmax_fail2(self):
        """
        minmax: 20
        """
    @result(key='odict', value=collections.OrderedDict([('A', 1), ('B', 2), ('C', 3), ('D', 4)]))
    def test_validator_types_ordered_map(self):
        """
        odict:
            A: 1
            B: 2
            C: 3
            D: 4
        """
|
sprymix/importkit
|
importkit/yaml/validator/tests/test_types.py
|
Python
|
mit
| 5,672
|
# /usr/bin/env python
# NOTE(review): the line above was probably intended as a shebang
# ("#!/usr/bin/env python"); as written it is an ordinary comment.
#
# Render the last 60 temperature readings from CouchDB as an SVG line chart.
import pygal
import requests
import json
# Use a view in CouchDB to get the data.
# Use the first key component for the attribute type; order descending so
# that, with the limit, we get the latest 60 readings first.
r = requests.get('http://127.0.0.1:5984/mvp_sensor_data/_design/doc/_view/attribute_value?startkey=["temperature",{}]&endkey=["temperature"]&descending=true&limit=60')
#print(r)
# Extract numeric values and their timestamps from the view rows.
# NOTE(review): r.json() is called twice, issuing no extra HTTP request but
# re-parsing the body each time.
v_lst = [float(x['value']['value']) for x in r.json()['rows']]
#print(v_lst)
ts_lst = [x['value']['timestamp'] for x in r.json()['rows']]
#print(ts_lst)
line_chart = pygal.Line()
line_chart.title = 'Temperature'
line_chart.y_title="Degrees C"
line_chart.x_title="Timestamp (hover over to display date)"
# Need to reverse order to go from earliest to latest
ts_lst.reverse()
line_chart.x_labels = ts_lst
# Need to reverse order to go from earliest to latest
v_lst.reverse()
line_chart.add('Air Temp', v_lst)
line_chart.render_to_file('/home/pi/MVP_UI/web/temp_chart.svg')
|
webbhm/OpenAg_MVP_UI
|
MVP_UI/python/temp_chart.py
|
Python
|
mit
| 966
|
import requests
from pact_test.either import *
from pact_test.repositories.pact_broker import upload_pact
from pact_test.repositories.pact_broker import next_version
from pact_test.repositories.pact_broker import format_headers
from pact_test.repositories.pact_broker import get_latest_version
def test_current_version_error(mocker):
    """upload_pact returns Left when fetching the current version raises."""
    class GetResponse(object):
        status_code = 200
        def json(self):
            raise requests.exceptions.ConnectionError('Boom!')
    # Fixed: the original patched requests.put with an undefined name
    # ``PutResponse`` (a latent NameError had the lambda ever been called);
    # define a harmless stub here.
    class PutResponse(object):
        status_code = 200
        def json(self):
            return {}
    mocker.patch.object(requests, 'put', lambda x, **kwargs: PutResponse())
    mocker.patch.object(requests, 'get', lambda x, **kwargs: GetResponse())
    out = upload_pact('provider', 'consumer', {})
    assert type(out) is Left
def test_connection_error(mocker):
    """upload_pact returns Left when the PUT to the broker raises."""
    class FakeGet(object):
        status_code = 200
        def json(self):
            return {'_embedded': {'versions': [{'number': '1.0.41'}]}}
    class FakePut(object):
        status_code = 200
        def json(self):
            raise requests.exceptions.ConnectionError('Boom!')
    mocker.patch.object(requests, 'put', lambda url, **kwargs: FakePut())
    mocker.patch.object(requests, 'get', lambda url, **kwargs: FakeGet())
    outcome = upload_pact('provider', 'consumer', {})
    assert type(outcome) is Left
def test_upload_pact(mocker):
    """upload_pact returns Right on a successful GET + PUT round trip."""
    class FakeGet(object):
        status_code = 200
        def json(self):
            return {'_embedded': {'versions': [{'number': '1.0.41'}]}}
    class FakePut(object):
        status_code = 200
        def json(self):
            return {}
    mocker.patch.object(requests, 'put', lambda url, **kwargs: FakePut())
    mocker.patch.object(requests, 'get', lambda url, **kwargs: FakeGet())
    outcome = upload_pact('provider', 'consumer', {})
    assert type(outcome) is Right
def test_next_version():
    """next_version bumps the last (patch) component of a version string."""
    bumped = next_version('1.0.41')
    assert bumped == '1.0.42'
def test_get_latest_version(mocker):
    """get_latest_version returns Right wrapping the broker's version number."""
    class FakeResponse(object):
        status_code = 200
        def json(self):
            return {'_embedded': {'versions': [{'number': 42}]}}
    mocker.patch.object(requests, 'get', lambda url, **kwargs: FakeResponse())
    result = get_latest_version('eggs')
    assert type(result) is Right
    assert result.value == 42
def test_missing_latest_version(mocker):
    """A 404 from the broker falls back to version '1.0.0'."""
    class FakeResponse(object):
        status_code = 404
    mocker.patch.object(requests, 'get', lambda url, **kwargs: FakeResponse())
    result = get_latest_version('eggs')
    assert type(result) is Right
    assert result.value == '1.0.0'
def test_wrong_url():
    """An unreachable broker URL yields Left with a connection message."""
    result = get_latest_version('eggs', base_url='http://host:9999/')
    expected_msg = 'Failed to establish a new connection with http://host:9999/'
    assert type(result) is Left
    assert result.value == expected_msg
def test_format_headers():
    """format_headers flattens each list of one-pair dicts into a single dict."""
    raw_headers = [
        {'spam': 'eggs'},
        {'Content-Type': 'application/json'},
    ]
    pact = {
        "interactions": [
            {
                "request": {"headers": list(raw_headers)},
                "response": {"headers": list(raw_headers)},
            }
        ]
    }
    flattened = {
        'spam': 'eggs',
        'Content-Type': 'application/json',
    }
    new_pact = format_headers(pact)
    interaction = new_pact['interactions'][0]
    assert interaction['request']['headers'] == flattened
    assert interaction['response']['headers'] == flattened
|
Kalimaha/pact-test
|
tests/repositories/pact_broker.py
|
Python
|
mit
| 3,852
|
from django.apps import apps
from contextlib import contextmanager
def session():
    """Return the shared BaseX session stored on the 'basex' app config."""
    app_config = apps.get_app_config('basex')
    return app_config.basex
@contextmanager
def recipe_db():
    """Yield a BaseX session with the 'recipe' database opened.

    The session is closed on normal exit; as in the original code it is
    deliberately not closed when the body raises.
    """
    db = session()
    db.execute('open recipe')
    yield db
    db.close()
|
jajadinimueter/recipe
|
apps/basex/basex.py
|
Python
|
mit
| 239
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
@file __init__.py.py
@author Allen Woods
@date 2016-08-02
@version 16-8-2 上午8:50 ???
Some other Description
"""
def func():
    # Placeholder function; no behaviour implemented yet.
    pass
class Main(object):
    """Placeholder entry-point class; currently does nothing."""
    def __init__(self):
        pass
if __name__ == '__main__':
    # Module is a stub: nothing to run when executed directly.
    pass
|
allenwoods/parasys
|
debug/__init__.py
|
Python
|
mit
| 306
|
#!usr/bin/env python
#-*- coding:utf-8 -*-
"""
@author: James Zhang
@date:
"""
import numpy as np
import theano
import theano.tensor as T
from theano.ifelse import ifelse
from theano.tensor.shared_randomstreams import RandomStreams
from collections import OrderedDict
import copy
import sys
sys.setrecursionlimit(1000000)
def handle_binary_vector(given_list, k):
    """Binarize a list by marking the positions of its k largest values.

    Args:
        given_list: list of comparable numeric values; NOT modified.
        k: how many of the largest entries to mark with 1.

    Returns:
        (binary_vector, original_values): ``binary_vector`` is an int64
        numpy array with 1 at the indices of the k largest entries (ties
        broken by earlier position) and 0 elsewhere; ``original_values``
        is a copy of the input in its original order.

    Fixes over the previous version: the input list is no longer sorted
    in place (``given_list.sort`` mutated the caller's list), and
    duplicate values no longer collapse onto the same position
    (``list.index`` always returned the first occurrence).
    """
    original = copy.deepcopy(given_list)
    # Indices of the k largest values; sorted() is stable, so equal values
    # keep their original relative order.
    top_indices = sorted(range(len(original)), key=lambda idx: original[idx],
                         reverse=True)[:k]
    binary_vector = np.zeros(len(original), dtype='int64')
    for idx in top_indices:
        binary_vector[idx] = 1
    return (binary_vector, original)
def floatX(X):
    # Cast to the float dtype Theano is configured with (float32/float64).
    return np.asarray(X, dtype=theano.config.floatX)
def random_weights(shape, name=None):
    # Uniform init in [-0.5, 0.5); the commented line is the old Gaussian init.
    # return theano.shared(floatX(np.random.randn(*shape) * 0.01), name=name)
    return theano.shared(floatX(np.random.uniform(size=shape, low=-0.5, high=0.5)), name=name)
def zeros(shape, name=""):
    # Zero-filled shared variable, e.g. for bias vectors.
    return theano.shared(floatX(np.zeros(shape)), name=name)
def softmax(X, temperature=1.0):
    # Temperature-scaled softmax over axis 1; subtracting the row max
    # first keeps exp() numerically stable.
    e_x = T.exp((X - X.max(axis=1).dimshuffle(0, 'x')) / temperature) # dimshuffle(0, 'x') output 2 dim array
    # return prob of each label. prob1+...+probn = 1
    return e_x / e_x.sum(axis=1).dimshuffle(0, 'x') # dimshuffle(0, 'x') output 2 dim array
def sigmoid(X):
    # Logistic sigmoid.
    return 1 / (1 + T.exp(-X))
def dropout(X, dropout_prob=0.0):
    # Inverted dropout: kept units are scaled by 1/retain_prob at train time.
    # NOTE(review): the fixed seed makes the mask sequence deterministic
    # across runs -- confirm this is intended.
    retain_prob = 1 - dropout_prob
    srng = RandomStreams(seed=1234)
    X *= srng.binomial(X.shape, p=retain_prob, dtype=theano.config.floatX)
    X /= retain_prob
    return X
# def dropout(x, dropout_prob):
#     if dropout_prob < 0. or dropout_prob > 1.:
#         raise Exception('Dropout level must be in interval [0, 1]')
#     retain_prob = 1. - dropout_prob
#     sample=np.random.binomial(n=1, p=retain_prob, size=x.shape)
#     x *= sample
#     x /= retain_prob
#     return x
def rectify(X):
    # ReLU activation.
    return T.maximum(X, 0.)
def clip(X, epsilon):
    # Clamp values element-wise into [-epsilon, epsilon].
    return T.maximum(T.minimum(X, epsilon), -1*epsilon)
def scale(X, max_norm):
    # Rescale X so its L1 norm does not exceed max_norm.
    curr_norm = T.sum(T.abs_(X))
    return ifelse(T.lt(curr_norm, max_norm), X, max_norm * (X / curr_norm))
def SGD(loss, params, learning_rate, lambda2=0.05):
    # Plain SGD with L2 weight decay (lambda2 * p added to each gradient).
    # Returns (updates OrderedDict, symbolic gradients).
    updates = OrderedDict()
    grads = T.grad(cost=loss, wrt=params)
    for p, g in zip(params, grads):
        # updates.append([p, p-learning_rate*(g+lambda2*p)]) # lambda*p regulzation
        updates[p] = p - learning_rate * (g + lambda2 * p)
    return updates, grads
def momentum(loss, params, caches, learning_rate=0.1, rho=0.1, clip_at=0.0, scale_norm=0.0, lambda2=0.0):
    # SGD with a momentum term, optional gradient clipping / norm scaling
    # and L2 weight decay.
    # NOTE(review): the caches are read but never written back here, so the
    # momentum term never accumulates across steps -- confirm whether the
    # caller updates them or whether this is a latent bug.
    updates = OrderedDict()
    grads = T.grad(cost=loss, wrt=params)
    for p, c, g in zip(params, caches, grads):
        if clip_at > 0.0:
            grad = clip(g, clip_at)
        else:
            grad = g
        if scale_norm > 0.0:
            grad = scale(grad, scale_norm)
        delta = rho * grad + (1-rho) * c
        updates[p] = p - learning_rate * (delta + lambda2 * p)
    return updates, grads
def get_params(layers):
    """Collect the parameters of every layer into one flat list."""
    return [param for current_layer in layers
            for param in current_layer.get_params()]
def make_caches(params):
    # One zero-filled shared variable per parameter, with the same shape;
    # used as the running state (e.g. momentum) by the update functions.
    caches = []
    for p in params:
        caches.append(theano.shared(floatX(np.zeros(p.get_value().shape))))
    return caches
# (The string below says: make_caches provides all-zero matrices with the
# same shape as each parameter p, for use by gradient-descent methods.)
"""
make_caches的功能:
提供和p(参数)同shape的全0矩阵
用与梯度下降方法
"""
def one_step_updates(layers):
    """Concatenate the update lists of all layers into a single list."""
    return [update for current_layer in layers
            for update in current_layer.updates()]
|
jfzhang95/lightML
|
SupervisedLearning/Neural Layers/methods.py
|
Python
|
mit
| 3,801
|
# coding: utf-8
from boto.s3.bucket import Bucket
from thumbor.utils import logger
from tornado.concurrent import return_future
import urllib2
import thumbor.loaders.http_loader as http_loader
from tc_aws.aws.connection import get_connection
def _get_bucket(url, root_path=None):
"""
Returns a tuple containing bucket name and bucket path.
url: A string of the format /bucket.name/file/path/in/bucket
"""
url_by_piece = url.lstrip("/").split("/")
bucket_name = url_by_piece[0]
if root_path is not None:
url_by_piece[0] = root_path
else:
url_by_piece = url_by_piece[1:]
bucket_path = "/".join(url_by_piece)
return bucket_name, bucket_path
def _normalize_url(url):
"""
:param url:
:return: exactly the same url since we only use http loader if url stars with http prefix.
"""
return url
def _validate_bucket(context, bucket):
allowed_buckets = context.config.get('S3_ALLOWED_BUCKETS', default=None)
return not allowed_buckets or bucket in allowed_buckets
@return_future
def load(context, url, callback):
    """Load an image for thumbor over HTTP or from S3.

    When AWS_ENABLE_HTTP_LOADER is on and the url is absolute, defers to
    the plain http loader. Otherwise resolves a (bucket, key) pair --
    either from the configured S3_LOADER_BUCKET or from the first path
    segment of the url -- and reads the object. ``callback`` receives the
    raw bytes, or None when the bucket is not allowed or the key missing.
    """
    enable_http_loader = context.config.get('AWS_ENABLE_HTTP_LOADER', default=False)
    if enable_http_loader and url.startswith('http'):
        return http_loader.load_sync(context, url, callback, normalize_url_func=_normalize_url)
    url = urllib2.unquote(url)
    bucket = context.config.get('S3_LOADER_BUCKET', default=None)
    if not bucket:
        # No fixed bucket configured: the first path segment is the bucket.
        bucket, url = _get_bucket(url, root_path=context.config.S3_LOADER_ROOT_PATH)
    if _validate_bucket(context, bucket):
        bucket_loader = Bucket(
            connection=get_connection(context),
            name=bucket
        )
        file_key = None
        try:
            file_key = bucket_loader.get_key(url)
        except Exception, e:
            # Best-effort: log and fall through to callback(None).
            logger.warn("ERROR retrieving image from S3 {0}: {1}".format(url, str(e)))
        if file_key:
            callback(file_key.read())
            return
    # Disallowed bucket or missing key: signal failure with None.
    callback(None)
|
guilhermef/aws
|
tc_aws/loaders/s3_loader.py
|
Python
|
mit
| 2,000
|
from __future__ import unicode_literals
from django.apps import AppConfig
class ContentConfig(AppConfig):
    """
    Module to manage i18n contents
    """
    # Dotted app label Django uses to register this application.
    name = 'content'
|
vollov/lotad
|
content/apps.py
|
Python
|
mit
| 182
|
def sir(n, I, tt, b=0.9, k=0.2):
    """Simulate and plot the SIR epidemic model with Euler steps of size 1.

    Args:
        n: total population size.
        I: initial number of infected people.
        tt: number of time steps to simulate.
        b: infection (contact) rate per step (default 0.9; was hard-coded).
        k: recovery rate per step (default 0.2; was hard-coded).
    """
    import matplotlib.pyplot as pt
    size = tt
    s = [0] * size  # susceptible
    i = [0] * size  # infected
    r = [0] * size  # recovered
    s[0] = n - I
    i[0] = I
    for j in range(1, size):
        # Standard discrete SIR update: new infections move S -> I,
        # recoveries move I -> R; totals are conserved.
        new_infections = (b * s[j-1] * i[j-1]) / n
        s[j] = s[j-1] - new_infections
        i[j] = i[j-1] + new_infections - k * i[j-1]
        r[j] = r[j-1] + k * i[j-1]
    t_axis = list(range(size))
    pt.plot(t_axis, s, t_axis, i, t_axis, r)
    # loc must be a keyword argument; the positional form was removed in
    # modern matplotlib releases.
    pt.legend(('S', 'I', 'R'), loc='upper right')
    pt.xlabel('time')
    pt.ylabel('population')
    pt.title('sir model')
    pt.show()
def print_val():
    # Button callback: read the three Tkinter entry fields (population,
    # initial infected, time steps) and run the simulation.
    sir(int(t.get()),int(y.get()),int(u.get()))
# --- Tkinter front-end: collect the three simulation inputs and run sir() ---
import sys
import Tkinter
top=Tkinter.Tk()
Tkinter.Label(top,text='population').grid(row=0)
t=Tkinter.Entry(top,bd=5)
t.grid(row=0,column=1)
Tkinter.Label(top,text='infected people').grid(row=1)
y=Tkinter.Entry(top,bd=5)
y.grid(row=1,column=1)
Tkinter.Label(top,text='time').grid(row=2)
u=Tkinter.Entry(top,bd=5)
u.grid(row=2,column=1)
#sir(n=(t.get()),I=(y.get()),tt=(u.get()) )
#print(n+1)
# The button triggers print_val, which reads the entries above.
b=Tkinter.Button(top,text='press here if values are correct' ,command=print_val)
b.grid(row=3)
top.mainloop()
|
suraj-deshmukh/myCodes
|
sir.py
|
Python
|
mit
| 1,321
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
# Populate the admin registry from each installed app's admin module.
admin.autodiscover()
urlpatterns = patterns('',
    # NOTE(review): include() under the anchored pattern r'^$' only matches
    # included urls that are themselves empty -- confirm this is intended.
    url(r'^$', include('launch.urls', namespace="launch", app_name="launch")),
    url(r'^admin/', include(admin.site.urls)),
)
|
eltonsantos/dom
|
dom/urls.py
|
Python
|
mit
| 263
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-08 14:31
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the blog Post model."""
    initial = True
    dependencies = [
        # Posts reference the configured user model through the author FK.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=250)),
                ('slug', models.SlugField(max_length=250, unique_for_date='publish')),
                ('body', models.TextField()),
                ('publish', models.DateTimeField(default=django.utils.timezone.now)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('status', models.CharField(choices=[('draft', 'Draft'), ('published', 'Published')], default='draft', max_length=10)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='blog_posts', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                # Newest posts first by default.
                'ordering': ('-publish',),
            },
        ),
    ]
|
pauljherrera/avantiweb
|
blog/migrations/0001_initial.py
|
Python
|
mit
| 1,431
|
import os
from celery import Celery
# Make sure Django settings are importable before Celery configures itself.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'persephone.settings')
app = Celery('persephone')
# All CELERY_* entries in the Django settings become Celery settings.
app.config_from_object('django.conf:settings', namespace='CELERY')
# Discover tasks.py modules in every installed Django app.
app.autodiscover_tasks()
|
karamanolev/persephone
|
persephone/persephone/celery.py
|
Python
|
mit
| 231
|
#!/usr/bin/env python
# This pre-processor parses provided objects' c files for
# MP_REGISTER_MODULE(module_name, obj_module, enabled_define)
# These are used to generate a header with the required entries for
# "mp_rom_map_elem_t mp_builtin_module_table[]" in py/objmodule.c
from __future__ import print_function
import re
import os
import argparse
# Matches MP_REGISTER_MODULE(module_name, obj_module, enabled_define) calls
# that follow a newline or a ';', capturing the three arguments.
pattern = re.compile(
    r"[\n;]\s*MP_REGISTER_MODULE\((.*?),\s*(.*?),\s*(.*?)\);",
    flags=re.DOTALL
)
def find_c_file(obj_file, vpath):
    """ Search vpaths for the c file that matches the provided object_file.
    :param str obj_file: object file to find the matching c file for
    :param List[str] vpath: List of base paths, similar to gcc vpath
    :return: str path to c file or None
    """
    # Map foo/bar.o -> foo/bar.c, relative to each vpath root.
    relative_c_file = os.path.splitext(obj_file)[0] + ".c"
    relative_c_file = relative_c_file.lstrip('/\\')
    for base in vpath:
        candidate = os.path.join(base, relative_c_file)
        if os.path.exists(candidate):
            return candidate
    return None
def find_module_registrations(c_file):
    """ Find any MP_REGISTER_MODULE definitions in the provided c file.
    :param str c_file: path to c file to check, or None to skip
    :return: set of (module_name, obj_module, enabled_define) tuples
    """
    # The needless 'global pattern' statement was removed: reading a
    # module-level name never requires it.
    if c_file is None:
        # No c file to match the object file, skip
        return set()
    with open(c_file) as c_file_obj:
        return set(re.findall(pattern, c_file_obj.read()))
def generate_module_table_header(modules):
""" Generate header with module table entries for builtin modules.
:param List[(module_name, obj_module, enabled_define)] modules: module defs
:return: None
"""
# Print header file for all external modules.
mod_defs = []
print("// Automatically generated by makemoduledefs.py.\n")
for module_name, obj_module, enabled_define in modules:
mod_def = "MODULE_DEF_{}".format(module_name.upper())
mod_defs.append(mod_def)
print((
"#if ({enabled_define})\n"
" extern const struct _mp_obj_module_t {obj_module};\n"
" #define {mod_def} {{ MP_ROM_QSTR({module_name}), MP_ROM_PTR(&{obj_module}) }},\n"
"#else\n"
" #define {mod_def}\n"
"#endif\n"
).format(module_name=module_name, obj_module=obj_module,
enabled_define=enabled_define, mod_def=mod_def)
)
print("\n#define MICROPY_REGISTERED_MODULES \\")
for mod_def in mod_defs:
print(" {mod_def} \\".format(mod_def=mod_def))
print("// MICROPY_REGISTERED_MODULES")
def main():
    """Parse CLI arguments, scan the object files' C sources for module
    registrations, and print the generated header to stdout."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--vpath", default=".",
                            help="comma separated list of folders to search for c files in")
    arg_parser.add_argument("files", nargs="*",
                            help="list of c files to search")
    args = arg_parser.parse_args()
    search_paths = [p.strip() for p in args.vpath.split(',')]
    registrations = set()
    for obj_file in args.files:
        matching_c = find_c_file(obj_file, search_paths)
        registrations |= find_module_registrations(matching_c)
    generate_module_table_header(sorted(registrations))
if __name__ == '__main__':
    main()
|
adafruit/micropython
|
py/makemoduledefs.py
|
Python
|
mit
| 3,304
|
# -*- encoding: utf-8 -*-
import re
import decimal
from xml.etree.cElementTree import fromstring, tostring
from xml.etree.cElementTree import Element, iselement
from authorize import responses
# Authorize.Net CIM XML schema URL, namespace, and the etree tag prefix
# that appears on every parsed response tag.
API_SCHEMA = 'https://api.authorize.net/xml/v1/schema/AnetApiSchema.xsd'
API_SCHEMA_NS = "AnetApi/xml/v1/schema/AnetApiSchema.xsd"
PREFIX = "{AnetApi/xml/v1/schema/AnetApiSchema.xsd}"
# Customer types.
INDIVIDUAL = u"individual"
BUSINESS = u"business"
# eCheck transaction types.
ECHECK_CCD = u"CCD"
ECHECK_PPD = u"PPD"
ECHECK_TEL = u"TEL"
ECHECK_WEB = u"WEB"
# Payment profile kinds.
BANK = u"bank"
CREDIT_CARD = u"cc"
ECHECK = u"echeck"
# Subscription billing interval units.
DAYS_INTERVAL = u"days"
MONTHS_INTERVAL = u"months"
# Payment-profile validation modes.
VALIDATION_NONE = u"none"
VALIDATION_TEST = u"testMode"
VALIDATION_LIVE = u"liveMode"
# Bank account types.
ACCOUNT_CHECKING = u"checking"
ACCOUNT_SAVINGS = u"savings"
ACCOUNT_BUSINESS_CHECKING = u"businessChecking"
# Transaction types.
AUTH_ONLY = u"auth_only"
CAPTURE_ONLY = u"capture_only"
AUTH_CAPTURE = u"auth_capture"
CREDIT = u"credit"
PRIOR_AUTH_CAPTURE = u"prior_auth_capture"
VOID = u"void"
class AuthorizeSystemError(Exception):
    """Raised for failures below the application level: Authorize.Net is
    down, or it returned a response we cannot parse. Constructed with
    exactly two arguments (the cause and the offending payload)."""

    def __init__(self, *args):
        # Exception.__init__ stores the arguments in self.args for us.
        Exception.__init__(self, *args)

    def __str__(self):
        return "Exception: %s caused by %s" % self.args

    def __repr__(self):
        # Here we are printing a tuple, the , at the end is _required_
        return "AuthorizeSystemError%s" % (self.args,)
# Splits camelCase tag names into their capitalised atoms (used by purify).
c = re.compile(r'([A-Z]+[a-z_]+)')
def convert(arg):
    """
    Convert an object to its xml representation
    """
    if iselement(arg):
        return arg # the element
    if isinstance(arg, dict_accessor):
        # NOTE(review): bare except below catches everything; the intent
        # is "no text_ key present".
        try:
            return arg.text_
        except:
            raise Exception("Cannot serialize %s, missing text_ attribute" % (arg,))
    if isinstance(arg, dict):
        return arg # attributes of the element
    if isinstance(arg, unicode):
        return arg
    if isinstance(arg, decimal.Decimal):
        return unicode(arg)
    # Booleans must be checked before ints: True/False are ints too.
    if arg is True:
        return 'true'
    if arg is False:
        return 'false'
    if isinstance(arg, float):
        return unicode(round(arg, 2)) # there's nothing less than cents anyway
    if isinstance(arg, (int, long)):
        return unicode(arg)
    if isinstance(arg, str):
        raise Exception("'%s' not unicode: can only accept unicode strings" % (arg,))
    raise Exception("Cannot convert %s of type %s" % (arg, type(arg)))
def utf8convert(arg):
    """
    Further extend L{convert} to return UTF-8 strings instead of unicode.
    """
    value = convert(arg)
    if isinstance(value, unicode):
        return value.encode('utf-8')
    return value
class XMLBuilder(object):
    """
    XMLBuilder tries to be slightly clever in order to be easier for
    the programmer. If you try to add arguments that are None they
    won't be added to the output because empty XML tags are not worth
    the bandwidth and actually mean something different than None.
    """
    def __getattr__(self, key):
        # Any attribute access returns a factory building an Element named
        # after the attribute, e.g. x.amount(u"1.00") -> <amount>1.00</amount>.
        def _wrapper_func(*args):
            converted = [convert(arg) for arg in args if arg is not None]
            if not converted:
                # Every argument was None: omit the tag entirely.
                return None
            el = Element(key)
            settext = False
            setatts = False
            for arg in converted:
                if iselement(arg):
                    el.append(arg)
                elif isinstance(arg, basestring):
                    assert not settext, "cannot set text twice"
                    el.text = arg
                    settext = True
                elif isinstance(arg, dict):
                    assert not setatts, "cannot set attributes twice"
                    for k, v in arg.iteritems():
                        el.set(k, v)
                    setatts = True
                else:
                    raise TypeError("unhandled argument type: %s" % type(arg))
            return el
        return _wrapper_func
# Module-wide builder instance used by all the helpers below.
x = XMLBuilder()
def flatten(tree):
"""
Return a flattened tree in string format encoded in utf-8
"""
return tostring(tree, "utf-8")
def purify(s):
    """
    s is an etree.tag and contains also information on the namespace,
    if that information is present try to remove it, then convert the
    camelCaseTags to underscore_notation_more_python_friendly.
    """
    stripped = s[len(PREFIX):] if s.startswith(PREFIX) else s
    atoms = [atom.lower() for atom in c.split(stripped) if atom]
    return '_'.join(atoms)
class dict_accessor(dict):
    """
    Dictionary whose items can also be reached with dot-notation.
    Missing attributes raise KeyError, exactly like missing keys.
    """
    def __getattr__(self, attr):
        # Delegate straight to item access (dict.__getitem__).
        return self[attr]

    def __setattr__(self, attr, value):
        # Attribute assignment stores an item instead of an attribute.
        self[attr] = value
def parse_node(node):
    """
    Return a dict_accessor representation of the node.
    """
    new = dict_accessor({})
    if node.text and node.text.strip():
        t = node.text
        if isinstance(t, unicode):
            new['text_'] = t
        else:
            new['text_'] = t.decode('utf-8', "replace")
    if node.attrib:
        new['attrib_'] = dict_accessor(node.attrib)
    # NOTE(review): getchildren() is deprecated/removed in newer
    # ElementTree; list(node) is the modern equivalent.
    for child in node.getchildren():
        tag = purify(child.tag)
        child = parse_node(child)
        if tag not in new:
            new[tag] = child
        else:
            # Repeated tags are collected into a list of children.
            old = new[tag]
            if not isinstance(old, list):
                new[tag] = [old]
            new[tag].append(child)
    return new
def to_dict(s, error_codes, do_raise=True, delimiter=u',', encapsulator=u'', uniform=False):
    """
    Return a dict_accessor representation of the given string, if raise_
    is True an exception is raised when an error code is present.
    """
    try:
        t = fromstring(s)
    except SyntaxError, e:
        # Unparseable XML: Authorize returned something we cannot read.
        raise AuthorizeSystemError(e, s)
    parsed = dict_accessor(parse_node(t)) # discard the root node which is useless
    try:
        if isinstance(parsed.messages.message, list): # there's more than a child
            return parsed
        code = parsed.messages.message.code.text_
        if uniform:
            # Normalise a single message into a one-element list.
            parsed.messages.message = [parsed.messages.message]
    except KeyError:
        # No messages section at all: return the parse as-is.
        return parsed
    if code in error_codes:
        if do_raise:
            raise error_codes[code]
    # Replace the raw delimiter-separated payload with a parsed mapping.
    dr = None
    if parsed.get('direct_response') is not None:
        dr = parsed.direct_response.text_
    elif parsed.get('validation_direct_response') is not None:
        dr = parsed.validation_direct_response.text_
    if dr is not None:
        parsed.direct_response = parse_direct_response(dr,
                                                       delimiter,
                                                       encapsulator)
    return parsed
# Field names of the AIM "direct response" payload, in positional order.
m = ['code', 'subcode', 'reason_code', 'reason_text', 'auth_code',
     'avs', 'trans_id', 'invoice_number', 'description', 'amount', 'method',
     'trans_type', 'customer_id', 'first_name', 'last_name', 'company',
     'address', 'city', 'state', 'zip', 'country', 'phone', 'fax', 'email',
     'ship_first_name', 'ship_last_name', 'ship_company', 'ship_address',
     'ship_city', 'ship_state', 'ship_zip', 'ship_country', 'tax', 'duty',
     'freight', 'tax_exempt', 'po_number', 'md5_hash', 'ccv',
     'holder_verification']
def parse_direct_response(s, delimiter=u',', encapsulator=u''):
    """
    Very simple format but made of many fields, the most complex ones
    have the following meanings:
    code:
        see L{responses.aim_codes} for all the codes
    avs:
        see L{responses.avs_codes} for all the codes
    method: CC or ECHECK
    trans_type:
        AUTH_CAPTURE
        AUTH_ONLY
        CAPTURE_ONLY
        CREDIT
        PRIOR_AUTH_CAPTURE
        VOID
    tax_exempt: true, false, T, F, YES, NO, Y, N, 1, 0
    ccv:
        see L{responses.ccv_codes} for all the codes
    holder_verification:
        see L{responses.holder_verification_codes} for all the codes
    """
    if not isinstance(s, unicode):
        s = s.decode('utf-8', 'replace')
    # being <e> the encapsulator and <d> the delimiter
    # this is the format of the direct response:
    # <e>field<e><d><e>field<e><d><e>field<e>
    #
    # Here's a regexp that would parse this:
    # "\<e>([^\<d>\<e>]*)\<e>\<d>?"
    # But it has a problem when <e> is '' and I don't
    # have the will to do the much harder one that actually
    # does it well... So let's just split and strip.
    e = encapsulator
    d = delimiter
    v = s.split(e+d+e)
    v[0] = v[0].lstrip(e)
    v[-1] = v[-1].rstrip(e)
    if not len(v) >= len(m):
        # Too few fields: report a parse failure instead of mis-zipping.
        d = dict_accessor({'error': "Couldn't parse the direct response"})
    else:
        d = dict_accessor(dict(zip(m, v)))
    # Keep the raw payload around for debugging.
    d.original = s
    return d
def macro(action, login, key, *body):
    """
    Main XML structure re-used by every request.
    """
    return getattr(x, action)(
        {'xmlns': API_SCHEMA_NS},
        x.merchantAuthentication(
            x.name(login),
            x.transactionKey(key)
        ),
        *body
    )
def _address(pre='', kw={}, *extra):
    """
    Basic address components with extension capability.
    """
    # NOTE(review): the mutable default kw={} is only ever read here,
    # never mutated, so it is benign.
    return [
        x.firstName(kw.get(pre+'first_name')), # optional
        x.lastName(kw.get(pre+'last_name')), # optional
        x.company(kw.get(pre+'company')), # optional
        x.address(kw.get(pre+'address')), # optional
        x.city(kw.get(pre+'city')), # optional
        x.state(kw.get(pre+'state')), # optional
        x.zip(kw.get(pre+'zip')), # optional
        x.country(kw.get(pre+'country')) # optional
    ] + list(extra)
def address(pre='', **kw):
    """
    Simple address with prefixing possibility
    """
    return x.address(
        *_address(pre, kw)
    )
def address_2(pre='', **kw):
    """
    Extended address with phoneNumber and faxNumber in the same tag
    """
    return x.address(
        *_address(pre, kw,
            x.phoneNumber(kw.get(pre+'phone')),
            x.faxNumber(kw.get(pre+'fax'))
        )
    )
def update_address(**kw):
    # Address update requires the id of the address being replaced.
    return x.address(
        *_address('ship_', kw,
            x.phoneNumber(kw.get('ship_phone')),
            x.faxNumber(kw.get('ship_fax')),
            x.customerAddressId(kw['customer_address_id'])
        )
    )
def billTo(**kw):
    # Billing address; all parts are optional.
    return x.billTo(
        *_address('bill_', kw,
            x.phoneNumber(kw.get('bill_phone')), # optional
            x.faxNumber(kw.get('bill_fax')) # optional
        )# optional
    )
def arbBillTo(**kw):
    # This is just to be sure that they were passed.
    # as the spec requires
    kw['bill_first_name']
    kw['bill_last_name']
    return x.billTo(
        *_address('bill_', kw)
    )
def _shipTo(**kw):
    # Shared shipping-address components for shipTo/shipToList.
    return _address('ship_', kw,
        x.phoneNumber(kw.get('ship_phone')),
        x.faxNumber(kw.get('ship_fax'))
    )
def shipToList(**kw):
    return x.shipToList(
        *_shipTo(**kw)
    )
def shipTo(**kw):
    return x.shipTo(
        *_shipTo(**kw)
    )
def payment(**kw):
    # Build a <payment> element for either a credit card or a bank
    # account, depending on kw['profile_type'] (default: credit card).
    # NOTE(review): any other profile_type falls through and returns None.
    profile_type = kw.get('profile_type', CREDIT_CARD)
    if profile_type == CREDIT_CARD:
        return x.payment(
            x.creditCard(
                x.cardNumber(kw['card_number']),
                x.expirationDate(kw['expiration_date']), # YYYY-MM
                x.cardCode(kw['csc'])
            )
        )
    elif profile_type == BANK:
        return x.payment(
            x.bankAccount(
                x.accountType(kw.get('account_type')), # optional: checking, savings, businessChecking
                x.routingNumber(kw['routing_number']), # 9 digits
                x.accountNumber(kw['account_number']), # 5 to 17 digits
                x.nameOnAccount(kw['name_on_account']),
                x.echeckType(kw.get('echeck_type')), # optional: CCD, PPD, TEL, WEB
                x.bankName(kw.get('bank_name')) # optional
            )
        )
def transaction(**kw):
    """Build the <transaction> element for a CIM profile transaction.

    The transaction kind is chosen by kw['profile_type'] (default
    AUTH_ONLY); some kinds require extra keys such as 'approval_code'
    or 'trans_id'. Raises for unknown kinds.
    """
    # Authorize.Net allows at most 30 line items per transaction.
    assert len(kw.get('line_items', [])) <= 30
    content = [
        x.amount(kw['amount']),
        x.tax(
            x.amount(kw.get('tax_amount')),
            x.name(kw.get('tax_name')),
            x.description(kw.get('tax_descr'))
        ),
        x.shipping(
            x.amount(kw.get('ship_amount')),
            x.name(kw.get('ship_name')),
            # NOTE(review): this builds a second <name> from
            # 'ship_description' -- x.description was probably intended.
            x.name(kw.get('ship_description'))
        ),
        x.duty(
            x.amount(kw.get('duty_amount')),
            x.name(kw.get('duty_name')),
            x.description(kw.get('duty_description'))
        )
    ] + list(
        x.lineItems(
            x.itemId(line.get('item_id')),
            x.name(line['name']),
            x.description(line.get('description')),
            x.quantity(line.get('quantity')),
            x.unitPrice(line.get('unit_price')),
            x.taxable(line.get('taxable'))
        )
        for line in kw.get('line_items', [])
    ) + [
        x.customerProfileId(kw['customer_profile_id']),
        x.customerPaymentProfileId(kw['customer_payment_profile_id']),
        x.customerAddressId(kw.get('customer_address_id')),
    ]
    ptype = kw.get('profile_type', AUTH_ONLY)
    # Order info applies to everything except prior-auth captures/voids.
    if ptype in (AUTH_ONLY, CAPTURE_ONLY, AUTH_CAPTURE, CREDIT):
        content += [
            x.order(
                x.invoiceNumber(kw.get('invoice_number')),
                x.description(kw.get('description')),
                x.purchaseOrderNumber(kw.get('purchase_order_number'))
            )
        ]
    if ptype in (AUTH_ONLY, CAPTURE_ONLY, AUTH_CAPTURE):
        content += [
            x.taxExempt(kw.get('tax_exempt', False)),
            x.recurringBilling(kw.get('recurring', False)),
            x.cardCode(kw.get('ccv'))
        ]
    if ptype == AUTH_ONLY:
        profile_type = x.profileTransAuthOnly(
            *content
        )
    elif ptype == CAPTURE_ONLY:
        profile_type = x.profileTransCaptureOnly(
            *(content + [x.approvalCode(kw['approval_code'])])
        )
    elif ptype == AUTH_CAPTURE:
        profile_type = x.profileTransAuthCapture(
            *content
        )
    elif ptype == PRIOR_AUTH_CAPTURE:
        profile_type = x.profileTransPriorAuthCapture(
            *(content + [x.transId(kw['trans_id'])])
        )
    # NOTE: It is possible to issue a refund without the customerProfileId and
    # the customerPaymentProfileId being supplied. However, this is not
    # currently supported, and requires sending the masked credit card number.
    elif ptype == CREDIT:
        profile_type = x.profileTransRefund(
            *(content + [x.transId(kw['trans_id'])])
        )
    elif ptype == VOID:
        profile_type = x.profileTransVoid(
            *(content + [x.transId(kw['trans_id'])])
        )
    else:
        raise Exception("Unsupported profile type: %r" % (ptype,))
    return x.transaction(profile_type)
def paymentProfiles(**kw):
    # Plural element used when creating a profile with embedded payments.
    return x.paymentProfiles(
        x.customerType(kw.get('customer_type')), # optional: individual, business
        billTo(**kw),
        payment(**kw)
    )
def update_paymentProfile(**kw):
    # Updating requires the id of the payment profile being replaced.
    return x.paymentProfile(
        x.customerType(kw.get('customer_type')), # optional
        billTo(**kw),
        payment(**kw),
        x.customerPaymentProfileId(kw['customer_payment_profile_id'])
    )
def paymentProfile(**kw):
    return x.paymentProfile(
        x.customerType(kw.get('customer_type')), # optional
        billTo(**kw),
        payment(**kw)
    )
def profile(**kw):
    """Build a customer <profile>, with zero or more payment profiles.

    Explicit kw['payment_profiles'] wins; otherwise a single payment
    profile is embedded when card or bank details are present in kw.
    """
    content = [
        x.merchantCustomerId(kw['customer_id']),
        x.description(kw.get('description')),
        x.email(kw.get('email')),
    ]
    payment_profiles = kw.get('payment_profiles', None)
    if payment_profiles is not None:
        content = content + list(
            paymentProfiles(**prof)
            for prof in payment_profiles
        )
    else:
        if kw.get('card_number') or kw.get("routing_number"):
            content = content + [paymentProfiles(**kw)]
    return x.profile(
        *(content + [shipToList(**kw)])
    )
def subscription(**kw):
    """Build an ARB <subscription> element.

    'trial_amount' is mandatory only when 'trial_occurrences' is given.
    """
    trial_occurrences = kw.get('trial_occurrences')
    trial_amount = None
    if trial_occurrences is not None:
        trial_amount = kw['trial_amount']
    return x.subscription(
        x.name(kw.get('subscription_name')),
        x.paymentSchedule(
            x.interval(
                x.length(kw.get('interval_length')), # up to 3 digits, 1-12 for months, 7-365 days
                x.unit(kw.get('interval_unit')) # days or months
            ),
            x.startDate(kw.get('start_date')), # YYYY-MM-DD
            x.totalOccurrences(kw.get('total_occurrences', 9999)),
            x.trialOccurrences(trial_occurrences)
        ),
        x.amount(kw.get('amount')),
        x.trialAmount(trial_amount),
        payment(**kw),
        x.order(
            x.invoiceNumber(kw.get('invoice_number')),
            x.description(kw.get('description'))
        ),
        x.customer(
            x.type(kw.get('customer_type')), # individual, business
            x.id(kw.get('customer_id')),
            x.email(kw.get('customer_email')),
            x.phoneNumber(kw.get('phone')),
            x.faxNumber(kw.get('fax')),
            x.driversLicense(
                x.number(kw.get('driver_number')),
                x.state(kw.get('driver_state')),
                x.dateOfBirth(kw.get('driver_birth'))
            ),
            x.taxId(kw.get('tax_id'))
        ),
        arbBillTo(**kw),
        shipTo(**kw)
    )
def base(action, login, key, kw, *main):
    # Wrap a request body in the shared auth macro and serialize to utf-8.
    return flatten(
        macro(action, login, key,
            x.refId(kw.get('ref_id')),
            *main
        )
    )
__doc__ = """\
Please refer to http://www.authorize.net/support/CIM_XML_guide.pdf
for documentation on the XML protocol implemented here.
"""
|
simplegeo/authorize
|
authorize/gen_xml.py
|
Python
|
mit
| 17,930
|
import inspect
import logging
import os
import re
import shlex
import subprocess
import sys
import textwrap
import time
from datetime import datetime
from email.utils import formatdate as format_rfc2822
from io import StringIO
from urllib.parse import quote
import aiohttp
import discord
import psutil
import pytz
from asyncpg.exceptions import PostgresError
from dateutil import parser
from dateutil.tz import gettz
from discord.ext.commands import (BucketType, Group, clean_content)
from discord.ext.commands.errors import BadArgument
from bot.bot import command, cooldown, bot_has_permissions
from bot.converters import FuzzyRole, TzConverter, PossibleUser
from cogs.cog import Cog
from utils.tzinfo import fuzzy_tz, tz_dict
from utils.unzalgo import unzalgo, is_zalgo
from utils.utilities import (random_color, get_avatar, split_string,
get_emote_url, send_paged_message,
format_timedelta, parse_timeout,
DateAccuracy)
try:
    # pip < 10 exposed SearchCommand at pip.commands.
    from pip.commands import SearchCommand
except ImportError:
    try:
        # Newer pip moved it under pip._internal; TypeError guards against
        # incompatible signatures in some versions.
        from pip._internal.commands.search import SearchCommand
    except (ImportError, TypeError):
        # Search support is optional; features that need it are disabled.
        SearchCommand = None
logger = logging.getLogger('terminal')
# Day-first date parsing (European order) for user-entered dates.
parserinfo = parser.parserinfo(dayfirst=True)
class Utilities(Cog):
    def __init__(self, bot):
        # Pass-through to the base Cog, which stores the bot reference.
        super().__init__(bot)
    @command()
    @cooldown(1, 10, BucketType.guild)
    async def changelog(self, ctx, page: int=1):
        """Browse the bot changelog as a paged embed, newest entry first."""
        sql = 'SELECT * FROM changelog ORDER BY time DESC'
        rows = await self.bot.dbutil.fetch(sql)
        def create_embed(row):
            # One database row becomes one embed page.
            embed = discord.Embed(title='Changelog', description=row['changes'],
                                  timestamp=row['time'])
            return embed
        def get_page(page, idx):
            # Rows are lazily converted to embeds and cached back into
            # `rows` so each page is built at most once.
            if not isinstance(page, discord.Embed):
                page = create_embed(page)
                page.set_footer(text=f'Page {idx+1}/{len(rows)}')
                rows[idx] = page
            return page
        # Convert 1-based user input to a 0-based index.
        # NOTE(review): page == 0 maps to index 1 (the second page) and
        # negative values pass through unchanged -- confirm intended.
        if page > 0:
            page -= 1
        elif page == 0:
            page = 1
        await send_paged_message(ctx, rows, True, page, get_page)
    @command(aliases=['pong'])
    @cooldown(1, 5, BucketType.guild)
    async def ping(self, ctx):
        """Ping pong"""
        t = time.perf_counter()
        # Prefer the gateway receive timestamp when available; otherwise
        # approximate the local delay from the message creation time.
        if ctx.received_at:
            local_delay = t - ctx.received_at
        else:
            local_delay = datetime.utcnow().timestamp() - ctx.message.created_at.timestamp()
        # Round-trip measurement: time the typing-indicator API call.
        await ctx.trigger_typing()
        t = time.perf_counter() - t
        message = 'Pong!\n🏓 took {:.0f}ms\nLocal delay {:.0f}ms\nWebsocket ping {:.0f}ms'.format(t*1000, local_delay*1000, self.bot.latency*1000)
        if hasattr(self.bot, 'pool'):
            # When a database pool exists, also time a trivial query.
            try:
                _, sql_t = await self.bot.dbutil.fetch('SELECT 1', measure_time=True)
                message += '\nDatabase ping {:.0f}ms'.format(sql_t * 1000)
            except PostgresError:
                message += '\nDatabase could not be reached'
        await ctx.send(message)
@command(aliases=['e', 'emoji'])
@cooldown(1, 5, BucketType.channel)
async def emote(self, ctx, emote: str):
"""Get the link to an emote"""
emote = get_emote_url(emote)
if emote is None:
return await ctx.send('You need to specify an emote. Default (unicode) emotes are not supported ~~yet~~')
await ctx.send(emote)
@command(aliases=['roleping'])
@cooldown(1, 4, BucketType.channel)
async def how2role(self, ctx, *, role: FuzzyRole):
"""Searches a role and tells you how to ping it"""
name = role.name.replace('@', '@\u200b')
await ctx.send(f'`{role.mention}` {name}')
    @command(aliases=['howtoping'])
    @cooldown(1, 4, BucketType.channel)
    async def how2ping(self, ctx, *, user):
        """Searches a user by their name and get the string you can use to ping them"""
        # Search guild members when invoked in a guild, otherwise every
        # member the bot can see.
        if ctx.guild:
            members = ctx.guild.members
        else:
            members = self.bot.get_all_members()
        def filter_users(predicate):
            # First member whose name (or nick) satisfies the predicate.
            for member in members:
                if predicate(member):
                    return member
                if member.nick and predicate(member.nick):
                    return member
        if ctx.message.raw_role_mentions:
            # A role mention was used: strip the invocation prefix and the
            # leading '@' so only the typed name remains.
            i = len(ctx.invoked_with) + len(ctx.prefix) + 1
            user = ctx.message.clean_content[i:]
            user = user[user.find('@')+1:]
        # Prefix match first, substring match as a fallback.
        found = filter_users(lambda u: str(u).startswith(user))
        s = '`<@!{}>` {}'
        if found:
            return await ctx.send(s.format(found.id, str(found)))
        found = filter_users(lambda u: user in str(u))
        if found:
            return await ctx.send(s.format(found.id, str(found)))
        else:
            return await ctx.send('No users found with %s' % user)
    @command(aliases=['src', 'source_code', 'sauce'])
    @cooldown(1, 5, BucketType.user)
    async def source(self, ctx, *cmd):
        """Link to the source code for this bot
        You can also get the source code of commands by doing {prefix}{name} cmd_name"""
        if cmd:
            # Resolve a possibly nested command path such as "group sub".
            full_name = ' '.join(cmd)
            cmnd = self.bot.all_commands.get(cmd[0])
            if cmnd is None:
                raise BadArgument(f'Command "{full_name}" not found')
            for c in cmd[1:]:
                if not isinstance(cmnd, Group):
                    raise BadArgument(f'Command "{full_name}" not found')
                cmnd = cmnd.get_command(c)
            cmd = cmnd
        if not cmd:
            await ctx.send('You can find the source code for this bot here https://github.com/s0hv/Not-a-bot')
            return
        source, line_number = inspect.getsourcelines(cmd.callback)
        filename = inspect.getsourcefile(cmd.callback).replace(os.getcwd(), '').strip('\\/')
        # unformatted source
        original_source = textwrap.dedent(''.join(source))
        # Url pointing to the command in github
        # NOTE(review): the literal "(unknown)" below looks like a lost
        # {filename} substitution -- the computed `filename` is unused.
        url = f'https://github.com/s0hv/Not-a-bot/tree/master/(unknown)#L{line_number}'
        # Source code in message
        source = original_source.replace('```', '`\u200b`\u200b`') # Put zero width space between backticks so they can be within a codeblock
        source = f'<{url}>\n```py\n{source}\n```'
        if len(source) > 2000:
            # Too long for one Discord message: attach as a file instead.
            file = discord.File(StringIO(original_source), filename=f'{full_name}.py')
            await ctx.send(f'Content was longer than 2000 ({len(source)} > 2000)\n<{url}>', file=file)
            return
        await ctx.send(source)
    @command()
    @cooldown(1, 5, BucketType.user)
    async def undo(self, ctx):
        """
        Undoes the last undoable command result. Not all messages will be undoable
        and undoable messages override each other because only one message can be
        undone.
        """
        # ctx.undo() returns falsy when nothing undoable remains.
        if not await ctx.undo():
            await ctx.send('Failed to undo the latest undoable command for you.\n'
                           'Do note that they expire in one minute')
@command()
@cooldown(1, 10, BucketType.user)
async def invite(self, ctx):
"""This bots invite link"""
await ctx.send(f'<https://discordapp.com/api/oauth2/authorize?client_id={self.bot.user.id}&permissions=1342557248&scope=bot>')
@staticmethod
def _unpad_zero(value):
if not isinstance(value, str):
return
return value.lstrip('0')
    @command(aliases=['bot', 'botinfo'])
    @cooldown(2, 5, BucketType.user)
    @bot_has_permissions(embed_links=True)
    async def stats(self, ctx):
        """Get stats about this bot"""
        # Process uptime and memory usage via psutil.
        pid = os.getpid()
        process = psutil.Process(pid)
        uptime = time.time() - process.create_time()
        d = datetime.utcfromtimestamp(uptime)
        # NOTE(review): rendering a seconds-count through a datetime means the
        # day field wraps after roughly one month of uptime — confirm this is
        # acceptable for long-running instances.
        uptime = f'{d.day-1}d {d.hour}h {d.minute}m {d.second}s'
        current_memory = round(process.memory_info().rss / 1048576, 2)
        memory_usage = f' Current: {current_memory}MB'
        if sys.platform == 'linux':
            try:
                # use pmap to find the memory usage of this process and turn it to megabytes
                # Since shlex doesn't care about pipes | I have to do this
                s1 = subprocess.Popen(shlex.split('pmap %s' % os.getpid()),
                                      stdin=subprocess.PIPE,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE)
                s2 = subprocess.Popen(
                    shlex.split(r'grep -Po "total +\K([0-9])+(?=K)"'),
                    stdin=s1.stdout, stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE)
                s1.stdin.close()
                memory = round(int(s2.communicate()[0].decode('utf-8')) / 1024, 1)
                usable_memory = str(memory) + 'MB'
                memory_usage = f'{current_memory}MB/{usable_memory} ({(current_memory/memory*100):.1f}%)'
            except:
                logger.exception('Failed to get extended mem usage')
                raise

        # Count cached members across all guilds.
        users = 0
        for _ in self.bot.get_all_members():
            users += 1
        guilds = len(self.bot.guilds)

        try:
            # Get the last time the bot was updated (mtime of the git ref)
            last_updated = format_rfc2822(os.stat('.git/refs/heads/master').st_mtime, localtime=True)
        except OSError:
            logger.exception('Failed to get last updated')
            last_updated = 'N/A'

        # Top 3 most used commands from the database.
        sql = 'SELECT * FROM command_stats ORDER BY uses DESC LIMIT 3'
        try:
            rows = await self.bot.dbutil.fetch(sql)
        except PostgresError:
            logger.exception('Failed to get command stats')
            top_cmd = 'Failed to get command stats'
        else:
            top_cmd = ''
            i = 1
            for row in rows:
                name = row['parent']
                cmd = row['cmd']
                if cmd:
                    name += ' ' + cmd
                top_cmd += f'{i}. `{name}` with {row["uses"]} uses\n'
                i += 1

        # Assemble everything into a single embed.
        embed = discord.Embed(title='Stats', colour=random_color())
        embed.add_field(name='discord.py version', value=f"{discord.__version__}")
        embed.add_field(name='Uptime', value=uptime)
        if ctx.guild and ctx.guild.shard_id is not None:
            embed.add_field(name='Shard', value=ctx.guild.shard_id)
        embed.add_field(name='Servers', value=str(guilds))
        embed.add_field(name='Users', value=str(users))
        embed.add_field(name='Memory usage', value=memory_usage)
        embed.add_field(name='Last updated', value=last_updated)
        embed.add_field(name='Most used commands', value=top_cmd)
        embed.set_thumbnail(url=get_avatar(self.bot.user))
        embed.set_author(name=self.bot.user.name, icon_url=get_avatar(self.bot.user))
        await ctx.send(embed=embed)
@command(name='roles', no_pm=True)
@cooldown(1, 10, type=BucketType.guild)
async def get_roles(self, ctx, page=''):
"""Get roles on this server"""
guild_roles = sorted(ctx.guild.roles, key=lambda r: r.name)
idx = 0
if page:
try:
idx = int(page) - 1
if idx < 0:
return await ctx.send('Index must be bigger than 0')
except ValueError:
return await ctx.send('%s is not a valid integer' % page, delete_after=30)
roles = 'A total of %s roles\n' % len(guild_roles)
for role in guild_roles:
roles += '{}: {}\n'.format(role.name, role.mention)
roles = split_string(roles, splitter='\n', maxlen=1000)
await send_paged_message(ctx, roles, starting_idx=idx,
page_method=lambda p, i: '```{}```'.format(p))
@command(aliases=['created_at', 'snowflake', 'snoflake'])
@cooldown(1, 5, type=BucketType.guild)
async def snowflake_time(self, ctx, id: int):
"""Gets creation date from the specified discord id in UTC"""
try:
int(id)
except ValueError:
return await ctx.send("{} isn't a valid integer".format(id))
await ctx.send(str(discord.utils.snowflake_time(id)))
@command()
@cooldown(1, 5, BucketType.user)
async def birthday(self, ctx, *, user: clean_content):
url = 'http://itsyourbirthday.today/#' + quote(user)
await ctx.send(url)
    @command(name='unzalgo')
    @cooldown(2, 5, BucketType.guild)
    async def unzalgo_(self, ctx, *, text=None):
        """Unzalgo text
        if text is not specified a cache lookup on zalgo text is done for the last 100 msgs
        and the first found zalgo text is unzalgo'd"""
        if text is None:
            # Walk the bot's internal message cache newest-first (negative
            # indices) and pick the first zalgo message from this channel.
            messages = self.bot._connection._messages
            for i in range(-1, -100, -1):
                try:
                    msg = messages[i]
                except IndexError:
                    # Fewer than 100 cached messages — stop searching.
                    break

                if msg.channel.id != ctx.channel.id:
                    continue

                if is_zalgo(msg.content):
                    text = msg.content
                    break

        if text is None:
            await ctx.send("Didn't find a zalgo message")
            return

        await ctx.send(unzalgo(text))
@command()
@cooldown(1, 120, BucketType.user)
async def feedback(self, ctx, *, feedback):
"""
Send feedback of the bot.
Bug reports should go to https://github.com/s0hvaperuna/Not-a-bot/issues
"""
webhook = self.bot.config.feedback_webhook
if not webhook:
return await ctx.send('This command is unavailable atm')
e = discord.Embed(title='Feedback', description=feedback)
author = ctx.author
avatar = get_avatar(author)
e.set_thumbnail(url=avatar)
e.set_footer(text=str(author), icon_url=avatar)
e.add_field(name='Guild', value=f'{ctx.guild.id}\n{ctx.guild.name}')
json = {'embeds': [e.to_dict()],
'avatar_url': avatar,
'username': ctx.author.name,
'wait': True}
headers = {'Content-type': 'application/json'}
success = False
try:
r = await self.bot.aiohttp_client.post(webhook, json=json, headers=headers)
except aiohttp.ClientError:
logger.exception('')
else:
status = str(r.status)
# Accept 2xx status codes
if status.startswith('2'):
success = True
if success:
await ctx.send('Feedback sent')
else:
await ctx.send('Failed to send feedback')
@command(aliases=['bug'])
@cooldown(1, 10, BucketType.user)
async def bugreport(self, ctx):
"""For reporting bugs"""
await ctx.send('If you have noticed a bug in my bot report it here https://github.com/s0hv/Not-a-bot/issues\n'
f"If you don't have a github account or are just too lazy you can use {ctx.prefix}feedback for reporting as well")
@command(ingore_extra=True)
@cooldown(1, 10, BucketType.guild)
async def vote(self, ctx):
"""Pls vote thx"""
await ctx.send('https://top.gg/bot/214724376669585409/vote')
@command(aliases=['sellout'])
@cooldown(1, 10)
async def donate(self, ctx):
"""
Bot is not free to host. Donations go straight to server costs
"""
await ctx.send('If you want to support bot in server costs donate to https://www.paypal.me/s0hvaperuna\n'
'Alternatively you can use my DigitalOcean referral link https://m.do.co/c/84da65db5e5b which will help out in server costs as well')
@staticmethod
def find_emoji(emojis, name):
for e in emojis:
if e.name.lower() == name:
return e
    @command()
    @cooldown(1, 5, BucketType.user)
    async def emojify(self, ctx, *, text: str):
        """Turns your text without emotes to text with discord custom emotes
        To blacklist words from emoji search use a quoted string at the
        beginning of the command denoting those words
        e.g. emojify "blacklisted words here" rest of the sentence"""
        emojis = ctx.bot.emojis
        new_text = ''
        word_blacklist = None
        # Very simple method to parse word blacklist
        if text.startswith('"'):
            idx = text.find('"', 1)  # Find second quote idx
            word_blacklist = text[1:idx]
            if word_blacklist:
                text = text[idx+1:]
                word_blacklist = [s.lower().strip(',.') for s in word_blacklist.split(' ')]

        # Per-invocation cache so a repeated word is only resolved once.
        emoji_cache = {}
        lines = text.split('\n')
        for line in lines:
            for s in line.split(' '):
                # Normalized form used for the emoji name lookup.
                es = s.lower().strip(',.:')
                # We don't want to look for emotes that are only a couple characters long
                if len(s) <= 3 or (word_blacklist and es in word_blacklist):
                    new_text += s + ' '
                    continue

                e = emoji_cache.get(es)
                if not e:
                    e = self.find_emoji(emojis, es)
                    if e is None:
                        # No matching emoji — keep the original word.
                        e = s
                    else:
                        e = str(e)
                    emoji_cache[es] = e

                new_text += e + ' '

            new_text += '\n'

        # Discord messages are capped at 2000 characters.
        await ctx.send(new_text[:2000], undoable=True)
    @command(name='pip')
    @cooldown(1, 5, BucketType.channel)
    @bot_has_permissions(embed_links=True)
    async def get_package(self, ctx, *, name):
        """Get a package from pypi"""
        # SearchCommand comes from pip's internal API; it is None when that
        # import failed (pip's internals change between versions).
        if SearchCommand is None:
            return await ctx.send('Not supported')

        def search():
            # Runs in a worker thread because pip's search is blocking.
            try:
                search_command = SearchCommand()
                options, _ = search_command.parse_args([])
                hits = search_command.search(name, options)
                if hits:
                    return hits[0]
            except:
                logger.exception('Failed to search package from PyPi')
                raise

        hit = await self.bot.loop.run_in_executor(self.bot.threadpool, search)
        if not hit:
            return await ctx.send('No matches')

        # Fetch full package metadata from the PyPI JSON API.
        async with self.bot.aiohttp_client.get(f'https://pypi.org/pypi/{quote(hit["name"])}/json') as r:
            if r.status != 200:
                return await ctx.send(f'HTTP error {r.status}')

            json = await r.json()

        info = json['info']
        description = info['description']
        if len(description) > 1000:
            # Trim overly long descriptions to fit the embed.
            description = split_string(description, splitter='\n', maxlen=1000)[0] + '...'

        embed = discord.Embed(title=hit['name'],
                              description=description,
                              url=info["package_url"])
        embed.add_field(name='Author', value=info['author'] or 'None')
        embed.add_field(name='Version', value=info['version'] or 'None')
        embed.add_field(name='License', value=info['license'] or 'None')
        await ctx.send(embed=embed)
async def get_timezone(self, ctx, user_id: int):
tz = await self.bot.dbutil.get_timezone(user_id)
if tz:
try:
return await ctx.bot.loop.run_in_executor(ctx.bot.threadpool, pytz.timezone, tz)
except pytz.UnknownTimeZoneError:
pass
return pytz.FixedOffset(0)
    @command(aliases=['tz'])
    @cooldown(2, 7)
    async def timezone(self, ctx, *, timezone: str=None):
        """
        Set or view your timezone. If timezone isn't given shows your current timezone
        If timezone is given sets your current timezone to that.
        Summer time should be supported for any timezone that's not a plain utc offset.
        Due to [technical reasons](https://en.wikipedia.org/wiki/Tz_database#Area)
        the sign in gmt offsets is flipped. e.g. UTC+5 offset is GMT-5
        Examples:
        • `{prefix}{name} utc+4`
        • `{prefix}{name} London`
        • `{prefix}{name} EST`
        """
        user = ctx.author
        if not timezone:
            # View mode: show the stored timezone as its current UTC offset.
            tz = await self.get_timezone(ctx, user.id)
            s = tz.localize(datetime.utcnow()).strftime('Your current timezone is UTC %z')
            await ctx.send(s)
            return

        # First try the fuzzy name lookup (cities, countries, utc+N forms).
        tz = fuzzy_tz.get(timezone.lower())
        # Extra info to be sent
        extra = ''
        if not tz:
            # Fall back to abbreviation lookup (e.g. EST) whose value is a UTC
            # offset in seconds, then map that offset back to a fuzzy_tz key.
            tz = tz_dict.get(timezone.upper())
            if tz:
                tz = fuzzy_tz.get(f'utc{int(tz)//3600:+d}')

        if not tz:
            await ctx.send(f'Timezone {timezone} not found')
            # Refund the cooldown use since nothing was changed.
            ctx.command.undo_use(ctx)
            return

        if tz.startswith('Etc/GMT'):
            extra = "UTC offset used. Consider using a locality based timezone instead. " \
                    "You can set it usually by using your country's capital's name or your country's name as long as it has a single timezone\n" \
                    "The sign in the GMT timezone is flipped due to technical reasons."

        if await self.bot.dbutil.set_timezone(user.id, tz):
            await ctx.send(f'Timezone set to {tz}\n{extra}')
        else:
            await ctx.send('Failed to set timezone because of an error')
    @command(name='timedelta', aliases=['td'],
             usage="[duration or date] [timezones and users]")
    @cooldown(1, 3, BucketType.user)
    async def timedelta_(self, ctx, *, args=''):
        """
        Get a date that is in the amount of duration given.
        To get past dates start your duration with `-`
        Time format is `1d 1h 1m 1s` where each one is optional.
        When no time is given it is interpreted as 0 seconds.
        You can also give a date and duration will be calculated as the time to that point in time.
        Timezone will be user timezone by default but you can specify the date utc offset with e.g. UTC+3
        If the date doesn't have spaces in it, put it inside quotes. In ambiguous 3-integer dates day is assumed to be first
        e.g. `"14:00"`, `14:00 UTC+1`, `"Mon 14:00 UTC-3"`
        You can also specify which timezones to use for comparison.
        By default your own timezone is always put at the bottom (defaults to UTC).
        Timezones can be just an integer determining the UTC offset in hours or
        a city name or a country (Not all countries and cities are accepted as input).
        Remember to use quotes if the city name contains spaces.
        You can also give users and their timezone is used if they've set it
        Max given timezones is 5.
        Examples
        `{prefix}{name} 1h ny`
        `{prefix}{name} "Mon 22:00 UTC-3"`
        `{prefix}{name} "Jan 4th 10:00" @user berlin`
        """
        # Leading '-' flips the direction (past instead of future).
        addition = True
        if args.startswith('-'):
            addition = False
            args = args[1:]

        duration, timezones = parse_timeout(args)
        # Used to guess if time with quotes might've been given
        # This way we can give the correct portion of the string to dateutil parser
        quote_start = timezones.startswith('"')
        user_tz = await self.get_timezone(ctx, ctx.author.id)
        timezones = shlex.split(timezones)

        if not duration and timezones:
            # No plain duration found: try interpreting the front of the
            # remaining arguments as an absolute date instead.
            try:
                if quote_start:
                    t = timezones[0]
                else:
                    t = ' '.join(timezones[:2])

                def get_date():
                    def get_tz(name, offset):
                        # If name specified get by name
                        if name:
                            found_tz = tz_dict.get(name)
                            if not found_tz:
                                # Default value cannot be None or empty string
                                found_tz = gettz(fuzzy_tz.get(name.lower(), 'a'))
                            return found_tz
                        # if offset specified get by utc offset and reverse it
                        # because https://stackoverflow.com/questions/53076575/time-zones-etc-gmt-why-it-is-other-way-round
                        elif offset:
                            return offset*-1

                    return parser.parse(t.upper(), tzinfos=get_tz, parserinfo=parserinfo)

                date = await self.bot.run_async(get_date)

                if not date.tzinfo:
                    # Naive date: interpret it in the caller's timezone.
                    duration = user_tz.localize(date) - datetime.now(user_tz)
                else:
                    # UTC timezones are inverted in dateutil UTC+3 gives UTC-3
                    tz = pytz.FixedOffset(date.tzinfo.utcoffset(datetime.utcnow()).total_seconds()//60)
                    duration = date.replace(tzinfo=tz) - datetime.now(user_tz)

                addition = duration.days >= 0
                if not addition:
                    duration *= -1

                # Drop the tokens that were consumed as the date.
                if quote_start:
                    timezones = timezones[1:]
                else:
                    timezones = timezones[2:]
            except (ValueError, OverflowError):
                pass

        if len(timezones) > 5:
            await ctx.send('Over 5 timezones given. Give fewer timezones (Use quotes if a tz has spaces)')
            return

        async def add_time(dt):
            # Returns None (after reporting) when the shift overflows datetime.
            try:
                if addition:
                    return dt + duration
                else:
                    return dt - duration
            except OverflowError:
                await ctx.send('Failed to get new date because of an Overflow error. Try giving a smaller duration')

        tz_converter = TzConverter()
        user_converter = PossibleUser()
        s = ''
        for timezone in timezones:
            try:
                tz = await tz_converter.convert(ctx, timezone)
            except BadArgument as e:
                # Not a timezone name: try resolving it as a user and use
                # that user's stored timezone instead.
                try:
                    user = await user_converter.convert(ctx, timezone)
                    if isinstance(user, int):
                        tz = await self.get_timezone(ctx, user)
                    else:
                        tz = await self.get_timezone(ctx, user.id)
                except BadArgument:
                    raise e

            dt = await add_time(datetime.now(tz))
            if not dt:
                return
            s += f'`{dt.strftime("%Y-%m-%d %H:%M UTC%z")}` `{tz.zone}`\n'

        # The caller's own timezone always goes last.
        dt = await add_time(datetime.now(user_tz))
        if not dt:
            return

        s += f'`{dt.strftime("%Y-%m-%d %H:%M UTC%z")}` `{user_tz.zone}`\n'

        td = format_timedelta(duration, accuracy=DateAccuracy.Day-DateAccuracy.Minute)
        if addition:
            s += f'which is in {td}'
        else:
            s += f'which was {td} ago'

        await ctx.send(s)
    @command(aliases=['st'])
    @cooldown(1, 4, BucketType.user)
    async def sort_tags(self, ctx, tagname, *, tags):
        """Gets missing tag indexes from a 42 bot tag search.
        The first tagname must be the one that is gonna be looked for"""
        tagname = tagname.rstrip(',')
        tags = tags.split(', ')
        # Split a trailing number off the base tag name, e.g. "tag12" -> ("tag", 12).
        match = re.match(r'(.+?)(\d+)', tagname)

        numbers = set()
        if match:
            tagname, number = match.groups()
            numbers.add(int(number))
        else:
            # Unnumbered base tag counts as index 0.
            numbers.add(0)

        tagname = tagname.lstrip('\u200b')
        tl = len(tagname)
        # Collect every numeric suffix present for this base name.
        for tag in tags:
            if tag.endswith('...'):
                # Truncated search result — suffix is unreliable, skip it.
                continue
            if tagname not in tag:
                continue
            if tagname == tag:
                numbers.add(0)
                continue
            try:
                # Ignore long numbers
                n = tag[tl:]
                if len(n) > 4:
                    continue
                numbers.add(int(n))
            except ValueError:
                continue

        # NOTE(review): numbers is seeded above, so this branch looks
        # unreachable — confirm before relying on the message below.
        if not numbers:
            await ctx.send(f'No other numbered tags found for {tagname}')
            return

        # Compress the gaps between found indexes into "a-b" ranges.
        numbers = list(sorted(numbers))
        last = numbers[0]
        if last > 2:
            s = f'-{last - 1}, '
        else:
            s = ''

        for i in numbers[1:]:
            delta = i - last
            if delta > 4:
                s += f'{last + 1}-{i - 1}, '
            elif delta == 3:
                s += f'{last + 1}, {i - 1}, '
            elif delta == 2:
                s += f'{i - 1}, '
            last = i

        # Everything above the highest found index is also missing.
        s += f'{last+1}-'
        await ctx.send(f'Missing tag numbers for {tagname} are {s}')
def setup(bot):
    """discord.py extension entry point: register the Utilities cog."""
    bot.add_cog(Utilities(bot))
|
s0hvaperuna/Not-a-bot
|
cogs/utils.py
|
Python
|
mit
| 28,689
|
'''
This module stores all the screen settings: colors, width and height.
'''
# Screen dimensions in pixels (ANCHO = width, ALTO = height)
ANCHO=600
ALTO=600
# RGB color constants
BLANCO=(255,255,255)
NEGRO=(0,0,0)
ROJO=(255,0,0)
AZUL=(0,0,255)
VERDE=(0,255,0)
|
Jofemago/Computacion-Grafica
|
Disparos/TareaDisparos/configuraciones.py
|
Python
|
mit
| 212
|
from setuptools import setup, find_packages

setup(name='BIOMD0000000199',
      version=20140916,
      description='BIOMD0000000199 from BioModels',
      url='http://www.ebi.ac.uk/biomodels-main/BIOMD0000000199',
      maintainer='Stanley Gu',
      # BUG FIX: ``maintainer_url`` is not a setuptools keyword (it was
      # silently ignored); the value is an e-mail address, so use the
      # supported ``maintainer_email`` keyword instead.
      maintainer_email='[email protected]',
      packages=find_packages(),
      package_data={'': ['*.xml', 'README.md']},
      )
|
biomodels/BIOMD0000000199
|
setup.py
|
Python
|
cc0-1.0
| 377
|
#!/usr/bin/env python
"""
NPR 2016-03-06
http://www.npr.org/2016/03/06/469327504/five-letters-with-which-to-play-heres-a-puzzle-to-blow-you-away
Bail, Nail, and Mail are three four-letter words that differ only by their first letters.
And those first letters (B, N, and M) happen to be adjacent on a computer keyboard.
Can you think of five four-letter words that have the same property --
that is, they're identical except for their first letters, with those first letters being adjacent on the keyboard?
All five words must be ones that everyone knows. Capitalized words and plurals are not allowed. What words are they?
"""
# NOTE(review): this script is Python 2 (print statements, dict.iteritems).
from nltk.corpus import brown
from collections import defaultdict

# All four-letter words from the Brown corpus, lowercased.
common_words = set([x.lower() for x in brown.words() if len(x) == 4])

# Group words by last three letters
d = defaultdict(set)
for w in common_words:
    d[w[1:]].add(w[0])

# Rows on a keyboard
rows = ['qwertyuiop','asdfghjkl','zxcvbnm']
# Every run of five adjacent keys within a single row.
adjacent_five = []
for row in rows:
    for i in range(len(row)-4):
        adjacent_five.append(row[i:i+5])

#%%
# Print each suffix whose first-letter set covers five adjacent keys.
for k,v in d.iteritems():
    if len(v) >= 5:
        for five in adjacent_five:
            if set(five).issubset(v):
                for first_letter in five:
                    print first_letter + k,
                print
boisvert42/npr-puzzle-python
|
2016/0306_keyboard_adjacent.py
|
Python
|
cc0-1.0
| 1,281
|
# -*- coding: utf-8 -*-
"""
################################################
Plataforma ActivUFRJ
################################################
:Author: *Núcleo de Computação Eletrônica (NCE/UFRJ)*
:Contact: [email protected]
:Date: $Date: 2009-2010 $
:Status: This is a "work in progress"
:Revision: $Revision: 0.01 $
:Home: `LABASE `__
:Copyright: ©2009, `GPL
"""
from couchdb import Server
from uuid import uuid4
# Names of the two CouchDB databases used by the conversion.
_DOCBASES = ['log', 'log2']

# Factory for an empty log document in the new flat format.
_EMPTYLOG = lambda: dict(
    sujeito = "",
    verbo = "",
    objeto = "",
    tipo = "",
    link = "",
    news = "True", # every old-log document without the news field is copied
                   # to the new log with news="True"; the value is always
                   # stored as a string
    data_inclusao = ""
)
class Activ(Server):
    "Active database"
    # Class-level handles to the databases; replaced in __init__ via setattr.
    log = {}
    log2 = {}

    def __init__(self, url):
        Server.__init__(self, url)
        act = self
        # Python 2 and/or "ternary": returns the existing database or creates
        # it. NOTE(review): this idiom misbehaves if act[doc] is falsy —
        # confirm couchdb Database objects are always truthy.
        test_and_create = lambda doc: doc in act and act[doc] or act.create(doc)
        for attribute in _DOCBASES:
            setattr(Activ, attribute, test_and_create(attribute))

    def erase_database(self):
        'erase tables'
        # Best-effort: deletion errors (e.g. missing database) are ignored.
        for table in _DOCBASES:
            try:
                del self[table]
            except:
                pass
# Connects to the local CouchDB server at import time and exposes the two
# database handles used by main().
__ACTIV = Activ('http://127.0.0.1:5984/')
LOG = __ACTIV.log
LOG2 = __ACTIV.log2
def main():
    """Copy every log record from LOG into LOG2 in the new flat format."""
    print u"iniciando conversão"
    for user_id in LOG:
        # Skip CouchDB design documents.
        if "_design" not in user_id:
            #print "-------------------"
            print user_id
            log_data = dict()
            log_data.update(LOG[user_id])
            for item in log_data["registros"]:
                # Start from the empty template so every field exists.
                log_new = _EMPTYLOG()
                log_new.update(item)
                # Normalize boolean news flags to their string form.
                if log_new["news"] is True:
                    log_new["news"] = "True"
                if log_new["news"] is False:
                    log_new["news"] = "False"
                #print log_new
                # Each converted record gets a fresh random document id.
                id = uuid4().hex
                LOG2[id] = log_new
    print u"conversão finalizada."

if __name__ == "__main__":
    main()
|
labase/activnce
|
main/utils/0_14_0207convertlogformat.py
|
Python
|
gpl-2.0
| 2,392
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
The main file for the Crazyflie control application.
"""
import logging
import sys
import cfclient
import cfclient.ui.tabs
import cfclient.ui.toolboxes
import cflib.crtp
from cfclient.ui.dialogs.about import AboutDialog
from cfclient.ui.dialogs.bootloader import BootloaderDialog
from cfclient.utils.config import Config
from cfclient.utils.config_manager import ConfigManager
from cfclient.utils.input import JoystickReader
from cfclient.utils.logconfigreader import LogConfigReader
from cfclient.utils.zmq_led_driver import ZMQLEDDriver
from cfclient.utils.zmq_param import ZMQParamAccess
from cflib.crazyflie import Crazyflie
from cflib.crazyflie.log import LogConfig
from cflib.crazyflie.mem import MemoryElement
from PyQt5 import QtWidgets
from PyQt5 import uic
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtCore import QDir
from PyQt5.QtCore import QThread
from PyQt5.QtCore import QUrl
from PyQt5.QtWidgets import QAction
from PyQt5.QtWidgets import QActionGroup
from PyQt5.QtGui import QDesktopServices
from PyQt5.QtWidgets import QLabel
from PyQt5.QtWidgets import QMenu
from PyQt5.QtWidgets import QMessageBox
from .dialogs.cf1config import Cf1ConfigDialog
from .dialogs.cf2config import Cf2ConfigDialog
from .dialogs.inputconfigdialogue import InputConfigDialogue
from .dialogs.logconfigdialogue import LogConfigDialogue
__author__ = 'Bitcraze AB'
__all__ = ['MainUI']

logger = logging.getLogger(__name__)

# Placeholder entry shown in the interface combo box before a selection.
INTERFACE_PROMPT_TEXT = 'Select an interface'

# Load the Qt Designer .ui file; yields (form class, Qt base class).
(main_window_class,
 main_windows_base_class) = (uic.loadUiType(cfclient.module_path +
                                            '/ui/main.ui'))
class MyDockWidget(QtWidgets.QDockWidget):
    """Dock widget that emits a ``closed`` signal when it is closed."""
    closed = pyqtSignal()

    def closeEvent(self, event):
        super(MyDockWidget, self).closeEvent(event)
        self.closed.emit()
class UIState:
    """Connection-state constants driving the main window UI."""
    DISCONNECTED = 0
    CONNECTING = 1
    CONNECTED = 2
    SCANNING = 3


class BatteryStates:
    """Battery status codes (0..3)."""
    BATTERY, CHARGING, CHARGED, LOW_POWER = list(range(4))


# Colors used for the battery / link-quality progress bars.
COLOR_BLUE = '#3399ff'
COLOR_GREEN = '#00ff60'
COLOR_RED = '#cc0404'
def progressbar_stylesheet(color):
    """Return a QProgressBar stylesheet whose filled chunk uses *color*."""
    return """
    QProgressBar {
        border: 1px solid #333;
        background-color: transparent;
    }
    QProgressBar::chunk {
        background-color: %s;
    }
    """ % color
class MainUI(QtWidgets.QMainWindow, main_window_class):
    """Main application window of the Crazyflie client."""

    # Qt signals used to marshal callbacks from the Crazyflie / input-device
    # threads onto the UI thread.
    connectionLostSignal = pyqtSignal(str, str)
    connectionInitiatedSignal = pyqtSignal(str)
    batteryUpdatedSignal = pyqtSignal(int, object, object)
    connectionDoneSignal = pyqtSignal(str)
    connectionFailedSignal = pyqtSignal(str, str)
    disconnectedSignal = pyqtSignal(str)
    linkQualitySignal = pyqtSignal(int)

    _input_device_error_signal = pyqtSignal(str)
    _input_discovery_signal = pyqtSignal(object)
    _log_error_signal = pyqtSignal(object, str)
    def __init__(self, *args):
        """Build the main window: drivers, input devices, signal wiring,
        tabs and toolboxes."""
        super(MainUI, self).__init__(*args)
        self.setupUi(self)

        # Restore window size if present in the config file
        try:
            size = Config().get("window_size")
            self.resize(size[0], size[1])
        except KeyError:
            pass

        ######################################################
        # By lxrocks
        # 'Skinny Progress Bar' tweak for Yosemite
        # Tweak progress bar - artistic I am not - so pick your own colors !!!
        # Only apply to Yosemite
        ######################################################
        import platform
        if platform.system() == 'Darwin':
            (Version, junk, machine) = platform.mac_ver()
            logger.info("This is a MAC - checking if we can apply Progress "
                        "Bar Stylesheet for Yosemite Skinny Bars ")
            yosemite = (10, 10, 0)
            tVersion = tuple(map(int, (Version.split("."))))

            if tVersion >= yosemite:
                logger.info("Found Yosemite - applying stylesheet")

                tcss = """
                QProgressBar {
                    border: 1px solid grey;
                    border-radius: 5px;
                    text-align: center;
                }
                QProgressBar::chunk {
                    background-color: """ + COLOR_BLUE + """;
                }
                """
                self.setStyleSheet(tcss)

            else:
                logger.info("Pre-Yosemite - skinny bar stylesheet not applied")

        ######################################################

        self.cf = Crazyflie(ro_cache=None,
                            rw_cache=cfclient.config_path + "/cache")

        cflib.crtp.init_drivers(enable_debug_driver=Config()
                                .get("enable_debug_driver"))

        # ZMQ bridges for external parameter access and LED control.
        zmq_params = ZMQParamAccess(self.cf)
        zmq_params.start()

        zmq_leds = ZMQLEDDriver(self.cf)
        zmq_leds.start()

        self.scanner = ScannerThread()
        self.scanner.interfaceFoundSignal.connect(self.foundInterfaces)
        self.scanner.start()

        # Create and start the Input Reader
        self._statusbar_label = QLabel("No input-device found, insert one to"
                                       " fly.")
        self.statusBar().addWidget(self._statusbar_label)

        self.joystickReader = JoystickReader()
        self._active_device = ""
        # self.configGroup = QActionGroup(self._menu_mappings, exclusive=True)

        self._mux_group = QActionGroup(self._menu_inputdevice, exclusive=True)

        # TODO: Need to reload configs
        # ConfigManager().conf_needs_reload.add_callback(self._reload_configs)

        self.cf.connection_failed.add_callback(
            self.connectionFailedSignal.emit)
        self.connectionFailedSignal.connect(self._connection_failed)

        self._input_device_error_signal.connect(
            self._display_input_device_error)
        self.joystickReader.device_error.add_callback(
            self._input_device_error_signal.emit)
        self._input_discovery_signal.connect(self.device_discovery)
        self.joystickReader.device_discovery.add_callback(
            self._input_discovery_signal.emit)

        # Hide the 'File' menu on OS X, since its only item, 'Exit', gets
        # merged into the application menu.
        if sys.platform == 'darwin':
            self.menuFile.menuAction().setVisible(False)

        # Connect UI signals
        self.logConfigAction.triggered.connect(self._show_connect_dialog)
        self.interfaceCombo.currentIndexChanged['QString'].connect(
            self.interfaceChanged)
        self.connectButton.clicked.connect(self._connect)
        self.scanButton.clicked.connect(self._scan)
        self.menuItemConnect.triggered.connect(self._connect)
        self.menuItemConfInputDevice.triggered.connect(
            self._show_input_device_config_dialog)
        self.menuItemExit.triggered.connect(self.closeAppRequest)
        self.batteryUpdatedSignal.connect(self._update_battery)
        self._menuitem_rescandevices.triggered.connect(self._rescan_devices)
        self._menuItem_openconfigfolder.triggered.connect(
            self._open_config_folder)

        # Default Crazyflie radio address.
        self.address.setValue(0xE7E7E7E7E7)

        self._auto_reconnect_enabled = Config().get("auto_reconnect")
        self.autoReconnectCheckBox.toggled.connect(
            self._auto_reconnect_changed)
        self.autoReconnectCheckBox.setChecked(Config().get("auto_reconnect"))

        # Forward joystick input straight to the Crazyflie commander.
        self.joystickReader.input_updated.add_callback(
            self.cf.commander.send_setpoint)

        self.joystickReader.assisted_input_updated.add_callback(
            self.cf.commander.send_velocity_world_setpoint)

        self.joystickReader.heighthold_input_updated.add_callback(
            self.cf.commander.send_zdistance_setpoint)

        self.joystickReader.hover_input_updated.add_callback(
            self.cf.commander.send_hover_setpoint)

        # Connection callbacks and signal wrappers for UI protection
        self.cf.connected.add_callback(self.connectionDoneSignal.emit)
        self.connectionDoneSignal.connect(self._connected)
        self.cf.disconnected.add_callback(self.disconnectedSignal.emit)
        self.disconnectedSignal.connect(self._disconnected)
        self.cf.connection_lost.add_callback(self.connectionLostSignal.emit)
        self.connectionLostSignal.connect(self._connection_lost)
        self.cf.connection_requested.add_callback(
            self.connectionInitiatedSignal.emit)
        self.connectionInitiatedSignal.connect(self._connection_initiated)
        self._log_error_signal.connect(self._logging_error)

        self.batteryBar.setTextVisible(False)
        self.batteryBar.setStyleSheet(progressbar_stylesheet(COLOR_BLUE))

        self.linkQualityBar.setTextVisible(False)
        self.linkQualityBar.setStyleSheet(progressbar_stylesheet(COLOR_BLUE))

        # Connect link quality feedback
        self.cf.link_quality_updated.add_callback(self.linkQualitySignal.emit)
        self.linkQualitySignal.connect(
            lambda percentage: self.linkQualityBar.setValue(percentage))

        self._selected_interface = None
        self._initial_scan = True
        self._scan()

        # Parse the log configuration files
        self.logConfigReader = LogConfigReader(self.cf)

        self._current_input_config = None
        self._active_config = None
        self._active_config = None

        self.inputConfig = None

        # Add things to helper so tabs can access it
        cfclient.ui.pluginhelper.cf = self.cf
        cfclient.ui.pluginhelper.inputDeviceReader = self.joystickReader
        cfclient.ui.pluginhelper.logConfigReader = self.logConfigReader

        self.logConfigDialogue = LogConfigDialogue(cfclient.ui.pluginhelper)
        self._bootloader_dialog = BootloaderDialog(cfclient.ui.pluginhelper)
        self._cf2config_dialog = Cf2ConfigDialog(cfclient.ui.pluginhelper)
        self._cf1config_dialog = Cf1ConfigDialog(cfclient.ui.pluginhelper)
        self.menuItemBootloader.triggered.connect(self._bootloader_dialog.show)
        self._about_dialog = AboutDialog(cfclient.ui.pluginhelper)
        self.menuItemAbout.triggered.connect(self._about_dialog.show)
        self._menu_cf2_config.triggered.connect(self._cf2config_dialog.show)
        self._menu_cf1_config.triggered.connect(self._cf1config_dialog.show)

        # Load and connect tabs
        self.tabsMenuItem = QMenu("Tabs", self.menuView, enabled=True)
        self.menuView.addMenu(self.tabsMenuItem)

        # self.tabsMenuItem.setMenu(QtWidgets.QMenu())
        tabItems = {}
        self.loadedTabs = []
        for tabClass in cfclient.ui.tabs.available:
            tab = tabClass(self.tabs, cfclient.ui.pluginhelper)
            item = QtWidgets.QAction(tab.getMenuName(), self, checkable=True)
            item.toggled.connect(tab.toggleVisibility)
            self.tabsMenuItem.addAction(item)
            tabItems[tab.getTabName()] = item
            self.loadedTabs.append(tab)
            if not tab.enabled:
                item.setEnabled(False)

        # First instantiate all tabs and then open them in the correct order
        try:
            for tName in Config().get("open_tabs").split(","):
                t = tabItems[tName]
                if (t is not None and t.isEnabled()):
                    # Toggle though menu so it's also marked as open there
                    t.toggle()
        except Exception as e:
            logger.warning("Exception while opening tabs [{}]".format(e))

        # Loading toolboxes (A bit of magic for a lot of automatic)
        self.toolboxesMenuItem = QMenu("Toolboxes", self.menuView,
                                       enabled=True)
        self.menuView.addMenu(self.toolboxesMenuItem)

        self.toolboxes = []
        for t_class in cfclient.ui.toolboxes.toolboxes:
            toolbox = t_class(cfclient.ui.pluginhelper)
            dockToolbox = MyDockWidget(toolbox.getName())
            dockToolbox.setWidget(toolbox)
            self.toolboxes += [dockToolbox, ]

            # Add menu item for the toolbox
            item = QtWidgets.QAction(toolbox.getName(), self)
            item.setCheckable(True)
            item.triggered.connect(self.toggleToolbox)
            self.toolboxesMenuItem.addAction(item)

            dockToolbox.closed.connect(lambda: self.toggleToolbox(False))

            # Setup some introspection
            item.dockToolbox = dockToolbox
            item.menuItem = item
            dockToolbox.dockToolbox = dockToolbox
            dockToolbox.menuItem = item

        # References to all the device sub-menus in the "Input device" menu
        self._all_role_menus = ()
        # Used to filter what new devices to add default mapping to
        self._available_devices = ()
        # Keep track of mux nodes so we can enable according to how many
        # devices we have
        self._all_mux_nodes = ()

        # Check which Input muxes are available
        self._mux_group = QActionGroup(self._menu_inputdevice, exclusive=True)
        for m in self.joystickReader.available_mux():
            node = QAction(m.name,
                           self._menu_inputdevice,
                           checkable=True,
                           enabled=False)
            node.toggled.connect(self._mux_selected)
            self._mux_group.addAction(node)
            self._menu_inputdevice.addAction(node)
            self._all_mux_nodes += (node,)
            mux_subnodes = ()
            for name in m.supported_roles():
                sub_node = QMenu("    {}".format(name),
                                 self._menu_inputdevice,
                                 enabled=False)
                self._menu_inputdevice.addMenu(sub_node)
                mux_subnodes += (sub_node,)
                self._all_role_menus += ({"muxmenu": node,
                                          "rolemenu": sub_node},)
            node.setData((m, mux_subnodes))

        self._mapping_support = True
def interfaceChanged(self, interface):
    """Slot fired when the interface combo-box selection changes."""
    # The prompt entry means "nothing selected"; map it to None.
    prompt_chosen = (interface == INTERFACE_PROMPT_TEXT)
    self._selected_interface = None if prompt_chosen else interface
    self._update_ui_state()
def foundInterfaces(self, interfaces):
    """Slot called when an interface scan completes.

    ``interfaces`` is a list of ``(uri, description)`` pairs.  Repopulates
    the interface combo box and tries to restore a sensible selection:
    the previously selected URI, the saved ``link_uri`` on the first scan,
    or the single interface found.
    """
    selected_interface = self._selected_interface
    self.interfaceCombo.clear()
    self.interfaceCombo.addItem(INTERFACE_PROMPT_TEXT)
    formatted_interfaces = []
    for i in interfaces:
        if len(i[1]) > 0:
            interface = "%s - %s" % (i[0], i[1])
        else:
            interface = i[0]
        formatted_interfaces.append(interface)
    self.interfaceCombo.addItems(formatted_interfaces)
    if self._initial_scan:
        self._initial_scan = False
        try:
            if len(Config().get("link_uri")) > 0:
                # index() raises ValueError if the saved URI is absent,
                # so the assignment below only happens when it was found.
                formatted_interfaces.index(Config().get("link_uri"))
                selected_interface = Config().get("link_uri")
        except KeyError:
            # The configuration for link_uri was not found
            pass
        except ValueError:
            # The saved URI was not found while scanning
            pass
    if len(interfaces) == 1 and selected_interface is None:
        # Exactly one interface found: select it automatically.
        selected_interface = interfaces[0][0]
    newIndex = 0
    if selected_interface is not None:
        try:
            # +1 skips the prompt entry at combo index 0.
            newIndex = formatted_interfaces.index(selected_interface) + 1
        except ValueError:
            pass
    self.interfaceCombo.setCurrentIndex(newIndex)
    self.uiState = UIState.DISCONNECTED
    self._update_ui_state()
def _update_ui_state(self):
    """Sync window title, buttons and menus with ``self.uiState``.

    Called after every state transition (connect / disconnect / scan).
    Fix: the SCANNING branch previously called
    ``connectButton.setText("Connect")`` twice; the redundant call is
    removed.
    """
    if self.uiState == UIState.DISCONNECTED:
        self.setWindowTitle("Not connected")
        canConnect = self._selected_interface is not None
        self.menuItemConnect.setText("Connect to Crazyflie")
        self.menuItemConnect.setEnabled(canConnect)
        self.connectButton.setText("Connect")
        self.connectButton.setToolTip(
            "Connect to the Crazyflie on the selected interface")
        self.connectButton.setEnabled(canConnect)
        self.scanButton.setText("Scan")
        self.scanButton.setEnabled(True)
        self.address.setEnabled(True)
        self.batteryBar.setValue(3000)
        self._menu_cf2_config.setEnabled(False)
        self._menu_cf1_config.setEnabled(True)
        self.linkQualityBar.setValue(0)
        self.menuItemBootloader.setEnabled(True)
        self.logConfigAction.setEnabled(False)
        self.interfaceCombo.setEnabled(True)
    elif self.uiState == UIState.CONNECTED:
        s = "Connected on %s" % self._selected_interface
        self.setWindowTitle(s)
        self.menuItemConnect.setText("Disconnect")
        self.menuItemConnect.setEnabled(True)
        self.connectButton.setText("Disconnect")
        self.connectButton.setToolTip("Disconnect from the Crazyflie")
        self.scanButton.setEnabled(False)
        self.logConfigAction.setEnabled(True)
        # Find out if there's an I2C EEPROM, otherwise don't show the
        # dialog.
        if len(self.cf.mem.get_mems(MemoryElement.TYPE_I2C)) > 0:
            self._menu_cf2_config.setEnabled(True)
        self._menu_cf1_config.setEnabled(False)
    elif self.uiState == UIState.CONNECTING:
        s = "Connecting to {} ...".format(self._selected_interface)
        self.setWindowTitle(s)
        self.menuItemConnect.setText("Cancel")
        self.menuItemConnect.setEnabled(True)
        self.connectButton.setText("Cancel")
        self.connectButton.setToolTip("Cancel connecting to the Crazyflie")
        self.scanButton.setEnabled(False)
        self.address.setEnabled(False)
        self.menuItemBootloader.setEnabled(False)
        self.interfaceCombo.setEnabled(False)
    elif self.uiState == UIState.SCANNING:
        self.setWindowTitle("Scanning ...")
        self.connectButton.setText("Connect")
        self.menuItemConnect.setEnabled(False)
        self.connectButton.setEnabled(False)
        self.scanButton.setText("Scanning...")
        self.scanButton.setEnabled(False)
        self.address.setEnabled(False)
        self.menuItemBootloader.setEnabled(False)
        self.interfaceCombo.setEnabled(False)
@pyqtSlot(bool)
def toggleToolbox(self, display):
    """Show or hide the toolbox dock associated with the triggering action.

    The sender (menu action or dock widget) carries ``menuItem`` and
    ``dockToolbox`` references attached when the toolbox menu was built.
    """
    menuItem = self.sender().menuItem
    dockToolbox = self.sender().dockToolbox
    if display and not dockToolbox.isVisible():
        dockToolbox.widget().enable()
        self.addDockWidget(dockToolbox.widget().preferedDockArea(),
                           dockToolbox)
        dockToolbox.show()
    elif not display:
        dockToolbox.widget().disable()
        self.removeDockWidget(dockToolbox)
        dockToolbox.hide()
        # Keep the menu checkbox in sync when the dock is closed directly.
        menuItem.setChecked(False)
def _rescan_devices(self):
    """Reset input-device state and stop reading the current device.

    NOTE(review): the re-discovery part is commented out; as written this
    only clears state -- confirm whether discovery is triggered elsewhere.
    """
    self._statusbar_label.setText("No inputdevice connected!")
    self._menu_devices.clear()
    self._active_device = ""
    self.joystickReader.stop_input()
    # for c in self._menu_mappings.actions():
    #     c.setEnabled(False)
    # devs = self.joystickReader.available_devices()
    # if (len(devs) > 0):
    #     self.device_discovery(devs)
def _show_input_device_config_dialog(self):
    """Open the input-device configuration dialogue (kept as an attribute
    so it is not garbage-collected while shown non-modally)."""
    self.inputConfig = InputConfigDialogue(self.joystickReader)
    self.inputConfig.show()
def _auto_reconnect_changed(self, checked):
    """Persist and apply the auto-reconnect menu checkbox state."""
    self._auto_reconnect_enabled = checked
    Config().set("auto_reconnect", checked)
    logger.info("Auto reconnect enabled: {}".format(checked))
def _show_connect_dialog(self):
    # NOTE(review): despite the name this shows the *log config* dialogue,
    # not a connection dialogue -- confirm whether this is intentional.
    self.logConfigDialogue.show()
def _update_battery(self, timestamp, data, logconf):
    """Log-data callback: refresh the battery bar level and its color."""
    self.batteryBar.setValue(int(data["pm.vbat"] * 1000))
    # TODO firmware reports fully-charged state as 'Battery',
    # rather than 'Charged'
    state = data["pm.state"]
    if state in [BatteryStates.CHARGING, BatteryStates.CHARGED]:
        color = COLOR_GREEN
    elif state == BatteryStates.LOW_POWER:
        color = COLOR_RED
    else:
        color = COLOR_BLUE
    self.batteryBar.setStyleSheet(progressbar_stylesheet(color))
def _connected(self):
    """Link-established callback: update UI, save the URI and start the
    battery log block.

    Callbacks are registered *before* ``lg.start()`` so no samples are
    missed.
    """
    self.uiState = UIState.CONNECTED
    self._update_ui_state()
    # Remember the working URI so it can be pre-selected next launch.
    Config().set("link_uri", str(self._selected_interface))
    lg = LogConfig("Battery", 1000)
    lg.add_variable("pm.vbat", "float")
    lg.add_variable("pm.state", "int8_t")
    try:
        self.cf.log.add_config(lg)
        lg.data_received_cb.add_callback(self.batteryUpdatedSignal.emit)
        lg.error_cb.add_callback(self._log_error_signal.emit)
        lg.start()
    except KeyError as e:
        # A variable in the log config is missing from the firmware TOC.
        logger.warning(str(e))
    # Kick off an initial write to the LED-ring driver memory, if present.
    mems = self.cf.mem.get_mems(MemoryElement.TYPE_DRIVER_LED)
    if len(mems) > 0:
        mems[0].write_data(self._led_write_done)
def _disconnected(self):
    """Link-closed callback: return the UI to the disconnected state."""
    self.uiState = UIState.DISCONNECTED
    self._update_ui_state()
def _connection_initiated(self):
    """Connection-attempt-started callback: switch UI to connecting state."""
    self.uiState = UIState.CONNECTING
    self._update_ui_state()
def _led_write_done(self, mem, addr):
    """Completion callback for the LED-ring memory write (log only)."""
    logger.info("LED write done callback")
def _logging_error(self, log_conf, msg):
    """Show a message box when a log configuration fails to start."""
    QMessageBox.about(self, "Log error", "Error when starting log config"
                      " [{}]: {}".format(log_conf.name,
                                         msg))
def _connection_lost(self, linkURI, msg):
    """Callback for an established link dropping unexpectedly.

    Either reconnects silently (when auto-reconnect is on) or, when this
    window is active, informs the user and returns to the disconnected
    state.
    """
    if self._auto_reconnect_enabled:
        self._connect()
        return
    if not self.isActiveWindow():
        return
    warningCaption = "Communication failure"
    error = "Connection lost to {}: {}".format(linkURI, msg)
    QMessageBox.critical(self, warningCaption, error)
    self.uiState = UIState.DISCONNECTED
    self._update_ui_state()
def _connection_failed(self, linkURI, error):
    """Callback for a connection attempt that could not be established."""
    if self._auto_reconnect_enabled:
        # Keep retrying silently when auto-reconnect is on.
        self._connect()
        return
    msg = "Failed to connect on {}: {}".format(linkURI, error)
    QMessageBox.critical(self, "Communication failure", msg)
    self.uiState = UIState.DISCONNECTED
    self._update_ui_state()
def closeEvent(self, event):
    """Qt close handler: tear down the link and persist the config."""
    self.hide()
    self.cf.close_link()
    Config().save_file()
def resizeEvent(self, event):
    """Qt resize handler: remember the window size in the config."""
    Config().set("window_size", [event.size().width(),
                                 event.size().height()])
def _connect(self):
    """Connect/disconnect/cancel depending on the current UI state.

    CONNECTED  -> close the link (disconnect).
    CONNECTING -> abort the attempt and go back to disconnected.
    otherwise  -> open a link on the selected interface.
    """
    if self.uiState == UIState.CONNECTED:
        self.cf.close_link()
    elif self.uiState == UIState.CONNECTING:
        self.cf.close_link()
        self.uiState = UIState.DISCONNECTED
        self._update_ui_state()
    else:
        self.cf.open_link(self._selected_interface)
def _scan(self):
    """Start an interface scan on the scanner thread (non-blocking)."""
    self.uiState = UIState.SCANNING
    self._update_ui_state()
    self.scanner.scanSignal.emit(self.address.value())
def _display_input_device_error(self, error):
    """Close the link and show an error dialog for an input-device fault."""
    self.cf.close_link()
    QMessageBox.critical(self, "Input device error", error)
def _mux_selected(self, checked):
    """Called when a new mux is selected. The menu item contains a
    reference to the raw mux object as well as to the associated device
    sub-nodes"""
    if not checked:
        # Deselected mux: grey out its role sub-menus.
        (mux, sub_nodes) = self.sender().data()
        for s in sub_nodes:
            s.setEnabled(False)
    else:
        (mux, sub_nodes) = self.sender().data()
        for s in sub_nodes:
            s.setEnabled(True)
        self.joystickReader.set_mux(mux=mux)
        # Go though the tree and select devices/mapping that was
        # selected before it was disabled.
        for role_node in sub_nodes:
            for dev_node in role_node.children():
                if type(dev_node) is QAction and dev_node.isChecked():
                    # Re-emit so the device is re-opened for this mux.
                    dev_node.toggled.emit(True)
    self._update_input_device_footer()
def _get_dev_status(self, device):
msg = "{}".format(device.name)
if device.supports_mapping:
map_name = "N/A"
if device.input_map:
map_name = device.input_map_name
msg += " ({})".format(map_name)
return msg
def _update_input_device_footer(self):
    """Update the footer in the bottom of the UI with status for the
    input device and its mapping"""
    msg = ""
    if len(self.joystickReader.available_devices()) > 0:
        # NOTE(review): reaches into JoystickReader's private
        # _selected_mux/_devs state -- confirm there is no public API.
        mux = self.joystickReader._selected_mux
        msg = "Using {} mux with ".format(mux.name)
        for key in list(mux._devs.keys())[:-1]:
            if mux._devs[key]:
                msg += "{}, ".format(self._get_dev_status(mux._devs[key]))
            else:
                msg += "N/A, "
        # Last item
        key = list(mux._devs.keys())[-1]
        if mux._devs[key]:
            msg += "{}".format(self._get_dev_status(mux._devs[key]))
        else:
            msg += "N/A"
    else:
        msg = "No input device found"
    self._statusbar_label.setText(msg)
def _inputdevice_selected(self, checked):
    """Called when a new input device has been selected from the menu. The
    data in the menu object is the associated map menu (directly under the
    item in the menu) and the raw device"""
    (map_menu, device, mux_menu) = self.sender().data()
    if not checked:
        if map_menu:
            map_menu.setEnabled(False)
        # Do not close the device, since we don't know exactly
        # how many devices the mux can have open. When selecting a
        # new mux the old one will take care of this.
    else:
        if map_menu:
            map_menu.setEnabled(True)
        (mux, sub_nodes) = mux_menu.data()
        # The same device cannot fill two roles at once: uncheck it in
        # every other role menu of this mux.
        for role_node in sub_nodes:
            for dev_node in role_node.children():
                if type(dev_node) is QAction and dev_node.isChecked():
                    if device.id == dev_node.data()[1].id \
                            and dev_node is not self.sender():
                        dev_node.setChecked(False)
        # The role is the (stripped) title of the sub-menu this action
        # lives in.
        role_in_mux = str(self.sender().parent().title()).strip()
        logger.info("Role of {} is {}".format(device.name,
                                              role_in_mux))
        Config().set("input_device", str(device.name))
        self._mapping_support = self.joystickReader.start_input(
            device.name,
            role_in_mux)
    self._update_input_device_footer()
def _inputconfig_selected(self, checked):
    """Called when a new configuration has been selected from the menu. The
    data in the menu object is a reference to the device QAction in parent
    menu. This contains a reference to the raw device."""
    if not checked:
        return
    selected_mapping = str(self.sender().text())
    # sender.data() is the device QAction; its data()[1] is the raw device.
    device = self.sender().data().data()[1]
    self.joystickReader.set_input_map(device.name, selected_mapping)
    self._update_input_device_footer()
def device_discovery(self, devs):
    """Called when new devices have been added"""
    for menu in self._all_role_menus:
        role_menu = menu["rolemenu"]
        mux_menu = menu["muxmenu"]
        # Exclusive group: only one device per role.
        dev_group = QActionGroup(role_menu, exclusive=True)
        for d in devs:
            dev_node = QAction(d.name, role_menu, checkable=True,
                               enabled=True)
            role_menu.addAction(dev_node)
            dev_group.addAction(dev_node)
            dev_node.toggled.connect(self._inputdevice_selected)
            map_node = None
            if d.supports_mapping:
                map_node = QMenu("    Input map", role_menu, enabled=False)
                map_group = QActionGroup(role_menu, exclusive=True)
                # Connect device node to map node for easy
                # enabling/disabling when selection changes and device
                # to easily enable it
                dev_node.setData((map_node, d))
                for c in ConfigManager().get_list_of_configs():
                    node = QAction(c, map_node, checkable=True,
                                   enabled=True)
                    node.toggled.connect(self._inputconfig_selected)
                    map_node.addAction(node)
                    # Connect all the map nodes back to the device
                    # action node where we can access the raw device
                    node.setData(dev_node)
                    map_group.addAction(node)
                    # If this device hasn't been found before, then
                    # select the default mapping for it.
                    if d not in self._available_devices:
                        last_map = Config().get("device_config_mapping")
                        if d.name in last_map and last_map[d.name] == c:
                            node.setChecked(True)
                role_menu.addMenu(map_node)
            dev_node.setData((map_node, d, mux_menu))
    # Update the list of what devices we found
    # to avoid selecting default mapping for all devices when
    # a new one is inserted
    self._available_devices = ()
    for d in devs:
        self._available_devices += (d,)
    # Only enable MUX nodes if we have enough devices to cover
    # the roles
    for mux_node in self._all_mux_nodes:
        (mux, sub_nodes) = mux_node.data()
        if len(mux.supported_roles()) <= len(self._available_devices):
            mux_node.setEnabled(True)
    # TODO: Currently only supports selecting default mux
    if self._all_mux_nodes[0].isEnabled():
        self._all_mux_nodes[0].setChecked(True)
    # If the previous length of the available devices was 0, then select
    # the default one. If that's not available then select the first
    # one in the list.
    # TODO: This will only work for the "Normal" mux so this will be
    # selected by default
    if Config().get("input_device") in [d.name for d in devs]:
        for dev_menu in self._all_role_menus[0]["rolemenu"].actions():
            if dev_menu.text() == Config().get("input_device"):
                dev_menu.setChecked(True)
    else:
        # Select the first device in the first mux (will always be "Normal"
        # mux)
        self._all_role_menus[0]["rolemenu"].actions()[0].setChecked(True)
        logger.info("Select first device")
    self._update_input_device_footer()
def _open_config_folder(self):
    """Open the client configuration folder in the system file browser."""
    QDesktopServices.openUrl(
        QUrl("file:///" +
             QDir.toNativeSeparators(cfclient.config_path)))
def closeAppRequest(self):
    """Close the window and terminate the process immediately."""
    self.close()
    sys.exit(0)
class ScannerThread(QThread):
    """Worker thread that runs interface scans off the UI thread.

    Emit ``scanSignal`` with an address to start a scan; the resulting
    interface list is delivered via ``interfaceFoundSignal``.
    """

    scanSignal = pyqtSignal(object)
    interfaceFoundSignal = pyqtSignal(object)

    def __init__(self):
        QThread.__init__(self)
        # Run our own slots on this thread instead of the creating thread.
        self.moveToThread(self)
        self.scanSignal.connect(self.scan)

    def scan(self, address):
        # Blocking call; safe here because we are off the UI thread.
        self.interfaceFoundSignal.emit(cflib.crtp.scan_interfaces(address))
|
Franky333/crazyflie-clients-python
|
src/cfclient/ui/main.py
|
Python
|
gpl-2.0
| 33,136
|
# coding: utf8
import requests
import arrow
from config import defaultConfig
class NavitiaImplementation:
    """Minimal HTTP client for the Navitia v1 REST API."""

    def __init__(self, auth_key):
        # Token sent on every request via the Authorization header.
        self.auth_key = auth_key
        self.endpoint = 'https://api.navitia.io/v1/{url}'

    def call(self, url, params=None):
        """GET *url* (relative to the API root) and return the decoded JSON.

        Raises RuntimeError when the API reports an error.
        """
        headers = {'Authorization': self.auth_key}
        payload = requests.get(self.endpoint.format(url=url),
                               params=params,
                               headers=headers).json()
        if 'error' not in payload:
            return payload
        raise RuntimeError('Error when querying Navitia API: {msg}. ({prms})'
                           .format(msg=payload['error']['message'],
                                   prms=params))
class LocationManager:
    """Resolves place names to Navitia ids and computes journeys."""

    def __init__(self, api_impl):
        self.api = api_impl
        # User-defined shortcuts ("home" -> place id) from the config file.
        self.aliases = defaultConfig.getSection('Aliases')
        self.default_zone = defaultConfig.getConfig('Coverage',
                                                    'DefaultZone',
                                                    defaultValue='fr-idf',
                                                    writeIfMissing=True)

    def set_coverage_zone(self, new_zone):
        """Persist *new_zone* as the default coverage zone."""
        defaultConfig.setConfig('Coverage', 'DefaultZone', new_zone)

    def whereiam(self, latitude, longitude):
        """Reverse-geocode a coordinate pair via the API."""
        return self.api.call(
            'coord/{latitude};{longitude}'.format(latitude=latitude,
                                                  longitude=longitude))

    def get_place(self, place):
        """Full-text place search inside the default coverage zone."""
        return self.api.call(
            'coverage/{zone}/places'.format(zone=self.default_zone),
            params={'q': place})

    def get_place_id(self, place):
        """Map *place* to a Navitia id.

        Raw coordinates pass through unchanged, configured aliases are
        resolved locally, anything else is looked up through the API.
        """
        if not self.is_place(place):
            return place
        if place in self.aliases:
            return self.aliases[place]
        return self.get_place(place)['places'][0]['id']

    def is_place(self, location):
        """A ';' marks a raw 'lon;lat' coordinate, not a place name."""
        return ';' not in location

    def compute_journey(self, from_place, to_place):
        """Ask the API for a journey between two places, departing now."""
        params = {'from': self.get_place_id(from_place),
                  'to': self.get_place_id(to_place),
                  'datetime': arrow.now().format('YYYYMMDDHHmmss')}
        return self.api.call('journeys', params=params)
def initialize_api(auth_key):
    """Build a LocationManager, persisting or loading the API token.

    Exits the process when no token is supplied and none is stored.
    """
    if auth_key is None:
        auth_key = defaultConfig.getToken()
    else:
        defaultConfig.setToken(auth_key)
    if auth_key is None:
        print('Please set your API token using \'-t\' option and try again')
        exit(1)
    return LocationManager(NavitiaImplementation(auth_key))
|
RaitoBezarius/wannago
|
src/transport.py
|
Python
|
gpl-2.0
| 2,706
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 20 13:49:57 2016
@author: agiovann
"""
from builtins import range
from past.utils import old_div
import logging
import numpy as np
import scipy
try:
import numba
except:
pass
from scipy.linalg.lapack import dpotrf, dpotrs
from scipy import fftpack
#%%
def mode_robust_fast(inputData, axis=None):
    """
    Robust estimator of the mode of a data set using the half-sample mode.

    .. versionadded: 1.0.3
    """
    if axis is None:
        # The half-sample mode expects flat, sorted input.
        return _hsm(np.sort(inputData.ravel()))
    # Recurse on 1-D slices along the requested axis.
    return np.apply_along_axis(lambda slc: mode_robust_fast(slc),
                               axis, inputData)
#%%
def mode_robust(inputData, axis=None, dtype=None):
    """
    Robust estimator of the mode of a data set using the half-sample mode.

    .. versionadded: 1.0.3
    """
    if axis is not None:
        # Recurse on 1-D slices along the requested axis.
        return np.apply_along_axis(lambda slc: mode_robust(slc, dtype=dtype),
                                   axis, inputData)

    def half_sample_mode(values):
        # Recursively narrow to the densest half of the *sorted* sample.
        n = values.size
        if n == 1:
            return values[0]
        if n == 2:
            return values.mean()
        if n == 3:
            low_gap = values[1] - values[0]
            high_gap = values[2] - values[1]
            if low_gap < high_gap:
                return values[:2].mean()
            elif high_gap > low_gap:
                return values[1:].mean()
            else:
                return values[1]
        half = n // 2 + n % 2
        best_width = np.inf
        start = 0
        for k in range(half):
            width = values[k + half - 1] - values[k]
            if width < best_width:
                best_width = width
                start = k
        return half_sample_mode(values[start:start + half])

    data = inputData.ravel()
    if type(data).__name__ == "MaskedArray":
        data = data.compressed()
    if dtype is not None:
        data = data.astype(dtype)
    # The data need to be sorted for this to work
    return half_sample_mode(np.sort(data))
#%%
#@numba.jit("void(f4[:])")
def _hsm(data):
if data.size == 1:
return data[0]
elif data.size == 2:
return data.mean()
elif data.size == 3:
i1 = data[1] - data[0]
i2 = data[2] - data[1]
if i1 < i2:
return data[:2].mean()
elif i2 > i1:
return data[1:].mean()
else:
return data[1]
else:
wMin = np.inf
N = old_div(data.size, 2) + data.size % 2
for i in range(0, N):
w = data[i + N - 1] - data[i]
if w < wMin:
wMin = w
j = i
return _hsm(data[j:j + N])
def compressive_nmf(A, L, R, r, X=None, Y=None, max_iter=100, ls=0):
    """Implements compressive NMF using an ADMM method as described in
    Tepper and Shapiro, IEEE TSP 2015

    min_{U,V,X,Y} ||A - XY||_F^2 s.t. U = LX >= 0 and V = YR >=0

    Args:
        A: data matrix (m x n)
        L: left compression matrix (m x ...)
        R: right compression matrix (... x n)
        r: target rank of the factorization
        X, Y: initial factors.  NOTE(review): both are currently ignored --
            Y is overwritten from the random V below; kept only for
            interface compatibility.
        max_iter: number of ADMM iterations
        ls: constant offset subtracted in the Y update

    Returns:
        X, Y: factors such that A ~ X.dot(Y)
    """
    m = L.shape[0]
    n = R.shape[1]
    U = np.random.rand(m, r)
    V = np.random.rand(r, n)
    Y = V.dot(R.T)
    Lam = np.zeros(U.shape)   # dual variable for the U = LX constraint
    Phi = np.zeros(V.shape)   # dual variable for the V = YR constraint
    l = 1   # penalty parameter for U
    f = 1   # penalty parameter for V
    x = 1   # dual step size
    I = np.eye(r)
    it = 0
    while it < max_iter:
        it += 1
        # Ridge-regularized least-squares updates of the factors.
        X = np.linalg.solve(Y.dot(Y.T) + l*I, Y.dot(A.T) + (l*U.T - Lam.T).dot(L)).T
        Y = np.linalg.solve(X.T.dot(X) + f*I, X.T.dot(A) + (f*V - Phi - ls).dot(R.T))
        LX = L.dot(X)
        U = LX + Lam/l
        U = np.where(U > 0, U, 0)   # project onto the nonnegative orthant
        YR = Y.dot(R)
        V = YR + Phi/f
        V = np.where(V > 0, V, 0)
        Lam += x*l*(LX - U)
        Phi += x*f*(YR - V)
        # Fix: was a bare print(it) flooding stdout every iteration.
        logging.debug("compressive_nmf iteration %d", it)
    return X, Y
#%% kernel density estimation
def mode_robust_kde(inputData, axis=None):
    """
    Estimate the mode of the data set using kernel density estimation:
    returns the mesh point with the highest estimated density.
    """
    if axis is not None:
        def fnc(x):
            return mode_robust_kde(x)
        dataMode = np.apply_along_axis(fnc, axis, inputData)
    else:
        bandwidth, mesh, density, cdf = kde(inputData)
        # Fix: was np.argamax (nonexistent attribute -> AttributeError).
        dataMode = mesh[np.argmax(density)]
    return dataMode
def df_percentile(inputData, axis=None):
    """
    Extracting the percentile of the data where the mode occurs and its value.
    Used to determine the filtering level for DF/F extraction. Note that
    computation can be innacurate for short traces.

    Returns:
        (data_prct, val): percentile (0-100) of the mode and its value;
        arrays when ``axis`` is given.
    """
    if axis is not None:
        def fnc(x):
            return df_percentile(x)
        result = np.apply_along_axis(fnc, axis, inputData)
        data_prct = result[:, 0]
        val = result[:, 1]
    else:
        # Retry loop: kde() can fail on very short traces, in which case
        # the data is duplicated and the estimate recomputed.
        # NOTE(review): kde() returns None instead of raising on brentq
        # failure, which would make the unpacking below raise TypeError
        # (caught by the broad except) -- confirm this is the intent.
        err = True
        while err:
            try:
                bandwidth, mesh, density, cdf = kde(inputData)
                err = False
            except:
                logging.warning('Percentile computation failed. Duplicating ' + 'and trying again.')
                if not isinstance(inputData, list):
                    inputData = inputData.tolist()
                inputData += inputData

        data_prct = cdf[np.argmax(density)] * 100
        val = mesh[np.argmax(density)]
        if data_prct >= 100 or data_prct < 0:
            logging.warning('Invalid percentile computed possibly due ' + 'short trace. Duplicating and recomuputing.')
            if not isinstance(inputData, list):
                inputData = inputData.tolist()
            inputData *= 2
            err = True
        if np.isnan(data_prct):
            logging.warning('NaN percentile computed. Reverting to median.')
            data_prct = 50
            val = np.median(np.array(inputData))
    return data_prct, val
"""
An implementation of the kde bandwidth selection method outlined in:
Z. I. Botev, J. F. Grotowski, and D. P. Kroese. Kernel density
estimation via diffusion. The Annals of Statistics, 38(5):2916-2957, 2010.
Based on the implementation in Matlab by Zdravko Botev.
Daniel B. Smith, PhD
Updated 1-23-2013
"""
def kde(data, N=None, MIN=None, MAX=None):
    """Diffusion KDE with automatic bandwidth (Botev et al., 2010).

    Args:
        data: 1-D sample.
        N: number of mesh points (rounded up to a power of two; default 2**12).
        MIN, MAX: mesh range (default: data range padded by 10%).

    Returns:
        (bandwidth, mesh, density, cdf), or None when the fixed-point
        bandwidth search fails.

    Fix: the scipy.* NumPy aliases (scipy.ceil, scipy.histogram, ...) were
    removed in modern SciPy, and scipy.optimize was used without importing
    the submodule; np.* / explicit imports are used instead.
    """
    from scipy import optimize, integrate

    # Parameters to set up the mesh on which to calculate
    N = 2**12 if N is None else int(2**np.ceil(np.log2(N)))
    if MIN is None or MAX is None:
        minimum = min(data)
        maximum = max(data)
        Range = maximum - minimum
        MIN = minimum - Range / 10 if MIN is None else MIN
        MAX = maximum + Range / 10 if MAX is None else MAX

    # Range of the data
    R = MAX - MIN

    # Histogram the data to get a crude first approximation of the density
    M = len(data)
    DataHist, bins = np.histogram(data, bins=N, range=(MIN, MAX))
    DataHist = DataHist / M
    DCTData = fftpack.dct(DataHist, norm=None)

    I = [iN * iN for iN in range(1, N)]
    SqDCTData = (DCTData[1:] / 2)**2

    # The fixed point calculation finds the bandwidth = t_star
    guess = 0.1
    try:
        t_star = optimize.brentq(fixed_point, 0, guess,
                                 args=(M, I, SqDCTData))
    except ValueError:
        print('Oops!')
        return None

    # Smooth the DCTransformed data using t_star
    SmDCTData = DCTData * np.exp(-np.arange(N)**2 * np.pi**2 * t_star / 2)
    # Inverse DCT to get density
    density = fftpack.idct(SmDCTData, norm=None) * N / R
    mesh = [(bins[i] + bins[i + 1]) / 2 for i in range(N)]
    bandwidth = np.sqrt(t_star) * R

    density = density / integrate.trapezoid(density, mesh)
    cdf = np.cumsum(density) * (mesh[1] - mesh[0])

    return bandwidth, mesh, density, cdf
def fixed_point(t, M, I, a2):
    """Fixed-point residual for the KDE bandwidth (Botev et al., 2010).

    Args:
        t: candidate (squared) bandwidth.
        M: sample size.
        I: squared frequency indices of the DCT coefficients.
        a2: squared (halved) DCT coefficients.

    Returns:
        t minus the plug-in estimate; a root gives the optimal bandwidth.

    Fix: scipy.float64 / scipy.pi / scipy.sum / scipy.exp / scipy.prod /
    scipy.sqrt were removed in modern SciPy; np.* equivalents are used.
    """
    l = 7
    I = np.float64(I)
    M = np.float64(M)
    a2 = np.float64(a2)
    f = 2 * np.pi**(2 * l) * np.sum(I**l * a2 * np.exp(-I * np.pi**2 * t))
    for s in range(l, 1, -1):
        # Odd double factorial (2s-1)!! over sqrt(2*pi).
        K0 = np.prod(range(1, 2 * s, 2)) / np.sqrt(2 * np.pi)
        const = (1 + (1 / 2)**(s + 1 / 2)) / 3
        time = (2 * const * K0 / M / f)**(2 / (3 + 2 * s))
        f = 2 * np.pi**(2 * s) * np.sum(I**s * a2 * np.exp(-I * np.pi**2 * time))
    return t - (2 * M * np.sqrt(np.pi) * f)**(-2 / 5)
def csc_column_remove(A, ind):
    """ Removes specified columns for a scipy.sparse csc_matrix

    Args:
        A: scipy.sparse.csc_matrix
            Input matrix
        ind: iterable[int]
            list or np.array with columns to be removed

    Returns:
        scipy.sparse.csc_matrix with the columns in *ind* dropped.
    """
    d1, d2 = A.shape
    # Fix: replaced the fragile string-based type check (old FIXME) with
    # the proper sparse-format predicate.
    if not scipy.sparse.isspmatrix_csc(A):
        logging.warning("Original matrix not in csc_format. Converting it" + " anyway.")
        A = scipy.sparse.csc_matrix(A)
    indptr = A.indptr
    ind_diff = np.diff(A.indptr).tolist()
    ind_sort = sorted(ind, reverse=True)
    # Per-column data/index slices so columns can be deleted independently.
    data_list = [A.data[indptr[i]:indptr[i + 1]] for i in range(d2)]
    indices_list = [A.indices[indptr[i]:indptr[i + 1]] for i in range(d2)]
    # Delete from the back so earlier indices remain valid.
    for i in ind_sort:
        del data_list[i]
        del indices_list[i]
        del ind_diff[i]
    indptr_final = np.cumsum([0] + ind_diff)
    data_final = [item for sublist in data_list for item in sublist]
    indices_final = [item for sublist in indices_list for item in sublist]
    A = scipy.sparse.csc_matrix((data_final, indices_final, indptr_final),
                                shape=[d1, d2 - len(ind)])
    return A
def pd_solve(a, b):
    """ Fast matrix solve of a @ x = b for positive definite *a*.

    Uses a Cholesky factorization (LAPACK dpotrf/dpotrs); falls back to a
    general solver when the factorization fails (a not PD).
    """
    chol, info = dpotrf(a)
    if info != 0:
        return np.linalg.solve(a, b)
    return dpotrs(chol, b)[0]
|
simonsfoundation/CaImAn
|
caiman/utils/stats.py
|
Python
|
gpl-2.0
| 9,843
|
''' Setup for core modules
'''
def configuration(parent_package='',top_path=None):
    """Build the numpy.distutils configuration for the ``parallel`` package.

    NOTE(review): numpy.distutils is deprecated and removed in recent
    NumPy releases; building requires an older toolchain.
    """
    from numpy.distutils.misc_util import Configuration
    config = Configuration('parallel', parent_package, top_path)
    config.set_options(quiet=True)
    config.add_subpackage('core')
    return config
if __name__ == '__main__':
    # Standalone build entry point: delegate to numpy.distutils setup().
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
|
ezralanglois/arachnid
|
arachnid/core/parallel/setup.py
|
Python
|
gpl-2.0
| 412
|
import classutils
from random import shuffle
class _Seat(object):
def __init__(self,table,seat,student):
self.table = table
self.seat = seat
self.student=student
def __str__(self):
return str(self.student)
def seat_randomizer(section, tables, seats, filename=None, msg=None):
    """
    randomize students.

    input:
        section: section number
        tables: number of tables
        seats: seats per table
        filename: output file (default "<section>.txt")
        msg: optional header message
    output:
        None, but produces formatted txt file to be used as seating chart

    Fixes: the output file is now closed via a ``with`` block even on
    error, the bare ``except`` around the fill loop is replaced with an
    explicit emptiness check, and the chart is written via a (table, seat)
    lookup dict instead of a triple-nested scan.
    """
    if filename is None:
        filename = str(section) + '.txt'
    student_list = classutils.get_section(section)
    shuffle(student_list)
    # Fill seat-by-seat across tables so students spread over all tables.
    seat_list = []
    for seat_no in range(seats):
        for table_no in range(tables):
            if not student_list:
                break
            seat_list.append(_Seat(table_no, seat_no, student_list.pop(0)))
    if msg is None:
        msg = ''
    # At most one student per (table, seat) by construction above.
    assigned = {(s.table, s.seat): s for s in seat_list}
    with open(filename, "w") as f:
        f.write("::::::::::::::::::\n " + msg + "\n::::::::::::::::::\n")
        for table_no in range(tables):
            f.write(("\nTable %-2d\n===============================\n") % (table_no + 1))
            for seat_no in range(seats):
                occupant = assigned.get((table_no, seat_no))
                if occupant is not None:
                    f.write(str(occupant) + "\n")
    return
|
astroScott/TeachingUtilities
|
seat_randomizer.py
|
Python
|
gpl-2.0
| 1,374
|
from __future__ import unicode_literals
import glob
import datetime
import socket
import os, sys
import win32process
import re
import win32security, ntsecuritycon, win32api, win32con, win32file
import win32service
import pywintypes # doesn't play well with molebox pro - why did we need this anyway?
import win32net
import ctypes
import getopt
import _winreg
import win32netcon
from subprocess import Popen, PIPE, STDOUT
from ntsecuritycon import TokenSessionId, TokenSandBoxInert, TokenType, TokenImpersonationLevel, TokenVirtualizationEnabled, TokenVirtualizationAllowed, TokenHasRestrictions, TokenElevationType, TokenUIAccess, TokenUser, TokenOwner, TokenGroups, TokenRestrictedSids, TokenPrivileges, TokenPrimaryGroup, TokenSource, TokenDefaultDacl, TokenStatistics, TokenOrigin, TokenLinkedToken, TokenLogonSid, TokenElevation, TokenIntegrityLevel, TokenMandatoryPolicy, SE_ASSIGNPRIMARYTOKEN_NAME, SE_BACKUP_NAME, SE_CREATE_PAGEFILE_NAME, SE_CREATE_TOKEN_NAME, SE_DEBUG_NAME, SE_LOAD_DRIVER_NAME, SE_MACHINE_ACCOUNT_NAME, SE_RESTORE_NAME, SE_SHUTDOWN_NAME, SE_TAKE_OWNERSHIP_NAME, SE_TCB_NAME
import unicodedata
# --- Module-level state and configuration defaults -------------------------
k32 = ctypes.windll.kernel32   # kernel32 handle for raw Win32 calls
wow64 = ctypes.c_long( 0 )     # receives the saved WOW64 redirection state
on64bitwindows = 1             # assume 64-bit Windows until detected otherwise
# Credentials for auditing a remote host (populated from command-line options).
remote_server = None
remote_username = None
remote_password = None
remote_domain = None
local_ips = socket.gethostbyname_ex(socket.gethostname())[2] # have to do this before Wow64DisableWow64FsRedirection
version = "1.0"
svnversion="$Revision$" # Don't change this line. Auto-updated.
svnnum=re.sub('[^0-9]', '', svnversion)
if svnnum:
    version = version + "svn" + svnnum  # e.g. "1.0svn123"
# Flags selecting which checks/audits to run; toggled during option parsing.
all_checks = 0
registry_checks = 0
path_checks = 0
service_checks = 0
service_audit = 0
drive_checks = 0
eventlog_checks = 0
progfiles_checks = 0
process_checks = 0
share_checks = 0
passpol_audit = 0
user_group_audit = 0
logged_in_audit = 0
process_audit = 0
admin_users_audit= 0
host_info_audit = 0
ignore_trusted = 0
owner_info = 0
weak_perms_only = 0
host_info_audit = 0  # NOTE(review): duplicate assignment (also set a few lines above)
patch_checks = 0
verbose = 0
report_file_name = None
# Map of Microsoft KB article numbers to the vulnerability / exploit the
# corresponding patch addresses; used by the patch checks.
# NOTE: the original literal listed '911280' and '923980' twice each.
# Duplicate dict keys silently overwrite earlier entries, so the pairs
# are merged into single entries here.
kb_nos = {
    '977165': 'MS10_015 Vulnerabilities in Windows Kernel Could Allow Elevation of Privilege (kitrap0d - meterpreter "getsystem")',
    '828749': 'MS03_049 Microsoft Workstation Service NetAddAlternateComputerName Overflow (netapi) ',
    '828028': 'MS04_007 Microsoft ASN.1 Library Bitstring Heap Overflow (killbill) ',
    '835732': 'MS04_011 Microsoft LSASS Service DsRolerUpgradeDownlevelServer Overflow (lsass) ',
    '841533': 'MS04_031 Microsoft NetDDE Service Overflow (netdde)',
    '899588': 'MS05_039 Microsoft Plug and Play Service Overflow (pnp)',
    '911280': 'MS06_025 Microsoft RRAS Service RASMAN Registry Overflow (rasmans_reg) / Microsoft RRAS Service Overflow (rras)',
    '921883': 'MS06_040 Microsoft Server Service NetpwPathCanonicalize Overflow (netapi)',
    '923980': 'MS06_066 Microsoft Services MS06-066 nwapi32.dll (nwapi) / nwwks.dll (nwwks)',
    '924270': 'MS06_070 Microsoft Workstation Service NetpManageIPCConnect Overflow (wkssvc)',
    '935966': 'MS07_029 Microsoft DNS RPC Service extractQuotedChar() Overflow (SMB) (msdns_zonename)',
    '958644': 'MS08_067 Microsoft Server Service Relative Path Stack Corruption (netapi)',
    '975517': 'MS09_050 Microsoft SRV2.SYS SMB Negotiate ProcessID Function Table Dereference (smb2_negotiate_func_index)',
    '823980': 'MS03_026 Microsoft RPC DCOM Interface Overflow',
    '892944': 'MS05_017 Microsoft Message Queueing Service Path Overflow',
    '937894': 'MS07_065 Microsoft Message Queueing Service DNS Name Path Overflow',
    '2592799': 'MS11-080: Vulnerability in ancillary function driver could allow elevation of privilege',
    '2305420': 'MS10-092: Vulnerability in Task Scheduler could allow for elevation of privilege'
}
# Registry keys that commonly name programs auto-run at boot or logon.
# Weak ACLs on any of these allow privilege escalation.
# Backslashes are escaped explicitly: the original relied on Python keeping
# invalid escape sequences (e.g. '\S') literal, which is a DeprecationWarning
# from 3.6 and a SyntaxError from 3.12.  The string values are unchanged.
reg_paths = (
    'HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Services',
    'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Run',
    'HKEY_LOCAL_MACHINE\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows\\CurrentVersion\\Run',
    'HKEY_LOCAL_MACHINE\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows\\CurrentVersion\\RunOnce',
    'HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Run',
    'HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Winlogon\\Shell',
    'HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Winlogon\\Userinit',
    'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\RunOnce',
    'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\RunOnce',
    'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\RunServices',
    'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\RunServicesOnce',
    'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\RunServices',
    'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\RunServicesOnce',
    'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows NT\\CurrentVersion\\Windows',
)
# We don't care if some users / groups hold dangerous permissions because
# they're trusted.  These have fully qualified names:
trusted_principles_fq = (
    "BUILTIN\\Administrators",
    u"BUILTIN\\Rendszergazd\xe1k", # Hungarian name for Administrators
    "NT SERVICE\\TrustedInstaller",
    "NT AUTHORITY\\SYSTEM"
)
# We may temporarily regard a user as trusted (e.g. if we're looking for
# writable files in a user's path, we do not care that he can write to his
# own path)
tmp_trusted_principles_fq = (
)
# HKLM registry key configuring the Event Log service.  Backslashes are now
# escaped explicitly: the original '\C', '\S', '\E' invalid escapes are a
# SyntaxError from Python 3.12.  The value is unchanged.
eventlog_key_hklm = 'SYSTEM\\CurrentControlSet\\Services\\Eventlog'
# We don't care if members of these groups hold dangerous permissions
# because they're trusted.  These have names without a domain:
trusted_principles = (
    "Administrators",
    u"Rendszergazd\xe1k", # Hungarian name for Administrators
    "Domain Admins",
    "Enterprise Admins",
)
# Windows privilege and logon-right constant names, as defined in the
# Microsoft "Privilege Constants" / "Account Rights Constants" references.
# Used when dumping which rights each user or group holds.
windows_privileges = (
    "SeAssignPrimaryTokenPrivilege",
    "SeBackupPrivilege",
    "SeCreatePagefilePrivilege",
    "SeCreateTokenPrivilege",
    "SeDebugPrivilege",
    "SeEnableDelegationPrivilege",
    "SeLoadDriverPrivilege",
    "SeMachineAccountPrivilege",
    "SeManageVolumePrivilege",
    "SeRelabelPrivilege",
    "SeRestorePrivilege",
    "SeShutdownPrivilege",
    "SeSyncAgentPrivilege",
    "SeTakeOwnershipPrivilege",
    "SeTcbPrivilege",
    "SeTrustedCredManAccessPrivilege",
    "SeSecurityPrivilege",
    "SeRemoteShutdownPrivilege",
    "SeProfileSingleProcessPrivilege",
    "SeAuditPrivilege",
    "SeIncreaseBasePriorityPrivilege",
    "SeIncreaseWorkingSetPrivilege",
    "SeIncreaseQuotaPrivilege",
    "SeLockMemoryPrivilege",
    "SeSystemEnvironmentPrivilege",
    "SeChangeNotifyPrivilege",
    "SeCreateGlobalPrivilege",
    "SeCreatePermanentPrivilege",
    "SeCreateSymbolicLinkPrivilege",
    "SeImpersonatePrivilege",
    "SeSystemProfilePrivilege",
    "SeSystemtimePrivilege",
    "SeTimeZonePrivilege",
    "SeUndockPrivilege",
    "SeUnsolicitedInputPrivilege",
    # Logon rights (Se...LogonRight) rather than privileges proper:
    "SeBatchLogonRight",
    "SeDenyBatchLogonRight",
    "SeDenyInteractiveLogonRight",
    "SeDenyNetworkLogonRight",
    "SeDenyRemoteInteractiveLogonRight",
    "SeDenyServiceLogonRight",
    "SeInteractiveLogonRight",
    "SeNetworkLogonRight",
    "SeRemoteInteractiveLogonRight",
    "SeServiceLogonRight"
)
# Share types reported by NetShareEnum (win32netcon STYPE_* constant names).
share_types = (
    "STYPE_IPC",
    "STYPE_DISKTREE",
    "STYPE_PRINTQ",
    "STYPE_DEVICE",
)
# Server type bit-flag names (win32netcon SV_TYPE_*), used to describe the
# roles a host advertises (workstation, DC, SQL server, browser, ...).
sv_types = (
    "SV_TYPE_WORKSTATION",
    "SV_TYPE_SERVER",
    "SV_TYPE_SQLSERVER",
    "SV_TYPE_DOMAIN_CTRL",
    "SV_TYPE_DOMAIN_BAKCTRL",
    "SV_TYPE_TIME_SOURCE",
    "SV_TYPE_AFP",
    "SV_TYPE_NOVELL",
    "SV_TYPE_DOMAIN_MEMBER",
    "SV_TYPE_PRINTQ_SERVER",
    "SV_TYPE_DIALIN_SERVER",
    "SV_TYPE_XENIX_SERVER",
    "SV_TYPE_NT",
    "SV_TYPE_WFW",
    "SV_TYPE_SERVER_MFPN",
    "SV_TYPE_SERVER_NT",
    "SV_TYPE_POTENTIAL_BROWSER",
    "SV_TYPE_BACKUP_BROWSER",
    "SV_TYPE_MASTER_BROWSER",
    "SV_TYPE_DOMAIN_MASTER",
    "SV_TYPE_SERVER_OSF",
    "SV_TYPE_SERVER_VMS",
    "SV_TYPE_WINDOWS",
    "SV_TYPE_DFS",
    "SV_TYPE_CLUSTER_NT",
    "SV_TYPE_TERMINALSERVER", # missing from win32netcon.py
    #"SV_TYPE_CLUSTER_VS_NT", # missing from win32netcon.py
    "SV_TYPE_DCE",
    "SV_TYPE_ALTERNATE_XPORT",
    "SV_TYPE_LOCAL_LIST_ONLY",
    "SV_TYPE_DOMAIN_ENUM"
)
# SV_TYPE_TERMINALSERVER is absent from win32netcon.py, so patch it in with
# the documented flag value before the lookups above are resolved.
win32netcon.SV_TYPE_TERMINALSERVER = 0x2000000
# Permission names that let a principal MODIFY an object, keyed by object
# type ('share', 'file', 'reg', 'directory', 'service_manager', 'service')
# and then by the module object that defines each constant name.  A
# non-trusted user holding any of these on a sensitive object is reported
# as a potential escalation vector.  Commented-out names are read-only or
# otherwise harmless rights that were deliberately excluded.
dangerous_perms_write = {
    # http://www.tek-tips.com/faqs.cfm?fid
    'share': {
        ntsecuritycon: (
            "FILE_READ_DATA", #
            "FILE_WRITE_DATA",
            "FILE_APPEND_DATA",
            "FILE_READ_EA", #
            "FILE_WRITE_EA",
            "FILE_EXECUTE", #
            "FILE_READ_ATTRIBUTES", #
            "FILE_WRITE_ATTRIBUTES",
            "DELETE",
            "READ_CONTROL", #
            "WRITE_DAC",
            "WRITE_OWNER",
            "SYNCHRONIZE", #
        )
    },
    'file': {
        ntsecuritycon: (
            #"FILE_READ_DATA",
            "FILE_WRITE_DATA",
            "FILE_APPEND_DATA",
            #"FILE_READ_EA",
            "FILE_WRITE_EA",
            #"FILE_EXECUTE",
            #"FILE_READ_ATTRIBUTES",
            "FILE_WRITE_ATTRIBUTES",
            "DELETE",
            #"READ_CONTROL",
            "WRITE_DAC",
            "WRITE_OWNER",
            #"SYNCHRONIZE",
        )
    },
    # http://msdn.microsoft.com/en-us/library/ms724878(VS.85).aspx
    # KEY_ALL_ACCESS: STANDARD_RIGHTS_REQUIRED KEY_QUERY_VALUE KEY_SET_VALUE KEY_CREATE_SUB_KEY KEY_ENUMERATE_SUB_KEYS KEY_NOTIFY KEY_CREATE_LINK
    # KEY_CREATE_LINK (0x0020) Reserved for system use.
    # KEY_CREATE_SUB_KEY (0x0004) Required to create a subkey of a registry key.
    # KEY_ENUMERATE_SUB_KEYS (0x0008) Required to enumerate the subkeys of a registry key.
    # KEY_EXECUTE (0x20019) Equivalent to KEY_READ.
    # KEY_NOTIFY (0x0010) Required to request change notifications for a registry key or for subkeys of a registry key.
    # KEY_QUERY_VALUE (0x0001) Required to query the values of a registry key.
    # KEY_READ (0x20019) Combines the STANDARD_RIGHTS_READ, KEY_QUERY_VALUE, KEY_ENUMERATE_SUB_KEYS, and KEY_NOTIFY values.
    # KEY_SET_VALUE (0x0002) Required to create, delete, or set a registry value.
    # KEY_WOW64_32KEY (0x0200) Indicates that an application on 64-bit Windows should operate on the 32-bit registry view. For more information, see Accessing an Alternate Registry View. This flag must be combined using the OR operator with the other flags in this table that either query or access registry values.
    #   Windows 2000: This flag is not supported.
    # KEY_WOW64_64KEY (0x0100) Indicates that an application on 64-bit Windows should operate on the 64-bit registry view. For more information, see Accessing an Alternate Registry View.
    #   This flag must be combined using the OR operator with the other flags in this table that either query or access registry values.
    #   Windows 2000: This flag is not supported.
    # KEY_WRITE (0x20006) Combines the STANDARD_RIGHTS_WRITE, KEY_SET_VALUE, and KEY_CREATE_SUB_KEY access rights.
    # "STANDARD_RIGHTS_REQUIRED",
    # "STANDARD_RIGHTS_WRITE",
    # "STANDARD_RIGHTS_READ",
    # "DELETE",
    # "READ_CONTROL",
    # "WRITE_DAC",
    #"WRITE_OWNER",
    'reg': {
        _winreg: (
            #"KEY_ALL_ACCESS", # Combines the STANDARD_RIGHTS_REQUIRED, KEY_QUERY_VALUE, KEY_SET_VALUE, KEY_CREATE_SUB_KEY, KEY_ENUMERATE_SUB_KEYS, KEY_NOTIFY, and KEY_CREATE_LINK access rights.
            #"KEY_QUERY_VALUE", # GUI "Query Value"
            "KEY_SET_VALUE", # GUI "Set Value". Required to create, delete, or set a registry value.
            "KEY_CREATE_LINK", # GUI "Create Link". Reserved for system use.
            "KEY_CREATE_SUB_KEY", # GUI "Create subkey"
            # "KEY_ENUMERATE_SUB_KEYS", # GUI "Create subkeys"
            # "KEY_NOTIFY", # GUI "Notify"
            #"KEY_EXECUTE", # same as KEY_READ
            #"KEY_READ",
            #"KEY_WOW64_32KEY",
            #"KEY_WOW64_64KEY",
            # "KEY_WRITE", # Combines the STANDARD_RIGHTS_WRITE, KEY_SET_VALUE, and KEY_CREATE_SUB_KEY access rights.
        ),
        ntsecuritycon: (
            "DELETE", # GUI "Delete"
            # "READ_CONTROL", # GUI "Read Control" - read security descriptor
            "WRITE_DAC", # GUI "Write DAC"
            "WRITE_OWNER", # GUI "Write Owner"
            #"STANDARD_RIGHTS_REQUIRED",
            #"STANDARD_RIGHTS_WRITE",
            #"STANDARD_RIGHTS_READ",
        )
    },
    'directory': {
        ntsecuritycon: (
            #"FILE_LIST_DIRECTORY",
            "FILE_ADD_FILE",
            "FILE_ADD_SUBDIRECTORY",
            #"FILE_READ_EA",
            "FILE_WRITE_EA",
            #"FILE_TRAVERSE",
            "FILE_DELETE_CHILD",
            #"FILE_READ_ATTRIBUTES",
            "FILE_WRITE_ATTRIBUTES",
            "DELETE",
            #"READ_CONTROL",
            "WRITE_DAC",
            "WRITE_OWNER",
            #"SYNCHRONIZE",
        )
    },
    'service_manager': {
        # For service manager
        # http://msdn.microsoft.com/en-us/library/ms685981(VS.85).aspx
        # SC_MANAGER_ALL_ACCESS (0xF003F) Includes STANDARD_RIGHTS_REQUIRED, in addition to all access rights in this table.
        # SC_MANAGER_CREATE_SERVICE (0x0002) Required to call the CreateService function to create a service object and add it to the database.
        # SC_MANAGER_CONNECT (0x0001) Required to connect to the service control manager.
        # SC_MANAGER_ENUMERATE_SERVICE (0x0004) Required to call the EnumServicesStatusEx function to list the services that are in the database.
        # SC_MANAGER_LOCK (0x0008) Required to call the LockServiceDatabase function to acquire a lock on the database.
        # SC_MANAGER_MODIFY_BOOT_CONFIG (0x0020) Required to call the NotifyBootConfigStatus function.
        # SC_MANAGER_QUERY_LOCK_STATUS (0x0010)Required to call the QueryServiceLockStatus function to retrieve the lock status information for the database.
        win32service: (
            "SC_MANAGER_ALL_ACCESS",
            "SC_MANAGER_CREATE_SERVICE",
            "SC_MANAGER_CONNECT",
            "SC_MANAGER_ENUMERATE_SERVICE",
            "SC_MANAGER_LOCK",
            "SC_MANAGER_MODIFY_BOOT_CONFIG",
            "SC_MANAGER_QUERY_LOCK_STATUS",
        )
    },
    'service': {
        # For services:
        # http://msdn.microsoft.com/en-us/library/ms685981(VS.85).aspx
        # SERVICE_ALL_ACCESS (0xF01FF) Includes STANDARD_RIGHTS_REQUIRED in addition to all access rights in this table.
        # SERVICE_CHANGE_CONFIG (0x0002) Required to call the ChangeServiceConfig or ChangeServiceConfig2 function to change the service configuration. Because this grants the caller the right to change the executable file that the system runs, it should be granted only to administrators.
        # SERVICE_ENUMERATE_DEPENDENTS (0x0008) Required to call the EnumDependentServices function to enumerate all the services dependent on the service.
        # SERVICE_INTERROGATE (0x0080) Required to call the ControlService function to ask the service to report its status immediately.
        # SERVICE_PAUSE_CONTINUE (0x0040) Required to call the ControlService function to pause or continue the service.
        # SERVICE_QUERY_CONFIG (0x0001) Required to call the QueryServiceConfig and QueryServiceConfig2 functions to query the service configuration.
        # SERVICE_QUERY_STATUS (0x0004) Required to call the QueryServiceStatusEx function to ask the service control manager about the status of the service.
        # SERVICE_START (0x0010) Required to call the StartService function to start the service.
        # SERVICE_STOP (0x0020) Required to call the ControlService function to stop the service.
        # SERVICE_USER_DEFINED_CONTROL(0x0100) Required to call the ControlService function to specify a user-defined control code.
        win32service: (
            # "SERVICE_INTERROGATE",
            # "SERVICE_QUERY_STATUS",
            # "SERVICE_ENUMERATE_DEPENDENTS",
            "SERVICE_ALL_ACCESS",
            "SERVICE_CHANGE_CONFIG",
            "SERVICE_PAUSE_CONTINUE",
            # "SERVICE_QUERY_CONFIG",
            "SERVICE_START",
            "SERVICE_STOP",
            # "SERVICE_USER_DEFINED_CONTROL", # TODO this is granted most of the time. Double check that's not a bad thing.
        )
    },
}
# The COMPLETE set of permission names per object type (superset of
# dangerous_perms_write), keyed like dangerous_perms_write by object type
# and then by the module object defining each constant.  Used when dumping
# full ACL information rather than only dangerous rights.
all_perms = {
    'share': {
        ntsecuritycon: (
            "FILE_READ_DATA", #
            "FILE_WRITE_DATA",
            "FILE_APPEND_DATA",
            "FILE_READ_EA", #
            "FILE_WRITE_EA",
            "FILE_EXECUTE", #
            "FILE_READ_ATTRIBUTES", #
            "FILE_WRITE_ATTRIBUTES",
            "DELETE",
            "READ_CONTROL", #
            "WRITE_DAC",
            "WRITE_OWNER",
            "SYNCHRONIZE", #
        )
    },
    'file': {
        ntsecuritycon: (
            "FILE_READ_DATA",
            "FILE_WRITE_DATA",
            "FILE_APPEND_DATA",
            "FILE_READ_EA",
            "FILE_WRITE_EA",
            "FILE_EXECUTE",
            "FILE_READ_ATTRIBUTES",
            "FILE_WRITE_ATTRIBUTES",
            "DELETE",
            "READ_CONTROL",
            "WRITE_DAC",
            "WRITE_OWNER",
            "SYNCHRONIZE",
        )
    },
    'reg': {
        _winreg: (
            "KEY_ALL_ACCESS",
            "KEY_CREATE_LINK",
            "KEY_CREATE_SUB_KEY",
            "KEY_ENUMERATE_SUB_KEYS",
            "KEY_EXECUTE",
            "KEY_NOTIFY",
            "KEY_QUERY_VALUE",
            "KEY_READ",
            "KEY_SET_VALUE",
            "KEY_WOW64_32KEY",
            "KEY_WOW64_64KEY",
            "KEY_WRITE",
        ),
        ntsecuritycon: (
            "DELETE",
            "READ_CONTROL",
            "WRITE_DAC",
            "WRITE_OWNER",
            "STANDARD_RIGHTS_REQUIRED",
            "STANDARD_RIGHTS_WRITE",
            "STANDARD_RIGHTS_READ",
            "SYNCHRONIZE",
        )
    },
    'directory': {
        ntsecuritycon: (
            "FILE_LIST_DIRECTORY",
            "FILE_ADD_FILE",
            "FILE_ADD_SUBDIRECTORY",
            "FILE_READ_EA",
            "FILE_WRITE_EA",
            "FILE_TRAVERSE",
            "FILE_DELETE_CHILD",
            "FILE_READ_ATTRIBUTES",
            "FILE_WRITE_ATTRIBUTES",
            "DELETE",
            "READ_CONTROL",
            "WRITE_DAC",
            "WRITE_OWNER",
            "SYNCHRONIZE",
        )
    },
    'service_manager': {
        win32service: (
            "SC_MANAGER_ALL_ACCESS",
            "SC_MANAGER_CREATE_SERVICE",
            "SC_MANAGER_CONNECT",
            "SC_MANAGER_ENUMERATE_SERVICE",
            "SC_MANAGER_LOCK",
            "SC_MANAGER_MODIFY_BOOT_CONFIG",
            "SC_MANAGER_QUERY_LOCK_STATUS",
        )
    },
    'service': {
        win32service: (
            "SERVICE_INTERROGATE",
            "SERVICE_QUERY_STATUS",
            "SERVICE_ENUMERATE_DEPENDENTS",
            "SERVICE_ALL_ACCESS",
            "SERVICE_CHANGE_CONFIG",
            "SERVICE_PAUSE_CONTINUE",
            "SERVICE_QUERY_CONFIG",
            "SERVICE_START",
            "SERVICE_STOP",
            "SERVICE_USER_DEFINED_CONTROL", # TODO this is granted most of the time. Double check that's not a bad thing.
        )
    },
    'process': {
        win32con: (
            "PROCESS_TERMINATE",
            "PROCESS_CREATE_THREAD",
            "PROCESS_VM_OPERATION",
            "PROCESS_VM_READ",
            "PROCESS_VM_WRITE",
            "PROCESS_DUP_HANDLE",
            "PROCESS_CREATE_PROCESS",
            "PROCESS_SET_QUOTA",
            "PROCESS_SET_INFORMATION",
            "PROCESS_QUERY_INFORMATION",
            "PROCESS_ALL_ACCESS"
        ),
        ntsecuritycon: (
            "DELETE",
            "READ_CONTROL",
            "WRITE_DAC",
            "WRITE_OWNER",
            "SYNCHRONIZE",
            "STANDARD_RIGHTS_REQUIRED",
            "STANDARD_RIGHTS_READ",
            "STANDARD_RIGHTS_WRITE",
            "STANDARD_RIGHTS_EXECUTE",
            "STANDARD_RIGHTS_ALL",
            "SPECIFIC_RIGHTS_ALL",
            "ACCESS_SYSTEM_SECURITY",
            "MAXIMUM_ALLOWED",
            "GENERIC_READ",
            "GENERIC_WRITE",
            "GENERIC_EXECUTE",
            "GENERIC_ALL"
        )
    },
    'thread': {
        win32con: (
            "THREAD_TERMINATE",
            "THREAD_SUSPEND_RESUME",
            "THREAD_GET_CONTEXT",
            "THREAD_SET_CONTEXT",
            "THREAD_SET_INFORMATION",
            "THREAD_QUERY_INFORMATION",
            "THREAD_SET_THREAD_TOKEN",
            "THREAD_IMPERSONATE",
            "THREAD_DIRECT_IMPERSONATION",
            "THREAD_ALL_ACCESS",
            "THREAD_QUERY_LIMITED_INFORMATION",
            "THREAD_SET_LIMITED_INFORMATION"
        ),
        ntsecuritycon: (
            "DELETE",
            "READ_CONTROL",
            "WRITE_DAC",
            "WRITE_OWNER",
            "SYNCHRONIZE",
        )
    },
}
# Used to store a data structure representing the issues we've found.
# We use this to generate the report.
issues = {}
# Per-issue boilerplate keyed by issue ID (WPCxxx).  'supporting_data' maps
# a data-bucket name to the report section it belongs in and the preamble
# printed before the collected items.  Spelling/grammar in the user-facing
# strings has been corrected ("OBSELETE" -> "OBSOLETE", "changable" ->
# "changeable", "IMPLMENTED" -> "IMPLEMENTED", "maliciouly"/"malciously"
# -> "maliciously", "only ... only" doubled adverb removed).
issue_template = {
    'WPC001': {
        'title': "Insecure Permissions on Program Files",
        'description': '''Some of the programs in %ProgramFiles% and/or %ProgramFiles(x86)% could be changed by non-administrative users.
This could allow certain users on the system to place malicious code into certain key directories, or to replace programs with malicious ones. A malicious local user could use this technique to hijack the privileges of other local users, running commands with their privileges.
''',
        'recommendation': '''Programs run by multiple users should be changeable only by administrative users. The directories containing these programs should be changeable only by administrators too. Revoke write privileges for non-administrative users from the above programs and directories.''',
        'supporting_data': {
            'writable_progs': {
                'section': "description",
                'preamble': "The programs below can be modified by non-administrative users:",
            },
            'writable_dirs': {
                'section': "description",
                'preamble': "The directories below can be changed by non-administrative users:",
            },
        }
    },
    'WPC002': {
        'title': "Insecure Permissions on Files and Directories in Path (OBSOLETE ISSUE)",
        'description': '''Some of the programs and directories in the %PATH% variable could be changed by non-administrative users.
This could allow certain users on the system to place malicious code into certain key directories, or to replace programs with malicious ones. A malicious local user could use this technique to hijack the privileges of other local users, running commands with their privileges.
''',
        'recommendation': '''Programs run by multiple users should be changeable only by administrative users. The directories containing these programs should be changeable only by administrators too. Revoke write privileges for non-administrative users from the above programs and directories.''',
        'supporting_data': {
            'writable_progs': {
                'section': "description",
                'preamble': "The programs below are in the path of the user used to carry out this audit. Each one can be changed by non-administrative users:",
            },
            'writable_dirs': {
                'section': "description",
                'preamble': "The directories below are in the path of the user used to carry out this audit. Each one can be changed by non-administrative users:",
            }
        }
    },
    'WPC003': {
        'title': "Insecure Permissions In Windows Registry",
        'description': '''Some registry keys that hold the names of programs run by other users were checked and found to have insecure permissions. It would be possible for non-administrative users to modify the registry to cause a different programs to be run. This weakness could be abused by low-privileged users to run commands of their choosing with higher privileges.''',
        'recommendation': '''Modify the permissions on the above registry keys to allow only administrators write access. Revoke write access from low-privileged users.''',
        'supporting_data': {
            'writable_reg_paths': {
                'section': "description",
                'preamble': "The registry keys below could be changed by non-administrative users:",
            },
        }
    },
    'WPC004': {
        'title': "Insecure Permissions On Windows Service Executables",
        'description': '''Some of the programs that are run when Windows Services start were found to have weak file permissions. It is possible for non-administrative local users to replace some of the Windows Service executables with malicious programs.''',
        'recommendation': '''Modify the permissions on the above programs to allow only administrators write access. Revoke write access from low-privileged users.''',
        'supporting_data': {
            'writable_progs': {
                'section': "description",
                'preamble': "The programs below could be changed by non-administrative users:",
            },
        }
    },
    'WPC005': {
        'title': "Insecure Permissions On Windows Service Registry Keys (NOT IMPLEMENTED YET)",
        'description': '''Some registry keys that hold the names of programs that are run when Windows Services start were found to have weak file permissions. They could be changed by non-administrative users to cause malicious programs to be run instead of the intended Windows Service Executable.''',
        'recommendation': '''Modify the permissions on the above programs to allow only administrators write access. Revoke write access from low-privileged users.''',
        'supporting_data': {
            'writable_reg_paths': {
                'section': "description",
                'preamble': "The registry keys below could be changed by non-administrative users:",
            },
        }
    },
    'WPC007': {
        'title': "Insecure Permissions On Event Log File",
        'description': '''Some of the Event Log files could be changed by non-administrative users. This may allow attackers to cover their tracks.''',
        'recommendation': '''Modify the permissions on the above files to allow only administrators write access. Revoke write access from low-privileged users.''',
        'supporting_data': {
            'writable_eventlog_file': {
                'section': "description",
                'preamble': "The files below could be changed by non-administrative users:",
            },
        }
    },
    'WPC008': {
        'title': "Insecure Permissions On Event Log DLL",
        'description': '''Some DLL files used by Event Viewer to display logs could be changed by non-administrative users. It may be possible to replace these with a view to having code run when an administrative user next views log files.''',
        'recommendation': '''Modify the permissions on the above DLLs to allow only administrators write access. Revoke write access from low-privileged users.''',
        'supporting_data': {
            'writable_eventlog_dll': {
                'section': "description",
                'preamble': "The DLL files below could be changed by non-administrative users:",
            },
        }
    },
    'WPC009': {
        'title': "Insecure Permissions On Event Log Registry Key (NOT IMPLEMENTED YET)",
        'description': '''Some registry keys that hold the names of DLLs used by Event Viewer and the location of Log Files are writable by non-administrative users. It may be possible to maliciously alter the registry to change the location of log files or run malicious code.''',
        'recommendation': '''Modify the permissions on the above programs to allow only administrators write access. Revoke write access from low-privileged users.''',
        'supporting_data': {
            'writable_eventlog_key': {
                'section': "description",
                'preamble': "The registry keys below could be changed by non-administrative users:",
            },
        }
    },
    'WPC010': {
        'title': "Insecure Permissions On Drive Root",
        'description': '''Some of the local drive roots allow non-administrative users to create files and folders. This could allow malicious files to be placed on the server in the hope that they'll allow a local user to escalate privileges (e.g. create program.exe which might get accidentally launched by another user).''',
        'recommendation': '''Modify the permissions on the drive roots to only allow administrators write access. Revoke write access from low-privileged users.''',
        'supporting_data': {
            'writable_drive_root': {
                'section': "description",
                'preamble': "The following drives allow non-administrative users to write to their root directory:",
            },
        }
    },
    'WPC011': {
        'title': "Insecure (Non-NTFS) File System Used",
        'description': '''Some local drives use Non-NTFS file systems. These drive therefore don't allow secure file permissions to be used. Any local user can change any data on these drives.''',
        'recommendation': '''Use NTFS filesystems instead of FAT. Ensure that strong file permissions are set - NTFS file permissions are insecure by default after FAT file systems are converted.''',
        'supporting_data': {
            'fat_fs_drives': {
                'section': "description",
                'preamble': "The following drives use Non-NTFS file systems:",
            },
        }
    },
    'WPC012': {
        'title': "Insecure Permissions On Windows Services",
        'description': '''Some of the Windows Services installed have weak permissions. This could allow non-administrators to manipulate services to their own advantage. The impact depends on the permissions granted, but can include starting services, stopping service or even reconfiguring them to run a different program. This can lead to denial of service or even privilege escalation if the service is running as a user with more privilege than a malicious local user.''',
        'recommendation': '''Review the permissions that have been granted to non-administrative users and revoke access where possible.''',
        'supporting_data': {
            'weak_service_perms': {
                'section': "description",
                'preamble': "Some Windows Services can be manipulated by non-administrator users:",
            },
        }
    },
    'WPC013': {
        'title': "Insecure Permissions On Files / Directories In System PATH",
        'description': '''Some programs/directories in the system path have weak permissions. TODO which user are affected by this issue?''',
        'recommendation': '''Review the permissions that have been granted to non-administrative users and revoke access where possible.''',
        'supporting_data': {
            'weak_perms_exe': {
                'section': "description",
                'preamble': "The following programs/DLLs in the system PATH can be manipulated by non-administrator users:",
            },
            'weak_perms_dir': {
                'section': "description",
                'preamble': "The following directories in the system PATH can be manipulated by non-administrator users:",
            },
        }
    },
    'WPC014': {
        'title': "Insecure Permissions On Files / Directories In Current User's PATH",
        'description': '''Some programs/directories in the path of the user used to perform this audit have weak permissions. TODO which user was used to perform this audit?''',
        'recommendation': '''Review the permissions that have been granted to non-administrative users and revoke access where possible.''',
        'supporting_data': {
            'weak_perms_exe': {
                'section': "description",
                'preamble': "The following programs/DLLs in current user's PATH can be manipulated by non-administrator users:",
            },
            'weak_perms_dir': {
                'section': "description",
                'preamble': "The following directories in the current user's PATH can be manipulated by non-administrator users:",
            },
        }
    },
    'WPC015': {
        'title': "Insecure Permissions On Files / Directories In Users' PATHs (NEED TO CHECK THIS WORKS)",
        'description': '''Some programs/directories in the paths of users on this system have weak permissions.''',
        'recommendation': '''Review the permissions that have been granted to non-administrative users and revoke access where possible.''',
        'supporting_data': {
            'weak_perms_exe': {
                'section': "description",
                'preamble': "The following programs/DLLs in users' PATHs can be manipulated by non-administrator users:",
            },
            'weak_perms_dir': {
                'section': "description",
                'preamble': "The following directories in users' PATHs can be manipulated by non-administrator users:",
            },
        }
    },
    'WPC016': {
        'title': "Insecure Permissions On Running Programs",
        'description': '''Some programs running at the time of the audit have weak file permissions. The corresponding programs could be altered by non-administrator users.''',
        'recommendation': '''Review the permissions that have been granted to non-administrative users and revoke access where possible.''',
        'supporting_data': {
            'weak_perms_exes': {
                'section': "description",
                'preamble': "The following programs were running at the time of the audit, but could be changed on-disk by non-administrator users:",
            },
            'weak_perms_dlls': {
                'section': "description",
                'preamble': "The following DLLs are used by program which were running at the time of the audit. These DLLs can be changed on-disk by non-administrator users:",
            },
        }
    },
    'WPC017': {
        'title': "Shares Accessible By Non-Admin Users",
        'description': '''The share-level permissions on some Windows file shares allows access by non-administrative users. This can often be desirable, in which case this issue can be ignored. However, sometimes it can allow data to be stolen or programs to be maliciously modified. NB: Setting strong NTFS permissions can sometimes mean that data which seems to be exposed on a share actually isn't accessible.''',
        'recommendation': '''Review the share-level permissions that have been granted to non-administrative users and revoke access where possible. Share-level permissions can be viewed in Windows Explorer: Right-click folder | Sharing and Security | "Sharing" tab | "Permissions" button (for XP - other OSs may vary slightly).''',
        'supporting_data': {
            'non_admin_shares': {
                'section': "description",
                'preamble': "The following shares are accessible by non-administrative users:",
            },
        }
    },
}
# HTML fragment for a single issue; REPLACE_* placeholders are substituted
# by the report-generation code.
issue_template_html = '''
<h3>REPLACE_TITLE</h3>
<table>
<tr>
<td>
<b>Description</b>
</td>
<td>
REPLACE_DESCRIPTION
REPLACE_DESCRIPTION_DATA
</td>
</tr>
<tr>
<td>
<b>Recommendation</b>
</td>
<td>
REPLACE_RECOMMENDATION
REPLACE_RECOMMENDATION_DATA
</td>
</tr>
</table>
'''
# HTML fragment for a bulleted list of supporting data within an issue.
issue_list_html ='''
REPLACE_PREAMBLE
<ul>
REPLACE_ITEM
</ul>
'''
# TODO nice looking css, internal links, risk ratings
# TODO record group members for audit user, separate date and time; os and sp
# Top-level HTML page for the whole report (front page, contents, audit
# metadata, issues, and scan parameters); REPLACE_* placeholders are
# substituted by format_issues().
overview_template_html = '''
<html>
<head>
<style type="text/css">
body {color:black}
td
{
vertical-align:top;
}
h1 {font-size: 300%; text-align:center}
h2 {font-size: 200%; margin-top: 25px; margin-bottom: 0px; padding: 5px; background-color: #CCCCCC;}
h3 {font-size: 150%; font-weight: normal; padding: 5px; background-color: #EEEEEE; margin-top: 10px;}
#frontpage {height: 270px; background-color: #F3F3F3;}
p.ex {color:rgb(0,0,255)}
#customers
{
font-family:"Trebuchet MS", Arial, Helvetica, sans-serif;
/* width:100%; */
padding:10px 0px 0px 0px;
border-collapse:collapse;
}
#customers td, #customers th
{
font-size:1em;
border:1px solid #989898;
padding:3px 7px 2px 7px;
}
#customers th
{
font-size:1.1em;
text-align:left;
padding-top:5px;
padding-bottom:4px;
background-color:#A7C942;
color:#ffffff;
}
#customers tr.alt td
{
color:#000000;
background-color:#EAF2D3;
}
</style>
</head>
<div id="frontpage">
<h1><p>windows-privesc-check</p> <p>Audit of Host: </p><p>REPLACE_HOSTNAME</p></h1>
</div>
<h2>Contents</h2>
REPLACE_CONTENTS
<h2>Information about this Audit</h2>
<p>This report was generated on REPLACE_DATETIME by vREPLACE_VERSION of <a href="http://pentestmonkey.net/windows-privesc-check">windows-privesc-check</a>.</p>
<p>The audit was run as the user REPLACE_AUDIT_USER.</p>
<p>The following table provides information about this audit:</p>
<table id="customers" border="1">
<tr>
<td>Hostname</td>
<td>REPLACE_HOSTNAME</td>
</tr>
<tr class="alt">
<td>Domain/Workgroup</td>
<td>REPLACE_DOMWKG</td>
</tr>
<tr>
<td>Operating System</td>
<td>REPLACE_OS</td>
</tr>
<tr class="alt">
<td>IP Addresses</td>
<td><ul>REPLACE_IPS</ul></td>
</tr>
</table>
<h2>Escalation Vectors</h2>
REPLACE_ISSUES
<h2>Scan Parameters</h2>
For the purposes of the audit the following users were considered to be trusted. Any privileges assigned to them have not been considered as potential attack vectors:
<ul>
REPLACE_TRUSTED_USERS
</ul>
Additionally members of the following groups were considered trusted:
<ul>
REPLACE_TRUSTED_GROUPS
</ul>
The following file/directory/registry permissions were considered to be potentially dangerous. This audit exclusively searched for instances of these permissions:
<ul>
REPLACE_DANGEROUS_PERMS
</ul>
</html>
'''
def handle_unicode(u):
    """Best-effort conversion of *u* to ASCII-safe form.

    First tries to ASCII-encode, replacing unencodable characters with '?'.
    If that fails (e.g. *u* is a raw byte string that cannot be encoded, or
    lacks an .encode method), decode it as ASCII dropping undecodable bytes.
    """
    try:
        return u.encode('ascii', 'replace')
    except (UnicodeError, AttributeError):
        # Narrowed from a bare 'except': only encoding failures and
        # objects without .encode should take the fallback path.
        return u.decode("ascii", 'ignore')
def usage():
    """Print the command-line help (check selectors and options) and exit
    with status 0.  (Python 2 print statements; output order is part of the
    user-facing contract.)"""
    print "Usage: windows-privesc-check [options] checks"
    print ""
    print "checks must be at least one of:"
    print "  -a|--all_checks        Run all security checks (see below)"
    print "  -r|--registry_checks   Check RunOnce and other critical keys"
    print "  -t|--path_checks       Check %PATH% for insecure permissions"
    print "  -S|--service_checks    Check Windows services for insecure permissions"
    print "  -d|--drive_checks      Check for FAT filesystems and weak perms in root dir"
    print "  -E|--eventlog_checks   Check Event Logs for insecure permissions"
    print "  -F|--progfiles_checks  Check Program Files directories for insecure perms"
    print "  -R|--process_checks    Check Running Processes for insecure permissions"
    print "  -H|--share_checks      Check shares for insecure permissions"
    #print "  -T|--patch_checks      Check some important patches"
    print "  -U|--user_groups       Dump users, groups and privileges (no HTML yet)"
    print "  -A|--admin_users       Dump admin users / high priv users (no HTML yet)"
    print "  -O|--processes         Dump process info (no HTML yet)"
    print "  -P|--passpol           Dump password policy (no HTML yet)"
    print "  -i|--host_info         Dump host info - OS, domain controller, ... (no HTML yet)"
    print "  -e|--services          Dump service info (no HTML yet)"
    # TODO options to flag a user/group as trusted
    print ""
    print "options are:"
    print "  -h|--help              This help message"
    print "  -w|--write_perms_only  Only list write perms (dump opts only)"
    print "  -I|--ignore_trusted    Ignore trusted users, empty groups (dump opts only)"
    print "  -W|--owner_info        Owner, Group info (dump opts only)"
    print "  -v|--verbose           More detail output (use with -U)"
    print "  -o|--report_file file  Report filename.  Default privesc-report-[host].html"
    print "  -s|--server host       Remote server name.  Only works with -u!"
    print "  -u|--username arg      Remote username.  Only works with -u!"
    print "  -p|--password arg      Remote password.  Only works with -u!"
    print "  -d|--domain arg        Remote domain.  Only works with -u!"
    print ""
    sys.exit(0)
#
# Reporting functions
#
def format_issues(format, issue_template, issue_data):
    """Build the complete HTML report from the overview template.

    Fills in host/audit metadata (hostname, domain/workgroup, IPs, OS,
    tool version, date, audit user, trusted users/groups and the set of
    permissions considered dangerous), then appends one rendered section
    per issue plus a table of contents.

    Args:
        format: output format selector ("xml"/"html"/"text"); only the
            HTML template is currently used.
        issue_template: dict of issue number -> static issue text
            (title, description, recommendation, supporting_data).
        issue_data: dict of issue number -> collected evidence.

    Returns:
        The finished report as a single string.
    """
    report = ""
    toc = ""
    overview = overview_template_html
    overview = overview.replace('REPLACE_HOSTNAME', audit_data['hostname'])
    overview = overview.replace('REPLACE_DOMWKG', audit_data['domwkg'])
    # List placeholders are expanded by repeatedly substituting
    # "<li>item</li>\nPLACEHOLDER" (see list_item), then stripping the
    # leftover placeholder once every item is in.
    for item in audit_data['ips']:
        overview = overview.replace('REPLACE_IPS', list_item("REPLACE_IPS", item))
    overview = overview.replace('REPLACE_IPS', '')
    overview = overview.replace('REPLACE_OS', audit_data['os_name'] + " (" + audit_data['os_version'] + ")")
    overview = overview.replace('REPLACE_VERSION', audit_data['version'])
    overview = overview.replace('REPLACE_DATETIME', audit_data['datetime'])
    overview = overview.replace('REPLACE_AUDIT_USER', audit_data['audit_user'])
    for item in audit_data['trusted_users']:
        overview = overview.replace('REPLACE_TRUSTED_USERS', list_item("REPLACE_TRUSTED_USERS", item))
    overview = overview.replace('REPLACE_TRUSTED_USERS', '')
    for item in audit_data['trusted_groups']:
        overview = overview.replace('REPLACE_TRUSTED_GROUPS', list_item("REPLACE_TRUSTED_GROUPS", item))
    overview = overview.replace('REPLACE_TRUSTED_GROUPS', '')
    # Describe the permissions that were treated as dangerous, grouped by
    # object type.
    permlist = ''
    for permtype in dangerous_perms_write.keys():
        permlist += "Permission type '" + permtype + "'<p>"
        permlist += "<ul>"
        for location in dangerous_perms_write[permtype].keys():
            for item in dangerous_perms_write[permtype][location]:
                permlist += "\t<li>" + item + "</li>"
        permlist += "</ul>"
    overview = overview.replace('REPLACE_DANGEROUS_PERMS', permlist)
    # BUG FIX (dead code removed): a second loop substituting 'REPLACE_IP'
    # used to run here.  The only occurrence of that token was inside
    # 'REPLACE_IPS', which is fully substituted above, so the loop was a
    # no-op and has been deleted.
    for issue_no in issue_data:
        report = report + format_issue(format, issue_no, issue_data, issue_template)
        toc = toc + '<a href="#' + issue_template[issue_no]['title'] + '">' + issue_template[issue_no]['title'] + "</a><p>"
    if report:
        overview = overview.replace('REPLACE_ISSUES', report)
        overview = overview.replace('REPLACE_CONTENTS', toc)
    else:
        overview = overview.replace('REPLACE_ISSUES', "No issues found")
        overview = overview.replace('REPLACE_CONTENTS', "No issues found")
    return overview
def list_item(tag, item):
    """Return *item* wrapped in an HTML <li>, with *tag* re-appended on a
    new line so the caller can keep substituting further items at the
    same placeholder."""
    return "<li>%s</li>\n%s" % (item, tag)
def format_issue(format, issue_no, issue_data, issue_template): # $format is xml, html, or text
if not issue_no in issue_template:
print "[E] Can't find an issue template for issue number issue_no. Bug!"
sys.exit(1)
issue = issue_template_html
issue = issue.replace('REPLACE_TITLE', '<a name="' + issue_template[issue_no]['title'] + '">' + issue_template[issue_no]['title'] + '</a>')
description = issue_template[issue_no]['description']
description = description.replace('\n\n+', "<p>\n")
for key in issue_data[issue_no]:
#print "[D] Processing data for %s" % key
# print "[D] $key has type issue_data[issue_no]['$key']['type']\n"
#if issue_data[issue_no][key]['type'] == "list":
# TODO alter data structre to include type
#section = issue_template[issue_no]['supporting_data'][key]['section']
# print "[D] Data belongs to section section\n"
#if (section == "description"):
preamble = issue_template[issue_no]['supporting_data'][key]['preamble']
data = issue_list_html
data = data.replace('REPLACE_PREAMBLE', preamble)
for item in issue_data[issue_no][key]:
# TODO alter data structure to include data
# print "Processing item " + item
perm_string = " ".join(issue_data[issue_no][key][item])
data = data.replace('REPLACE_ITEM', list_item("REPLACE_ITEM", item + ": " + perm_string))
data = data.replace('REPLACE_ITEM', '')
issue = issue.replace('REPLACE_DESCRIPTION_DATA', data + "\nREPLACE_DESCRIPTION_DATA")
#elif section == "recommendation":
# pass
#issue = issue.replace('REPLACE_RECOMMENDATION_DATA', "data\nREPLACE_DESCRIPTION_DATA',
issue = issue.replace('REPLACE_RECOMMENDATION_DATA', '')
issue = issue.replace('REPLACE_DESCRIPTION_DATA', '')
issue = issue.replace('REPLACE_DESCRIPTION', description + "<p>\n")
recommendation = issue_template[issue_no]['recommendation']
issue = issue.replace('REPLACE_RECOMMENDATION', recommendation + "<p>\n")
recommendation = recommendation.replace('\n\n+', '<p>\n')
return issue
def format_audit_data(format, audit_data): # $format is xml, html, or text
    # Stub: rendering of the host/audit metadata section has not been
    # written yet; this just announces that fact on stdout.
    print "format_audit_data not implemented yet"
# Inputs:
# string: issue_name
# array: weak_perms
def save_issue(issue_name, data_type, weak_perms):
    """Record weak-permission findings in the global *issues* dict.

    Each element of *weak_perms* is [object, domain, name, permission].
    Findings are grouped under issues[issue_name][data_type] by an
    object/trustee description string, and the permission list for each
    key is de-duplicated after every append.
    """
    global issues
    if issue_name not in issues:
        issues[issue_name] = {}
    for entry in weak_perms:
        obj, dom, name, permission = entry[0], entry[1], entry[2], entry[3]
        key = u"%s has the following permissions granted for %s\\%s" % (
            handle_unicode(obj), handle_unicode(dom), handle_unicode(name))
        bucket = issues[issue_name].setdefault(data_type, {})
        perms_so_far = bucket.setdefault(key, [])
        perms_so_far.append(permission)
        bucket[key] = list(set(perms_so_far))  # de-duplicate
def save_issue_string(issue_name, data_type, issue_string):
    """Record a one-off textual finding in the global *issues* dict.

    The string becomes a key of issues[issue_name][data_type] mapping to
    an (initially empty) list; saving the same string twice is a no-op.
    """
    global issues
    per_issue = issues.setdefault(issue_name, {})
    per_type = per_issue.setdefault(data_type, {})
    if issue_string not in per_type:
        per_type[issue_string] = []
# args: string, string
# Returns 1 if the principle provided is trusted (admin / system / user-definted trusted principle)
# Returns 0 otherwise
def principle_is_trusted(principle, domain):
    """Return 1 when the given principal is trusted (its privileges are
    not interesting as an attack vector), 0 otherwise.

    A principal is trusted when it appears in the static or temporary
    trusted lists (fully qualified or bare), when it is a local group
    with zero members (an empty group can't grant anyone access), or
    when it is a user belonging to a trusted group (e.g. Administrators).
    TODO: also ignore groups whose only members are administrators.
    """
    global tmp_trusted_principles_fq
    fq_name = domain + "\\" + principle
    if fq_name in trusted_principles_fq:
        return 1
    if principle in trusted_principles:
        return 1
    if fq_name in tmp_trusted_principles_fq:
        return 1
    try:
        memberdict, total, rh = win32net.NetLocalGroupGetMembers(remote_server, principle, 1, 0, 100000)
        if len(memberdict) == 0:
            return 1
    except:
        # Not a local group -- maybe a user who is a member of a trusted
        # group; best-effort, so any lookup failure means "not trusted".
        try:
            group_attrs = win32net.NetUserGetLocalGroups(remote_server, principle)
            if set(group_attrs).intersection(set(trusted_principles)):
                return 1
        except:
            pass
    return 0
# There are all possible objects. SE_OBJECT_TYPE (http://msdn.microsoft.com/en-us/library/aa379593(VS.85).aspx):
# win32security.SE_UNKNOWN_OBJECT_TYPE
# win32security.SE_FILE_OBJECT
# win32security.SE_SERVICE
# win32security.SE_PRINTER
# win32security.SE_REGISTRY_KEY
# win32security.SE_LMSHARE
# win32security.SE_KERNEL_OBJECT
# win32security.SE_WINDOW_OBJECT
# win32security.SE_DS_OBJECT
# win32security.SE_DS_OBJECT_ALL
# win32security.SE_PROVIDER_DEFINED_OBJECT
# win32security.SE_WMIGUID_OBJECT
# win32security.SE_REGISTRY_WOW64_32KEY
# object_type_s is one of
# service
# file
# dir
def check_weak_perms(object_name, object_type_s, perms):
    """Fetch the security descriptor of a named object and return a list
    of [object_name, domain, principle, perm] entries for any permission
    in *perms* granted to an untrusted principal.

    object_type_s is one of 'file', 'directory' or 'service'; for file
    objects the type string is corrected based on what is actually on
    disk.  Returns [] when the security descriptor can't be read
    (e.g. access denied).
    """
    object_type = None
    if object_type_s == 'file':
        object_type = win32security.SE_FILE_OBJECT
    if object_type_s == 'directory':
        object_type = win32security.SE_FILE_OBJECT
    if object_type_s == 'service':
        object_type = win32security.SE_SERVICE
    if object_type == win32security.SE_FILE_OBJECT:
        # if not os.path.exists(object_name):
        #     print "WARNING: %s doesn't exist" % object_name
        # Correct the caller-supplied type: files and directories use
        # different dangerous-permission sets.
        if os.path.isfile(object_name):
            object_type_s = 'file'
        else:
            object_type_s = 'directory'
    if object_type == None:
        print "ERROR: Unknown object type %s" % object_type_s
        exit(1)
    try:
        sd = win32security.GetNamedSecurityInfo (
            object_name,
            object_type,
            win32security.OWNER_SECURITY_INFORMATION | win32security.DACL_SECURITY_INFORMATION
        )
    except:
        # Inaccessible object: deliberately treated as "no findings".
        # print "WARNING: Can't get security descriptor for " + object_name + ". skipping. (" + details[2] + ")"
        return []
    return check_weak_perms_sd(object_name, object_type_s, sd, perms)
def check_weak_write_perms_by_sd(object_name, object_type_s, sd):
    """Scan an already-fetched security descriptor *sd* for the dangerous
    write permissions and return the resulting findings list."""
    findings = check_weak_perms_sd(object_name, object_type_s, sd, dangerous_perms_write)
    return findings
def check_weak_perms_sd(object_name, object_type_s, sd, perms):
    """Walk the DACL in *sd* and collect [object_name, domain, principle,
    perm] for every permission in perms[object_type_s] granted to an
    untrusted principal by an access-allowed ACE.

    Only ACCESS_ALLOWED_ACE_TYPE entries are processed; other ACE types
    are reported via vprint and skipped.  "CREATOR OWNER" ACEs are
    attributed to the actual object owner.
    """
    dacl= sd.GetSecurityDescriptorDacl()
    if dacl == None:
        print "No Discretionary ACL"
        return []
    # Resolve the owner so CREATOR OWNER ACEs can be attributed; fall
    # back to the raw SID string, or a marker when even that fails.
    owner_sid = sd.GetSecurityDescriptorOwner()
    try:
        owner_name, owner_domain, type = win32security.LookupAccountSid(remote_server, owner_sid)
        owner_fq = owner_domain + "\\" + owner_name
    except:
        try:
            owner_fq = owner_name = win32security.ConvertSidToStringSid(owner_sid)
            owner_domain = ""
        except:
            owner_domain = ""
            owner_fq = owner_name = "INVALIDSID!"
    weak_perms = []
    for ace_no in range(0, dacl.GetAceCount()):
        #print "[D] ACE #%d" % ace_no
        ace = dacl.GetAce(ace_no)
        flags = ace[0][1]
        # Resolve the trustee; fall back to the SID string on failure.
        try:
            principle, domain, type = win32security.LookupAccountSid(remote_server, ace[2])
        except:
            principle = win32security.ConvertSidToStringSid(ace[2])
            domain = ""
        #print "[D] ACE is for %s\\%s" % (principle, domain)
        #print "[D] ACE Perm mask: " + int2bin(ace[1])
        #print "[D] ace_type: " + str(ace[0][0])
        #print "[D] DACL: " + win32security.ConvertSecurityDescriptorToStringSecurityDescriptor(sd, win32security.SDDL_REVISION_1, win32security.DACL_SECURITY_INFORMATION)
        if principle_is_trusted(principle, domain):
            #print "[D] Ignoring trusted principle %s\\%s" % (principle, domain)
            continue
        if principle == "CREATOR OWNER":
            if principle_is_trusted(owner_name, owner_domain):
                continue
            else:
                principle = "CREATOR OWNER [%s]" % owner_fq
        # Map the numeric ACE type back to its symbolic name.
        # NOTE(review): ace_type_s would be unbound here if an ACE type
        # outside these four ever appeared -- confirm that can't happen.
        for i in ("ACCESS_ALLOWED_ACE_TYPE", "ACCESS_DENIED_ACE_TYPE", "SYSTEM_AUDIT_ACE_TYPE", "SYSTEM_ALARM_ACE_TYPE"):
            if getattr(ntsecuritycon, i) == ace[0][0]:
                ace_type_s = i
        if not ace_type_s == "ACCESS_ALLOWED_ACE_TYPE":
            vprint("WARNING: Unimplmented ACE type encountered: " + ace_type_s + ". skipping.")
            continue
        # Record each dangerous permission whose bits are all present in
        # the ACE's access mask.
        for mod, perms_tuple in perms[object_type_s].iteritems():
            for perm in perms_tuple:
                if getattr(mod, perm) & ace[1] == getattr(mod, perm):
                    weak_perms.append([object_name, domain, principle, perm])
    return weak_perms
def dump_perms(object_name, object_type_s, options={}):
    # NOTE(review): mutable default argument; apparently only read here,
    # but confirm dump_sd/dump_acl never mutate it.
    """Print the full ACL of a named file/directory/service (via dump_sd).

    Mirrors check_weak_perms' type handling: file objects are corrected
    to 'file'/'directory' based on what is on disk.  Returns [] when the
    security descriptor can't be read.
    """
    object_type = None
    if object_type_s == 'file':
        object_type = win32security.SE_FILE_OBJECT
    if object_type_s == 'directory':
        object_type = win32security.SE_FILE_OBJECT
    if object_type_s == 'service':
        object_type = win32security.SE_SERVICE
    if object_type == win32security.SE_FILE_OBJECT:
        # if not os.path.exists(object_name):
        #     print "WARNING: %s doesn't exist" % object_name
        if os.path.isfile(object_name):
            object_type_s = 'file'
        else:
            object_type_s = 'directory'
    if object_type == None:
        print "ERROR: Unknown object type %s" % object_type_s
        exit(1)
    try:
        sd = win32security.GetNamedSecurityInfo (
            object_name,
            object_type,
            win32security.OWNER_SECURITY_INFORMATION | win32security.DACL_SECURITY_INFORMATION
        )
    except:
        # Inaccessible object: silently skipped, consistent with
        # check_weak_perms.
        # print "WARNING: Can't get security descriptor for " + object_name + ". skipping. (" + details[2] + ")"
        return []
    return dump_sd(object_name, object_type_s, sd, options)
def dump_sd(object_name, object_type_s, sd, options={}):
    # NOTE(review): mutable default argument; passed straight through to
    # dump_acl -- confirm it is never mutated.
    """Print owner/group information (when the global owner_info flag is
    set) and then the DACL of *sd* via dump_acl.  Returns None, or [] if
    there is no discretionary ACL."""
    perms = all_perms
    if not sd:
        return
    dacl = sd.GetSecurityDescriptorDacl()
    if dacl == None:
        print "No Discretionary ACL"
        return []
    # Resolve the owner, falling back to the raw SID string or None.
    owner_sid = sd.GetSecurityDescriptorOwner()
    try:
        owner_name, owner_domain, type = win32security.LookupAccountSid(remote_server, owner_sid)
        owner_fq = owner_domain + "\\" + owner_name
    except:
        try:
            owner_fq = owner_name = win32security.ConvertSidToStringSid(owner_sid)
            owner_domain = ""
        except:
            owner_domain = ""
            owner_fq = owner_name = None
    # Resolve the primary group the same way.
    group_sid = sd.GetSecurityDescriptorGroup()
    try:
        group_name, group_domain, type = win32security.LookupAccountSid(remote_server, group_sid)
        group_fq = group_domain + "\\" + group_name
    except:
        try:
            group_fq = group_name = win32security.ConvertSidToStringSid(group_sid)
            group_domain = ""
        except:
            group_domain = ""
            group_fq = group_name = "[none]"
    # owner_info is a module-level flag set from the -W command-line option.
    if owner_info:
        print "\tOwner: " + str(owner_fq)
        print "\tGroup: " + str(group_fq)
    weak_perms = []
    dump_acl(object_name, object_type_s, dacl, options)
    return
def dump_acl(object_name, object_type_s, sd, options={}):
    # NOTE(review): mutable default argument; only forwarded to
    # print_weak_perms here.
    """Print every ACE of the given DACL (parameter *sd* is actually a
    DACL object, despite the name), listing each permission bit present
    in the access mask together with ALLOW/DENY, via print_weak_perms.

    Honours the module-level flags ignore_trusted (skip trusted
    principals) and weak_perms_only (restrict to dangerous write perms).
    """
    dacl = sd
    if dacl == None:
        print "No Discretionary ACL"
        return []
    weak_perms = []
    for ace_no in range(0, dacl.GetAceCount()):
        # print "[D] ACE #%d" % ace_no
        ace = dacl.GetAce(ace_no)
        flags = ace[0][1]
        # Resolve the trustee; fall back to the SID string on failure.
        try:
            principle, domain, type = win32security.LookupAccountSid(remote_server, ace[2])
        except:
            principle = win32security.ConvertSidToStringSid(ace[2])
            domain = ""
        # Normalise the mask to an unsigned 32-bit value (pywin32 can
        # return it as a negative signed int).
        mask = ace[1]
        if ace[1] < 0:
            mask = ace[1] + 2**32
        if ignore_trusted and principle_is_trusted(principle, domain):
            # print "[D] Ignoring trusted principle %s\\%s" % (principle, domain)
            continue
        if principle == "CREATOR OWNER":
            # NOTE(review): owner_name/owner_domain are not defined in this
            # function (they exist in dump_sd/check_weak_perms_sd), so this
            # branch raises NameError when ignore_trusted is set -- confirm
            # and fix by passing the owner in.
            if ignore_trusted and principle_is_trusted(owner_name, owner_domain):
                #print "[D] Ignoring trusted principle (creator owner) %s\\%s" % (principle, domain)
                continue
            else:
                principle = "CREATOR OWNER [%s\%s]" % (domain, principle)
        # Map the numeric ACE type to a short ALLOW/DENY label.
        for i in ("ACCESS_ALLOWED_ACE_TYPE", "ACCESS_DENIED_ACE_TYPE", "SYSTEM_AUDIT_ACE_TYPE", "SYSTEM_ALARM_ACE_TYPE"):
            if getattr(ntsecuritycon, i) == ace[0][0]:
                ace_type_s = i
        ace_type_short = ace_type_s
        if ace_type_s == "ACCESS_DENIED_ACE_TYPE":
            ace_type_short = "DENY"
        if ace_type_s == "ACCESS_ALLOWED_ACE_TYPE":
            ace_type_short = "ALLOW"
        if weak_perms_only:
            perms = dangerous_perms_write
        else:
            perms = all_perms
        for mod, perms_tuple in perms[object_type_s].iteritems():
            for perm in perms_tuple:
                #print "Checking for perm %s in ACE %s" % (perm, mask)
                if getattr(mod, perm) & mask == getattr(mod, perm):
                    weak_perms.append([object_name, domain, principle, perm, ace_type_short])
    print_weak_perms(object_type_s, weak_perms, options)
def check_weak_write_perms(object_name, object_type_s):
    """Look up the named object's DACL and report dangerous write
    permissions granted to untrusted principals."""
    findings = check_weak_perms(object_name, object_type_s, dangerous_perms_write)
    return findings
def check_registry():
    """Check each registry key listed in the global reg_paths for
    dangerous write permissions and record findings as issue WPC003.

    Each entry is a "HIVE\\path" string; keys that can't be opened are
    silently skipped.  Writes '.' progress dots when not verbose.
    """
    for key_string in reg_paths:
        # Split "HIVE\path" into the hive constant name and the subkey.
        parts = key_string.split("\\")
        hive = parts[0]
        key_string = "\\".join(parts[1:])
        try:
            keyh = win32api.RegOpenKeyEx(getattr(win32con, hive), key_string, 0, win32con.KEY_ENUMERATE_SUB_KEYS | win32con.KEY_QUERY_VALUE | win32con.KEY_READ)
        except:
            # Key absent or inaccessible: deliberately ignored.
            #print "Can't open: " + hive + "\\" + key_string
            continue
        sd = win32api.RegGetKeySecurity(keyh, win32security.DACL_SECURITY_INFORMATION | win32security.OWNER_SECURITY_INFORMATION)
        weak_perms = check_weak_write_perms_by_sd(hive + "\\" + key_string, 'reg', sd)
        if weak_perms:
            vprint(hive + "\\" + key_string)
            #print weak_perms
            if verbose == 0:
                sys.stdout.write(".")
            save_issue("WPC003", "writable_reg_paths", weak_perms)
            # print_weak_perms("x", weak_perms)
    print
    # TODO save_issue("WPC009", "writable_eventlog_key", weak_perms) # weak perms on event log reg key
def check_event_logs():
    """Walk the event-log registry key and check the permissions of each
    log's display DLL (issue WPC008) and log file (issue WPC007).

    Progress is shown as '.' per subkey and '!' per finding.  Returns 0
    if the top-level event-log key can't be opened.
    """
    key_string = "HKEY_LOCAL_MACHINE\\" + eventlog_key_hklm
    try:
        keyh = win32api.RegOpenKeyEx(win32con.HKEY_LOCAL_MACHINE, eventlog_key_hklm , 0, win32con.KEY_ENUMERATE_SUB_KEYS | win32con.KEY_QUERY_VALUE | win32con.KEY_READ)
    except:
        print "Can't open: " + key_string
        return 0
    subkeys = win32api.RegEnumKeyEx(keyh)
    # One subkey per event log (Application, System, Security, ...).
    for subkey in subkeys:
        # print key_string + "\\" + subkey[0]
        sys.stdout.write(".")
        try:
            subkeyh = win32api.RegOpenKeyEx(keyh, subkey[0] , 0, win32con.KEY_ENUMERATE_SUB_KEYS | win32con.KEY_QUERY_VALUE | win32con.KEY_READ)
        except:
            print "Can't open: " + key_string
        else:
            subkey_count, value_count, mod_time = win32api.RegQueryInfoKey(subkeyh)
            # print "\tChild Nodes: %s subkeys, %s values" % (subkey_count, value_count)
            # "DisplayNameFile" points at the DLL holding the log's name.
            try:
                filename, type = win32api.RegQueryValueEx(subkeyh, "DisplayNameFile")
            except:
                pass
            else:
                weak_perms = check_weak_write_perms(os.path.expandvars(filename), 'file')
                if weak_perms:
                    # print "------------------------------------------------"
                    # print "Weak permissions found on event log display DLL:"
                    # print_weak_perms("File", weak_perms)
                    sys.stdout.write("!")
                    save_issue("WPC008", "writable_eventlog_dll", weak_perms)
            # "File" points at the .evt log file itself.
            try:
                filename, type = win32api.RegQueryValueEx(subkeyh, "File")
            except:
                pass
            else:
                weak_perms = check_weak_write_perms(os.path.expandvars(filename), 'file')
                if weak_perms:
                    # print "------------------------------------------------"
                    # print "Weak permissions found on event log file:"
                    # print_weak_perms("File", weak_perms)
                    sys.stdout.write("!")
                    save_issue("WPC007", "writable_eventlog_file", weak_perms)
    print
    #sd = win32api.RegGetKeySecurity(subkeyh, win32security.DACL_SECURITY_INFORMATION) # TODO: get owner too?
    #print "\tDACL: " + win32security.ConvertSecurityDescriptorToStringSecurityDescriptor(sd, win32security.SDDL_REVISION_1, win32security.DACL_SECURITY_INFORMATION)
def get_extra_privs():
    """Try to enable SeBackup, SeDebug and SeSecurity on our own process
    token so later checks can open more objects.  Only succeeds when
    running as an administrator; failures are left to the caller."""
    # Try to give ourselves some extra privs (only works if we're admin):
    # SeBackupPrivilege   - so we can read anything
    # SeDebugPrivilege    - so we can find out about other processes (otherwise OpenProcess will fail for some)
    # SeSecurityPrivilege - ??? what does this do?
    # Problem: Vista+ support "Protected" processes, e.g. audiodg.exe. We can't see info about these.
    # Interesting post on why Protected Process aren't really secure anyway: http://www.alex-ionescu.com/?p=34
    th = win32security.OpenProcessToken(win32api.GetCurrentProcess(), win32con.TOKEN_ADJUST_PRIVILEGES | win32con.TOKEN_QUERY)
    privs = win32security.GetTokenInformation(th, TokenPrivileges)
    newprivs = []
    for privtuple in privs:
        if privtuple[0] == win32security.LookupPrivilegeValue(remote_server, "SeBackupPrivilege") or privtuple[0] == win32security.LookupPrivilegeValue(remote_server, "SeDebugPrivilege") or privtuple[0] == win32security.LookupPrivilegeValue(remote_server, "SeSecurityPrivilege"):
            print "Added privilege " + str(privtuple[0])
            # privtuple[1] = 2 # tuples are immutable.  WHY?!
            newprivs.append((privtuple[0], 2)) # SE_PRIVILEGE_ENABLED
        else:
            newprivs.append((privtuple[0], privtuple[1]))
    # Adjust privs
    privs = tuple(newprivs)
    str(win32security.AdjustTokenPrivileges(th, False , privs))
def audit_processes():
    """Print a detailed security audit of every running process.

    For each PID: image path, token user/owner/primary group, token type
    and flags (restrictions, elevation, integrity level, mandatory
    policy on Vista+), token groups and privileges, plus the file
    permissions of the EXE and every loaded DLL (via dump_perms).
    Acquires SeBackup/SeDebug/SeSecurity first so more processes can be
    opened.  Output goes to stdout; nothing is returned.
    """
    get_extra_privs()
    # Things we might want to know about a process:
    # TCP/UDP/Local sockets
    # Treads - and the tokens of each (API doesn't support getting a thread handle!)
    # Shared memory
    pids = win32process.EnumProcesses()
    for pid in sorted(pids):
        print "---------------------------------------------------------"
        print "PID: %s" % pid
        # TODO there's a security descriptor for each process accessible via GetSecurityInfo according to http://msdn.microsoft.com/en-us/library/ms684880%28VS.85%29.aspx
        # Open the process with progressively weaker access rights until
        # one succeeds.
        ph = 0
        gotph = 0
        try:
            # PROCESS_VM_READ is required to list modules (DLLs, EXE)
            ph = win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION | win32con.PROCESS_VM_READ, False, pid)
            gotph = 1
            vprint("OpenProcess with VM_READ and PROCESS_QUERY_INFORMATION: Success")
        except:
            print("OpenProcess with VM_READ and PROCESS_QUERY_INFORMATION: Failed")
            try:
                # We can still get some info without PROCESS_VM_READ
                ph = win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION , False, pid)
                gotph = 1
                vprint("OpenProcess with PROCESS_QUERY_INFORMATION: Success")
            except:
                print "OpenProcess with PROCESS_QUERY_INFORMATION: Failed"
                try:
                    # If we have to resort to using PROCESS_QUERY_LIMITED_INFORMATION, the process is protected.
                    # There's no point trying PROCESS_VM_READ
                    ph = win32api.OpenProcess(win32con.PROCESS_QUERY_LIMITED_INFORMATION , False, pid)
                    gotph = 1
                    vprint("OpenProcess with PROCESS_QUERY_LIMITED_INFORMATION: Success")
                except:
                    print "OpenProcess with PROCESS_QUERY_LIMITED_INFORMATION: Failed"
                    # Move onto the next process.  We don't have a process handle!
        # First module handle is the EXE itself; the rest are DLLs.
        exe = "[unknown]"
        gotexe = 0
        mhs = 0
        try:
            mhs = win32process.EnumProcessModules(ph)
            mhs = list(mhs)
            exe = win32process.GetModuleFileNameEx(ph, mhs.pop(0))
            gotexe = 1
        except:
            pass
        print "Filename: %s" % exe
        # Examine the process token: user, owner, primary group.
        gottokenh = 0
        try:
            tokenh = win32security.OpenProcessToken(ph, win32con.TOKEN_QUERY)
            gottokenh = 1
            sidObj, intVal = win32security.GetTokenInformation(tokenh, TokenUser)
            if sidObj:
                accountName, domainName, accountTypeInt = win32security.LookupAccountSid(remote_server, sidObj)
                print "TokenUser: %s\%s (type %s)" % (domainName, accountName, accountTypeInt)
            sidObj = win32security.GetTokenInformation(tokenh, TokenOwner)
            if sidObj:
                accountName, domainName, accountTypeInt = win32security.LookupAccountSid(remote_server, sidObj)
                print "TokenOwner: %s\%s (type %s)" % (domainName, accountName, accountTypeInt)
            sidObj = win32security.GetTokenInformation(tokenh, TokenPrimaryGroup)
            if sidObj:
                accountName, domainName, accountTypeInt = win32security.LookupAccountSid(remote_server, sidObj)
                print "TokenPrimaryGroup: %s\%s (type %s)" % (domainName, accountName, accountTypeInt)
        except:
            print "OpenProcessToken with TOKEN_QUERY: Failed"
            print "TokenUser: Unknown"
            print "TokenOwner: Unknown"
            print "TokenPrimaryGroup: Unknown"
            pass
        user = "unknown\\unknown"
        # TODO I'm not sure how to interogate threads.
        # There's no OpenThread() in win32api.  I need a thread handle before I can get Thread Tokens.
        # The code below lists threadid's, be we can't use the handle (it's not a PyHandle)
        #
        # hThreadSnap = CreateToolhelp32Snapshot (TH32CS_SNAPTHREAD, pid)
        # if hThreadSnap == INVALID_HANDLE_VALUE:
        #     print "Failed to get Thread snapshot"
        # else:
        #     te32 = Thread32First (hThreadSnap)
        #     if te32:
        #         while True:
        #             if te32.th32OwnerProcessID == pid:
        #                 hThread = OpenThread (win32con.THREAD_QUERY_INFORMATION, FALSE, te32.th32ThreadID)
        #                 print "PID %s, ThreadID %s" % (pid, te32.th32ThreadID)
        #                 print "Priority: " + str(win32process.GetThreadPriority(hThread))
        #                 CloseHandle (hThread)
        #             te32 = Thread32Next (hThreadSnap)
        #             if not te32:
        #                 break
        #     CloseHandle (hThreadSnap)
        # except:
        #     print "EnumProcessModules: Failed"
        #     continue
        # print "EnumProcessModules: Success"
        if ph:
            print "IsWow64 Process: %s" % win32process.IsWow64Process(ph)
        # Dump everything else the token can tell us.  The per-attribute
        # try/excepts are deliberate: several of these token classes only
        # exist on XP-SP3/Vista and later.
        if gottokenh:
            vprint("OpenProcessToken with TOKEN_QUERY: Success")
            imp_levels = {
                "SecurityAnonymous": 0,
                "SecurityIdentification": 1,
                "SecurityImpersonation": 2,
                "SecurityDelegation": 3
            }
            #for ilevel in imp_levels.keys():
                #sys.stdout.write("Trying DuplicateToken with " + ilevel)
                #try:
                    #win32security.DuplicateToken(tokenh, imp_levels[ilevel])
                    #print "success"
                #except:
                    #print "failed"
            tokentype = win32security.GetTokenInformation(tokenh, TokenType)
            tokentype_str = "TokenImpersonation"
            if tokentype == 1:
                tokentype_str = "TokenPrimary"
            print "Token Type: " + tokentype_str
            print "Logon Session ID: " + str(win32security.GetTokenInformation(tokenh, TokenOrigin))
            try:
                source = win32security.GetTokenInformation(tokenh, TokenSource)
                print "Token Source: " + source
            except:
                print "Token Source: Unknown (Access Denied)"
            try:
                print "TokenImpersonationLevel: %s" % win32security.GetTokenInformation(tokenh, TokenImpersonationLevel) # doesn't work on xp
            except:
                pass
            try:
                r = win32security.GetTokenInformation(tokenh, TokenHasRestrictions) # doesn't work on xp
                if r == 0:
                    print "TokenHasRestrictions: 0 (not filtered)"
                else:
                    print "TokenHasRestrictions: %s (token has been filtered)" % r
            except:
                pass
            try:
                e = win32security.GetTokenInformation(tokenh, TokenElevationType) # vista
                if e == 1:
                    print "TokenElevationType: TokenElevationTypeDefault"
                elif e == 2:
                    print "TokenElevationType: TokenElevationTypeFull"
                elif e == 3:
                    print "TokenElevationType: TokenElevationTypeLimited"
                else:
                    print "TokenElevationType: Unknown (%s)" % e
            except:
                pass
            try:
                print "TokenUIAccess: %s" % win32security.GetTokenInformation(tokenh, TokenUIAccess) # doesn't work on xp
            except:
                pass
            try:
                print "TokenLinkedToken: %s" % win32security.GetTokenInformation(tokenh, TokenLinkedToken) # vista
            except:
                pass
            try:
                print "TokenLogonSid: %s" % win32security.GetTokenInformation(tokenh, TokenLogonSid) # doesn't work on xp
                print "TokenElevation: %s" % win32security.GetTokenInformation(tokenh, TokenElevation) # vista
            except:
                pass
            try:
                sid, i = win32security.GetTokenInformation(tokenh, TokenIntegrityLevel) # vista
                try:
                    accountName, domainName, accountTypeInt = win32security.LookupAccountSid(None, sid)
                    user = domainName + "\\" + accountName + " (" + win32security.ConvertSidToStringSid(sid) + ")"
                except:
                    user = win32security.ConvertSidToStringSid(sid)
                print "TokenIntegrityLevel: %s %s" % (user, i)
            except:
                pass
            try:
                m = win32security.GetTokenInformation(tokenh, TokenMandatoryPolicy) # vista
                if m == 0:
                    print "TokenMandatoryPolicy: OFF"
                elif m == 1:
                    print "TokenMandatoryPolicy: NO_WRITE_UP"
                elif m == 2:
                    print "TokenMandatoryPolicy: NEW_PROCESS_MIN"
                elif m == 3:
                    print "TokenMandatoryPolicy: POLICY_VALID_MASK"
                else:
                    print "TokenMandatoryPolicy: %s" % m
            except:
                pass
            print "Token Resitrcted Sids: " + str(win32security.GetTokenInformation(tokenh, TokenRestrictedSids))
            print "IsTokenRestricted: " + str(win32security.IsTokenRestricted(tokenh))
            print "\nToken Groups: "
            # Decode each group's attribute bitmask into readable flags.
            for tup in win32security.GetTokenInformation(tokenh, TokenGroups):
                sid = tup[0]
                attr = tup[1]
                attr_str = attr
                # Normalise to unsigned 32-bit.
                if attr < 0:
                    attr = 2**32 + attr
                attr_str_a = []
                if attr & 1:
                    # attr_str_a.append("SE_GROUP_MANDATORY")
                    attr_str_a.append("MANDATORY")
                if attr & 2:
                    # attr_str_a.append("SE_GROUP_ENABLED_BY_DEFAULT")
                    attr_str_a.append("ENABLED_BY_DEFAULT")
                if attr & 4:
                    # attr_str_a.append("SE_GROUP_ENABLED")
                    attr_str_a.append("ENABLED")
                if attr & 8:
                    # attr_str_a.append("SE_GROUP_OWNER")
                    attr_str_a.append("OWNER")
                if attr & 0x40000000:
                    # attr_str_a.append("SE_GROUP_LOGON_ID")
                    attr_str_a.append("LOGON_ID")
                attr_str = ("|".join(attr_str_a))
                try:
                    accountName, domainName, accountTypeInt = win32security.LookupAccountSid(remote_server, sid)
                    user = domainName + "\\" + accountName + " (" + win32security.ConvertSidToStringSid(sid) + ")"
                except:
                    user = win32security.ConvertSidToStringSid(sid)
                print "\t%s: %s" % (user, attr_str)
            # Link that explains how privs are added / removed from tokens:
            # http://support.microsoft.com/kb/326256
            print "\nToken Privileges:"
            privs = win32security.GetTokenInformation(tokenh, TokenPrivileges)
            for priv_tuple in privs:
                priv_val = priv_tuple[0]
                attr = priv_tuple[1]
                attr_str = "unknown_attr(" + str(attr) + ")"
                attr_str_a = []
                if attr == 0:
                    attr_str_a.append("[disabled but not removed]")
                if attr & 1:
                    # attr_str_a.append("SE_PRIVILEGE_ENABLED_BY_DEFAULT")
                    attr_str_a.append("ENABLED_BY_DEFAULT")
                if attr & 2:
                    # attr_str_a.append("SE_PRIVILEGE_ENABLED")
                    attr_str_a.append("ENABLED")
                if attr & 0x80000000:
                    # attr_str_a.append("SE_PRIVILEGE_USED_FOR_ACCESS")
                    attr_str_a.append("USED_FOR_ACCESS")
                if attr & 4:
                    # attr_str_a.append("SE_PRIVILEGE_REMOVED")
                    attr_str_a.append("REMOVED")
                if attr_str_a:
                    attr_str = ("|").join(attr_str_a)
                print "\t%s: %s" % (win32security.LookupPrivilegeName(remote_server, priv_val), attr_str)
            #print "\nProcess ACL (buggy - probably wrong):"
            #dump_acl(pid, 'process', win32security.GetTokenInformation(tokenh, TokenDefaultDacl), {'brief': 1}) # TODO can't understand ACL
            # sidObj = win32security.GetTokenInformation(tokenh, TokenOwner) # Owner returns "Administrators" instead of SYSTEM.  It's not what we want.
            # if sidObj:
            #     accountName, domainName, accountTypeInt = win32security.LookupAccountSid(remote_server, sidObj)
            #     print "User: %s\%s (type %s)" % (domainName, accountName, accountTypeInt)
        # Finally, dump filesystem permissions on the EXE and each DLL.
        if gotexe:
            print "\nFile permissions on %s:" % exe
            dump_perms(exe, 'file', {'brief': 1})
            print
        if mhs and ph:
            for mh in mhs:
                dll = win32process.GetModuleFileNameEx(ph, mh)
                print "Loaded module: %s" % dll
                dump_perms(dll, 'file', {'brief': 1})
                print
def check_processes():
    """Check the EXE and every loaded DLL of each running process for
    dangerous write permissions, recording findings as issue WPC016.

    Progress indicators: '?' process couldn't be opened, '.' process
    checked, '!' finding saved.
    """
    pids = win32process.EnumProcesses()
    # TODO also check out WMI.  It might not be running, but it could help if it is:
    #      http://groups.google.com/group/comp.lang.python/browse_thread/thread/1f50065064173ccb
    # TODO process explorer can find quite a lot more information than this script.  This script has several problems:
    # TODO I can't open 64-bit processes for a 32-bit app.  I get this error:
    # ERROR: can't open 6100: 299 EnumProcessModules, Only part of a ReadProcessMemory
    # or WriteProcessMemory request was completed.
    # TODO I can't seem to get the name of elevated processes (user running as me, but with admin privs)
    # TODO I can't get details of certain processes runnign as SYSTEM on xp (e.g. pid 4 "system", csrss.exe)
    # TODO should be able to find name (and threads?) for all processes.  Not necessarily path.
    for pid in sorted(pids):
        # TODO there's a security descriptor for each process accessible via GetSecurityInfo according to http://msdn.microsoft.com/en-us/library/ms684880%28VS.85%29.aspx
        # TODO could we connect with PROCESS_QUERY_LIMITED_INFORMATION instead on Vista+
        try:
            ph = win32api.OpenProcess(win32con.PROCESS_VM_READ | win32con.PROCESS_QUERY_INFORMATION , False, pid)
        except:
            # print "ERROR: can't connected to PID " + str(pid)
            sys.stdout.write("?")
            continue
        else:
            # Best-effort lookup of the process's user (for diagnostics only).
            user = "unknown\\unknown"
            try:
                tokenh = win32security.OpenProcessToken(ph, win32con.TOKEN_QUERY)
            except:
                pass
            else:
                sidObj, intVal = win32security.GetTokenInformation(tokenh, TokenUser)
                #source = win32security.GetTokenInformation(tokenh, TokenSource)
                if sidObj:
                    accountName, domainName, accountTypeInt = win32security.LookupAccountSid(remote_server, sidObj)
                    # print "pid=%d accountname=%s domainname=%s wow64=%s" % (pid, accountName, domainName, win32process.IsWow64Process(ph))
                    user = domainName + "\\" + accountName
                # print "PID %d is running as %s" % (pid, user)
            sys.stdout.write(".")
            # First module handle is the EXE; the rest are its DLLs.
            try:
                mhs = win32process.EnumProcessModules(ph)
                # print mhs
            except:
                continue
            mhs = list(mhs)
            exe = win32process.GetModuleFileNameEx(ph, mhs.pop(0))
            weak_perms = check_weak_write_perms(exe, 'file')
            # print_weak_perms("PID " + str(pid) + " running as " + user + ":", weak_perms)
            if weak_perms:
                save_issue("WPC016", "weak_perms_exes", weak_perms)
                sys.stdout.write("!")
            for mh in mhs:
                # print "PID %d (%s) has loaded module: %s" % (pid, exe, win32process.GetModuleFileNameEx(ph, mh))
                dll = win32process.GetModuleFileNameEx(ph, mh)
                weak_perms = check_weak_write_perms(dll, 'file')
                # print_weak_perms("DLL used by PID " + str(pid) + " running as " + user + " (" + exe + "):", weak_perms)
                if weak_perms:
                    save_issue("WPC016", "weak_perms_dlls", weak_perms)
                    sys.stdout.write("!")
    print
def check_services():
    """Enumerate all win32 services and record two classes of issue:
    WPC004 when the service binary is writable by untrusted users, and
    WPC012 when the service object's own DACL grants weak write access.
    When not verbose, prints '.' per clean service and '!' per finding.
    """
    # SC_MANAGER_ENUMERATE_SERVICE is the minimum right needed to list services.
    sch = win32service.OpenSCManager(remote_server, None, win32service.SC_MANAGER_ENUMERATE_SERVICE )
    try:
        # TODO Haven't seen this work - even when running as SYSTEM
        sd = win32service.QueryServiceObjectSecurity(sch, win32security.OWNER_SECURITY_INFORMATION | win32security.DACL_SECURITY_INFORMATION)
        print check_weak_write_perms_by_sd("Service Manager", 'service_manager', sd)
    except:
        pass
    # Need to connect to service (OpenService) with minimum privs to read DACL. Here are our options:
    #
    # http://www.pinvoke.net/default.aspx/advapi32/OpenSCManager.html?diff=y
    # SC_MANAGER_ALL_ACCESS (0xF003F) Includes STANDARD_RIGHTS_REQUIRED, in addition to all access rights in this table.
    # SC_MANAGER_CREATE_SERVICE (0x0002) Required to call the CreateService function to create a service object and add it to the database.
    # SC_MANAGER_CONNECT (0x0001) Required to connect to the service control manager.
    # SC_MANAGER_ENUMERATE_SERVICE (0x0004) Required to call the EnumServicesStatusEx function to list the services that are in the database.
    # SC_MANAGER_LOCK (0x0008) Required to call the LockServiceDatabase function to acquire a lock on the database.
    # SC_MANAGER_MODIFY_BOOT_CONFIG (0x0020) Required to call the NotifyBootConfigStatus function.
    # SC_MANAGER_QUERY_LOCK_STATUS (0x0010)Required to call the QueryServiceLockStatus function to retrieve the lock status information for the database.
    # GENERIC_READ
    # GENERIC_WRITE
    # GENERIC_EXECUTE
    # GENERIC_ALL
    services = win32service.EnumServicesStatus(sch, win32service.SERVICE_WIN32, win32service.SERVICE_STATE_ALL )
    for service in services:
        try:
            # Minimal-rights handle, just enough to read the config
            # (service[0] is the short name, service[1] the display name).
            sh = win32service.OpenService(sch, service[0] , win32service.SC_MANAGER_CONNECT )
            service_info = win32service.QueryServiceConfig(sh)
        except:
            print "WARNING: Can't open service " + service[0]
            continue
        try:
            # Re-open with GENERIC_READ so the DACL can be fetched.
            sh = win32service.OpenService(sch, service[0] , win32con.GENERIC_READ )
            sd = win32service.QueryServiceObjectSecurity(sh, win32security.OWNER_SECURITY_INFORMATION | win32security.DACL_SECURITY_INFORMATION)
        except:
            # print "Service Perms: Unknown (Access Denied)"
            continue
        # service_info[7] is the account the service runs as; service_info[3]
        # the raw (possibly quoted / argument-bearing) binary path.
        weak_perms = check_weak_write_perms_by_sd("Service \"" + service[1] + "\" (" + service[0] + ") which runs as user \"" + service_info[7] + "\"", 'service', sd)
        binary = None
        weak_perms_binary = []
        if not remote_server:
            # Binary permission checks only make sense on the local host.
            binary = get_binary(service_info[3])
            if binary:
                weak_perms_binary = check_weak_write_perms(binary, 'file')
        if weak_perms or weak_perms_binary:
            vprint("-"*40)
            vprint("Service: " + service[0])
            vprint("Description: " + service[1])
            vprint("Binary: " + service_info[3])
            if binary:
                vprint("Binary (clean): " + binary)
            else:
                vprint("Binary (clean): [Missing Binary]")
            vprint("Run as: " + service_info[7])
            vprint("Weak Perms: ")
            # service_info = win32service.QueryServiceConfig2(sh, win32service.SERVICE_CONFIG_DESCRIPTION) # long description of service. not interesting.
            # print "Service Perms: " + win32security.ConvertSecurityDescriptorToStringSecurityDescriptor(sd, win32security.SDDL_REVISION_1, win32security.DACL_SECURITY_INFORMATION)
            print_weak_perms("file", weak_perms_binary)
            if weak_perms_binary:
                save_issue("WPC004", "writable_progs", weak_perms_binary)
            print_weak_perms("service", weak_perms)
            if weak_perms:
                save_issue("WPC012", "weak_service_perms", weak_perms)
            if verbose == 0:
                sys.stdout.write("!")
        else:
            if verbose == 0:
                sys.stdout.write(".")
    print
def audit_services():
print
sch = win32service.OpenSCManager(remote_server, None, win32service.SC_MANAGER_ENUMERATE_SERVICE )
try:
# TODO Haven't seen this work - even when running as SYSTEM
sd = win32service.QueryServiceObjectSecurity(sch, win32security.OWNER_SECURITY_INFORMATION | win32security.DACL_SECURITY_INFORMATION)
print check_weak_write_perms_by_sd("Service Manager", 'service_manager', sd)
except:
#print "ERROR: Can't get security descriptor for service manager"
pass
# Need to connect to service (OpenService) with minimum privs to read DACL. Here are our options:
#
# http://www.pinvoke.net/default.aspx/advapi32/OpenSCManager.html?diff=y
# SC_MANAGER_ALL_ACCESS (0xF003F) Includes STANDARD_RIGHTS_REQUIRED, in addition to all access rights in this table.
# SC_MANAGER_CREATE_SERVICE (0x0002) Required to call the CreateService function to create a service object and add it to the database.
# SC_MANAGER_CONNECT (0x0001) Required to connect to the service control manager.
# SC_MANAGER_ENUMERATE_SERVICE (0x0004) Required to call the EnumServicesStatusEx function to list the services that are in the database.
# SC_MANAGER_LOCK (0x0008) Required to call the LockServiceDatabase function to acquire a lock on the database.
# SC_MANAGER_MODIFY_BOOT_CONFIG (0x0020) Required to call the NotifyBootConfigStatus function.
# SC_MANAGER_QUERY_LOCK_STATUS (0x0010)Required to call the QueryServiceLockStatus function to retrieve the lock status information for the database.
# GENERIC_READ
# GENERIC_WRITE
# GENERIC_EXECUTE
# GENERIC_ALL
services = win32service.EnumServicesStatus(sch, win32service.SERVICE_WIN32, win32service.SERVICE_STATE_ALL )
for service in services:
sh = win32service.OpenService(sch, service[0] , win32service.SC_MANAGER_CONNECT )
service_info = win32service.QueryServiceConfig(sh)
binary = None
if remote_server:
print "WARNING: Running agianst remote server. Checking perms of .exe not implemented."
else:
binary = get_binary(service_info[3])
print "-"*64
print("Service: " + handle_unicode(service[0]))
print("Description: " + handle_unicode(service[1]))
print("Binary: " + handle_unicode(service_info[3]))
if binary:
print("Binary (clean): " + binary)
else:
if remote_server:
print("Binary (clean): [N/A Running remotely]")
else:
print("Binary (clean): [Missing Binary/Remote]")
print("Run as: " + service_info[7])
print "\nFile Permissions on executable %s:" % binary
if binary:
dump_perms(binary, 'file', {'brief': 1})
else:
print "WARNING: Can't get full path of binary. Skipping."
print "\nPermissions on service:"
try:
sh = win32service.OpenService(sch, service[0] , win32con.GENERIC_READ )
except:
print "ERROR: OpenService failed"
try:
sd = win32service.QueryServiceObjectSecurity(sh, win32security.OWNER_SECURITY_INFORMATION | win32security.DACL_SECURITY_INFORMATION)
except:
print "ERROR: QueryServiceObjectSecurity didn't get security descriptor for service"
dump_sd("Service \"" + service[1] + "\" (" + service[0] + ") which runs as user \"" + service_info[7] + "\"", 'service', sd, {'brief': 1})
print "\nPermissions on registry data:"
print "WARNING: Not implmented yet"
# service_info = win32service.QueryServiceConfig2(sh, win32service.SERVICE_CONFIG_DESCRIPTION) # long description of service. not interesting.
# print "Service Perms: " + win32security.ConvertSecurityDescriptorToStringSecurityDescriptor(sd, win32security.SDDL_REVISION_1, win32security.DACL_SECURITY_INFORMATION)
print
def vprint(string):
if (verbose):
print string
def get_binary(binary_dirty):
    """Given the raw binary path field of a service (which may be quoted
    and/or carry trailing arguments), return the path of an existing
    executable file, or None if one can't be located.

    Tries progressively longer space-joined prefixes of the command line,
    with and without a ".exe" suffix, and (on 64-bit windows) with the
    system32 -> syswow64 substitution applied.
    """
    quoted = re.search('^[\s]*?"([^"]+)"', binary_dirty)
    if quoted and os.path.exists(quoted.group(1)):
        return quoted.group(1)
    if quoted:
        # Quoted but not an existing file: fall through and treat the
        # quoted portion like an unquoted command line.
        binary_dirty = quoted.group(1)
    global on64bitwindows
    words = binary_dirty.split(" ")
    for count in range(1, len(words) + 1):
        candidate = " ".join(words[:count])
        if os.path.exists(candidate) and os.path.isfile(candidate):
            return candidate
        if os.path.exists(candidate + ".exe") and os.path.isfile(candidate + ".exe"):
            return candidate + ".exe"
        if on64bitwindows:
            candidate2 = candidate.replace("system32", "syswow64")
            if os.path.exists(candidate2) and os.path.isfile(candidate2):
                return candidate2
            if os.path.exists(candidate2 + ".exe") and os.path.isfile(candidate2 + ".exe"):
                return candidate2 + ".exe"
    return None
def print_weak_perms(type, weak_perms, options={}):
brief = 0
if options:
if options['brief']:
brief = 1
for perms in weak_perms:
object_name = perms[0]
domain = perms[1]
principle = perms[2]
perm = perms[3]
if len(perms) == 5:
acl_type = perms[4]
if acl_type == "ALLOW":
acl_type = ""
else:
acl_type = acl_type + " "
else:
acl_type = ""
slash = "\\"
if domain == "":
slash = ""
if brief:
print "\t%s%s%s%s: %s" % (acl_type, domain, slash, principle, perm)
else:
#print repr((acl_type, domain, slash, principle, perm, type, object_name))
print u"\t%s%s%s%s has permission %s on %s %s" % (handle_unicode(acl_type), handle_unicode(domain), handle_unicode(slash), handle_unicode(principle), handle_unicode(perm), handle_unicode(type), handle_unicode(object_name))
def check_path(path, issue_no):
    """Check every directory on a semicolon-separated PATH string - and the
    executable-ish files inside each - for weak write permissions, saving
    any findings under issue_no.  Writes '!' or '.' per directory as a
    progress indicator."""
    exts = ('exe', 'com', 'bat', 'dll') # TODO pl, rb, py, php, inc, asp, aspx, ocx, vbs, more?
    for dir in set(path.split(';')):
        found_weak = False
        weak_perms = check_weak_write_perms(dir, 'directory')
        if weak_perms:
            save_issue(issue_no, "weak_perms_dir", weak_perms)
            print_weak_perms("Directory", weak_perms)
            found_weak = True
        for ext in exts:
            for file in glob.glob(dir + '\*.' + ext):
                weak_perms = check_weak_write_perms(file, 'file')
                if weak_perms:
                    save_issue(issue_no, "weak_perms_exe", weak_perms)
                    print_weak_perms("File", weak_perms)
                    found_weak = True
        sys.stdout.write("!" if found_weak else ".")
def get_user_paths():
    """Return a list of (user_sid_string, path_string) tuples: the per-user
    PATH value from each HKEY_USERS\<sid>\Environment key that has one.

    Users without an Environment key or PATH value are skipped silently.
    Returns an empty list if HKEY_USERS itself can't be opened - the
    original returned the int 0 here, which crashed iterating callers
    such as check_user_paths() with a TypeError.
    """
    try:
        keyh = win32api.RegOpenKeyEx(win32con.HKEY_USERS, None , 0, win32con.KEY_ENUMERATE_SUB_KEYS | win32con.KEY_QUERY_VALUE | win32con.KEY_READ)
    except:
        return []  # was "return 0" - not iterable
    paths = []
    subkeys = win32api.RegEnumKeyEx(keyh)
    for subkey in subkeys:
        try:
            subkeyh = win32api.RegOpenKeyEx(keyh, subkey[0] + "\\Environment" , 0, win32con.KEY_ENUMERATE_SUB_KEYS | win32con.KEY_QUERY_VALUE | win32con.KEY_READ)
        except:
            pass
        else:
            # (dropped an unused RegQueryInfoKey call whose results were
            # never read)
            try:
                path, type = win32api.RegQueryValueEx(subkeyh, "PATH")
                paths.append((subkey[0], path))
            except:
                pass
    return paths
def get_system_path():
    """Return the system-wide PATH value from the registry, or None if the
    key or value can't be read."""
    # HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\Session Manager\Environment
    key_string = 'SYSTEM\CurrentControlSet\Control\Session Manager\Environment'
    try:
        keyh = win32api.RegOpenKeyEx(win32con.HKEY_LOCAL_MACHINE, key_string , 0, win32con.KEY_ENUMERATE_SUB_KEYS | win32con.KEY_QUERY_VALUE | win32con.KEY_READ)
        path, type = win32api.RegQueryValueEx(keyh, "PATH")
    except:
        return None
    return path
#name=sys.argv[1]
#if not os.path.exists(name):
#print name, "does not exist!"
#sys.exit()
def check_user_paths():
    """Check the per-user PATH of every user found under HKEY_USERS for
    weakly-permissioned directories/executables (issue WPC015).  While a
    user's own PATH is being checked, that user is temporarily added to
    the trusted-principles set so their own write access isn't flagged.
    """
    for user_path in get_user_paths():
        user_sid_s = user_path[0]
        try:
            user_sid = win32security.ConvertStringSidToSid(user_sid_s)
            principle, domain, type = win32security.LookupAccountSid(remote_server, user_sid)
            user_fq = domain + "\\" + principle
        except:
            print "WARNING: Can't convert sid %s to name. Skipping." % user_sid_s
            continue
        path = user_path[1]
        vprint("Checking path of %s" % user_fq)
        global tmp_trusted_principles_fq
        # NOTE(review): "(user_fq)" is just user_fq (a string), not a
        # 1-tuple - a trailing comma may have been intended.  With a plain
        # string, any "in" test downstream becomes a substring match.
        # Confirm against the consumer of tmp_trusted_principles_fq
        # before changing.
        tmp_trusted_principles_fq = (user_fq)
        check_path(path, "WPC015")
        tmp_trusted_principles_fq = ()
def check_current_path():
    """Check the current user's PATH for weak permissions (issue WPC014),
    temporarily treating the current user as trusted so their own write
    access isn't flagged."""
    vprint("Checking current user's PATH")
    global tmp_trusted_principles_fq
    # NOTE(review): parentheses here do NOT make a tuple - this assigns a
    # plain "DOMAIN\user" string.  A trailing comma may have been
    # intended; confirm how tmp_trusted_principles_fq is consumed.
    tmp_trusted_principles_fq = (os.environ['userdomain'] + "\\" + os.environ['username'])
    check_path(os.environ['path'], "WPC014")
    tmp_trusted_principles_fq = ()
def check_system_path():
vprint("Checking system PATH")
check_path(get_system_path(), "WPC013")
def check_paths():
    """Run every PATH-related permission check: system PATH, the current
    user's PATH, then each user's PATH from the registry."""
    for path_check in (check_system_path, check_current_path, check_user_paths):
        path_check()
    print
def check_drives():
    """Inspect every fixed local drive: flag weak write permissions on an
    NTFS drive root (WPC010) and record non-NTFS filesystems, which can't
    enforce file permissions at all (WPC011)."""
    for drive in win32api.GetLogicalDriveStrings().split("\x00"):
        sys.stdout.write(".")
        if win32file.GetDriveType(drive) != win32con.DRIVE_FIXED:
            continue
        # Element 4 of GetVolumeInformation is the filesystem name.
        fs = win32api.GetVolumeInformation(drive)[4]
        if fs == 'NTFS':
            weak_perms = check_weak_write_perms(drive, 'directory')
            if weak_perms:
                sys.stdout.write(".")
                save_issue("WPC010", "writable_drive_root", weak_perms)
        elif fs == 'FAT':
            save_issue_string("WPC011", "fat_fs_drives", "Fixed drive " + drive + ": has " + fs + " filesystem (FAT does not support file permissions)" )
            sys.stdout.write("!")
        elif fs == 'FAT32':
            save_issue_string("WPC011", "fat_fs_drives", "Fixed drive " + drive + ": has " + fs + " filesystem (FAT32 does not support file permissions)" )
            sys.stdout.write("!")
        else:
            save_issue_string("WPC011", "fat_fs_drives", "Fixed drive " + drive + ": has " + fs + " filesystem (Not NTFS - might not be secure)" )
            sys.stdout.write("!")
    print
def check_shares():
resume = 0;
try:
(sharelist, total, resume) = win32net.NetShareEnum(None, 502, resume, 9999)
for share in sharelist:
sys.stdout.write(".")
sd = share['security_descriptor']
# print "%s (%s) %s type=%s" % (share['netname'], share['path'], share['remark'], share['type'])
if sd:
weak_perms = check_weak_write_perms_by_sd("Share \"" + share['netname'] + "\" (" + share['path'] + ") ", 'share', sd)
if weak_perms:
save_issue("WPC017", "non_admin_shares", weak_perms)
sys.stdout.write("!")
except:
print "[E] Can't check shares - not enough privs?"
# TODO not option to call this yet
def audit_shares():
    """Verbosely dump every SMB share's configuration and security
    descriptor (NetShareEnum at info level 502)."""
    print "\n[+] Shares\n"
    resume = 0;
    try:
        (sharelist, total, resume) = win32net.NetShareEnum(remote_server, 502, resume, 999999)
        #print win32net.NetShareGetInfo(remote_server, ?, 0) # do we need this?
        for share in sharelist:
            # Determine type of share
            types = []
            if share['type'] & getattr(win32netcon, "STYPE_SPECIAL"):
                # print "Share type: "
                types.append("STYPE_SPECIAL")
                share['type'] = share['type'] & 3 # mask off "special"
            #print share['type']
            # Match the remaining bits against the known share type names.
            for stype in share_types:
                if share['type'] == getattr(win32netcon, stype):
                    types.append(stype)
                    #print "Share type: " + stype
                    break
            print "---------------"
            print "Share: " + share['netname']
            print "Path: " + share['path']
            print "Remark: " + share['remark']
            print "Type(s): " + "|".join(types)
            print "Reserved: %s" % share['reserved']
            print "Passwd: %s" % share['passwd']
            print "Current Uses: %s" % share['current_uses']
            print "Max Uses: %s" % share['max_uses']
            print "Permissions: %s" % share['permissions']
            print "Sec. Desc.: "
            dump_sd(share['netname'], 'share', share['security_descriptor'])
    except:
        print "[E] Couldn't get share information"
    # NOTE(review): this trailing header looks like a copy/paste leftover
    # from audit_host_info's server-info section - nothing is printed
    # under it here.  Confirm before removing.
    print "\n[+] Server Info (NetServerGetInfo 102)\n"
def check_progfiles():
    """Walk %ProgramFiles% (and %ProgramFiles(x86)% when present) looking
    for directories and executable-ish files that untrusted users can
    write to; findings are saved as WPC001.  Progress dots are rate
    limited to one '.'/'!' per ~10 directories because the tree is large.
    """
    # %ProgramFiles%
    # %ProgramFiles(x86)%
    prog_dirs = []
    # re_exe = re.compile('\.exe$|\.com$|\.bat$|\.dll$', re.IGNORECASE)
    exts = ('exe', 'com', 'bat', 'dll') # TODO pl, rb, py, php, inc, asp, aspx, ocx, vbs, more?
    if os.getenv('ProgramFiles'):
        prog_dirs.append(os.environ['ProgramFiles'])
    if os.getenv('ProgramFiles(x86)'):
        prog_dirs.append(os.environ['ProgramFiles(x86)'])
    dot_count = 0
    weak_flag = 0
    for prog_dir in prog_dirs:
        # print "Looking for programs under %s..." % prog_dir
        for root, dirs, files in os.walk(prog_dir):
            #print "root=%s, dirs=%s, files=%s" % (root, dirs, files)
            # for file in files:
            # m = re_exe.search(file)
            # if m is None:
            # continue
            # #print "Checking file %s" % os.path.join(root, file)
            # weak_perms = check_weak_write_perms(os.path.join(root, file), 'file')
            # if weak_perms:
            # print_weak_perms("File", weak_perms)
            for file in dirs:
                #print "Checking dir %s" % os.path.join(root, file)
                # NOTE(review): the 'file' type string is passed for what is
                # actually a directory here - confirm check_weak_write_perms
                # treats both the same before changing it.
                weak_perms = check_weak_write_perms(os.path.join(root, file), 'file')
                if weak_perms:
                    #print_weak_perms("Directory", weak_perms)
                    save_issue("WPC001", "writable_dirs", weak_perms)
                    weak_flag = 1
                dir = file
                # Also check the executable-ish files directly inside this
                # directory.
                for ext in exts:
                    for f in glob.glob(u"%s\\%s\\*.%s" % (handle_unicode(root),handle_unicode(dir),handle_unicode(ext))): #root + "\\" + dir + '\*.' + ext):
                        #print "Processing " + f
                        weak_perms = check_weak_write_perms(f, 'file')
                        if weak_perms:
                            print_weak_perms("File", weak_perms)
                            save_issue("WPC001", "writable_progs", weak_perms)
                            weak_flag = 1
                dot_count = dot_count + 1;
                # Don't print out all the dots. There are too many!
                if dot_count > 10:
                    if weak_flag == 1:
                        sys.stdout.write("!")
                    else:
                        sys.stdout.write(".")
                    dot_count = 0;
                    weak_flag = 0;
    print
def check_patches():
    """Very rough patch audit: search `systeminfo` output for each KB
    number in the global kb_nos list.  NOTE(review): per the TODO below
    this approach is known to be unreliable (fixes delivered in a service
    pack won't match a KB search)."""
    # TODO: This is more difficult than I'd hoped. You can't just search for the KB number: XP will appear to be vulnerable to dcom. Need to search for KB number or SP2 in this case.
    # from subprocess import Popen, PIPE
    patchlist = Popen(["systeminfo"], stdout=PIPE).communicate()[0]
    for kb_no in kb_nos:
        print "Searching for " + kb_no
        if re.search(kb_no, patchlist):
            print "found"
def print_section(title):
if (verbose != 0):
print "%s\n%s\n%s\n" % ("="*32,title,"="*32)
else:
sys.stdout.write(title + ": ")
# http://www.daniweb.com/code/snippet216539.html
def int2bin(n):
    """Return the binary string representation of integer n (no prefix).

    Negative values are rendered as 32-bit two's complement.  Bug fix:
    the original wrote "n + 2^32", but ^ is XOR in Python (and binds
    looser than +), so negative inputs produced garbage; 2**32 is the
    intended power of two.
    """
    bStr = ''
    if n < 0:
        n = n + 2**32  # was 2^32 - XOR, not exponentiation
    if n == 0:
        return '0'
    while n > 0:
        bStr = str(n % 2) + bStr
        n = n >> 1
    return bStr
def impersonate(username, password, domain):
if username:
print "Using alternative credentials:"
print "Username: " + str(username)
print "Password: " + str(password)
print "Domain: " + str(domain)
handle = win32security.LogonUser( username, domain, password, win32security.LOGON32_LOGON_NEW_CREDENTIALS, win32security.LOGON32_PROVIDER_WINNT50 )
win32security.ImpersonateLoggedOnUser( handle )
else:
print "Running as current user. No logon creds supplied (-u, -d, -p)."
print
def audit_passpol():
print
print "[+] NetUserModalsGet 0,1,2,3"
print
try:
data = win32net.NetUserModalsGet(remote_server, 0)
for key in data.keys():
print "%s: %s" % (key, data[key])
data = win32net.NetUserModalsGet(remote_server, 1)
for key in data.keys():
print "%s: %s" % (key, data[key])
data = win32net.NetUserModalsGet(remote_server, 2)
for key in data.keys():
if key == 'domain_id':
print "%s: %s" % (key, win32security.ConvertSidToStringSid(data[key]))
elif key == 'lockout_threshold' and data[key] == '0':
print "%s: %s (accounts aren't locked out)" % (key, data[key])
else:
print "%s: %s" % (key, data[key])
data = win32net.NetUserModalsGet(remote_server, 3)
for key in data.keys():
if key == 'lockout_threshold' and data[key] == 0:
print "%s: %s (accounts aren't locked out)" % (key, data[key])
else:
print "%s: %s" % (key, data[key])
except:
print "[E] Couldn't get NetUserModals data"
# Recursive function to find group members (and the member of any groups in those groups...)
def get_group_members(server, group, depth):
resume = 0
indent = "\t" * depth
members = []
while True:
try:
m, total, resume = win32net.NetLocalGroupGetMembers(server, group, 2, resume, 999999)
except:
break
for member in m:
if member['sidusage'] == 4:
type = "local group"
g = member['domainandname'].split("\\")
print indent + member['domainandname'] + " (" + str(type) + ")"
get_group_members(server, g[1], depth + 1)
elif member['sidusage'] == 2:
type = "domain group"
print indent + member['domainandname'] + " (" + str(type) + ")"
elif member['sidusage'] == 1:
type = "user"
print indent + member['domainandname'] + " (" + str(type) + ")"
else:
type = "type " + str(member['sidusage'])
print indent + member['domainandname'] + " (" + str(type) + ")"
if resume == 0:
break
def audit_admin_users():
print
for group in ("administrators", "domain admins", "enterprise admins"):
print "\n[+] Members of " + group + ":"
get_group_members(remote_server, group, 0)
print
# It might be interesting to look up who has powerful privs, but LsaEnumerateAccountsWithUserRight doesn't seem to work as a low priv user
# SE_ASSIGNPRIMARYTOKEN_NAME TEXT("SeAssignPrimaryTokenPrivilege") Required to assign the primary token of a process. User Right: Replace a process-level token.
# SE_BACKUP_NAME TEXT("SeBackupPrivilege") Required to perform backup operations. This privilege causes the system to grant all read access control to any file, regardless of the access control list (ACL) specified for the file. Any access request other than read is still evaluated with the ACL. This privilege is required by the RegSaveKey and RegSaveKeyExfunctions. The following access rights are granted if this privilege is held: READ_CONTROL ACCESS_SYSTEM_SECURITY FILE_GENERIC_READ FILE_TRAVERSE User Right: Back up files and directories.
# SE_CREATE_PAGEFILE_NAME TEXT("SeCreatePagefilePrivilege") Required to create a paging file. User Right: Create a pagefile.
# SE_CREATE_TOKEN_NAME TEXT("SeCreateTokenPrivilege") Required to create a primary token. User Right: Create a token object.
# SE_DEBUG_NAME TEXT("SeDebugPrivilege") Required to debug and adjust the memory of a process owned by another account. User Right: Debug programs.
# SE_ENABLE_DELEGATION_NAME TEXT("SeEnableDelegationPrivilege") Required to mark user and computer accounts as trusted for delegation. User Right: Enable computer and user accounts to be trusted for delegation.
# SE_LOAD_DRIVER_NAME TEXT("SeLoadDriverPrivilege") Required to load or unload a device driver. User Right: Load and unload device drivers.
# SE_MACHINE_ACCOUNT_NAME TEXT("SeMachineAccountPrivilege") Required to create a computer account. User Right: Add workstations to domain.
# SE_MANAGE_VOLUME_NAME TEXT("SeManageVolumePrivilege") Required to enable volume management privileges. User Right: Manage the files on a volume.
# SE_RELABEL_NAME TEXT("SeRelabelPrivilege") Required to modify the mandatory integrity level of an object. User Right: Modify an object label.
# SE_RESTORE_NAME TEXT("SeRestorePrivilege") Required to perform restore operations. This privilege causes the system to grant all write access control to any file, regardless of the ACL specified for the file. Any access request other than write is still evaluated with the ACL. Additionally, this privilege enables you to set any valid user or group SID as the owner of a file. This privilege is required by the RegLoadKey function. The following access rights are granted if this privilege is held: WRITE_DAC WRITE_OWNER ACCESS_SYSTEM_SECURITY FILE_GENERIC_WRITE FILE_ADD_FILE FILE_ADD_SUBDIRECTORY DELETE User Right: Restore files and directories.
# SE_SHUTDOWN_NAME TEXT("SeShutdownPrivilege") Required to shut down a local system. User Right: Shut down the system.
# SE_SYNC_AGENT_NAME TEXT("SeSyncAgentPrivilege") Required for a domain controller to use the LDAP directory synchronization services. This privilege enables the holder to read all objects and properties in the directory, regardless of the protection on the objects and properties. By default, it is assigned to the Administrator and LocalSystem accounts on domain controllers. User Right: Synchronize directory service data.
# SE_TAKE_OWNERSHIP_NAME TEXT("SeTakeOwnershipPrivilege") Required to take ownership of an object without being granted discretionary access. This privilege allows the owner value to be set only to those values that the holder may legitimately assign as the owner of an object. User Right: Take ownership of files or other objects.
# SE_TCB_NAME TEXT("SeTcbPrivilege") This privilege identifies its holder as part of the trusted computer base. Some trusted protected subsystems are granted this privilege. User Right: Act as part of the operating system.
# SE_TRUSTED_CREDMAN_ACCESS_NAME TEXT("SeTrustedCredManAccessPrivilege") Required to access Credential Manager as a trusted caller. User Right: Access Credential Manager as a trusted caller.
# Need: SE_ENABLE_DELEGATION_NAME, SE_MANAGE_VOLUME_NAME, SE_RELABEL_NAME, SE_SYNC_AGENT_NAME, SE_TRUSTED_CREDMAN_ACCESS_NAME
# ph = win32security.LsaOpenPolicy(remote_server, win32security.POLICY_VIEW_LOCAL_INFORMATION | win32security.POLICY_LOOKUP_NAMES)
# for priv in (SE_ASSIGNPRIMARYTOKEN_NAME, SE_BACKUP_NAME, SE_CREATE_PAGEFILE_NAME, SE_CREATE_TOKEN_NAME, SE_DEBUG_NAME, SE_LOAD_DRIVER_NAME, SE_MACHINE_ACCOUNT_NAME, SE_RESTORE_NAME, SE_SHUTDOWN_NAME, SE_TAKE_OWNERSHIP_NAME, SE_TCB_NAME):
# print "Looking up who has " + priv + "priv"
# try:
# sids = win32security.LsaEnumerateAccountsWithUserRight(ph, priv)
# print sids
# except:
# print "[E] Lookup failed"
def audit_logged_in():
resume = 0
print "\n[+] Logged in users:"
try:
while True:
users, total, resume = win32net.NetWkstaUserEnum(remote_server, 1 , resume , 999999 )
for user in users:
print "User logged in: Logon Server=\"%s\" Logon Domain=\"%s\" Username=\"%s\"" % (user['logon_server'], user['logon_domain'], user['username'])
if resume == 0:
break
except:
print "[E] Failed"
def audit_host_info():
print "\n"
if remote_server:
print "Querying remote server: " + remote_server
# Only works on local host
#win32net.NetGetJoinInformation()
# This looks interesting, but doesn't seem to work. Maybe unsupported legacy api.
#pywintypes.error: (50, 'NetUseEnum', 'The request is not supported.')
#print
#print "[+] Getting Net Use info"
#print
#resume = 0
#use, total, resume = win32net.NetUseEnum(remote_server, 2, resume , 999999 )
#print use
print
print "[+] Workstation Info (NetWkstaGetInfo 102)"
print
try:
#print win32net.NetWkstaGetInfo(remote_server, 100)
#print win32net.NetWkstaGetInfo(remote_server, 101)
serverinfo = win32net.NetWkstaGetInfo(remote_server, 102)
print "Computer Name: %s" % serverinfo['computername']
print "Langroup: %s" % serverinfo['langroup']
print "OS: %s.%s" % (serverinfo['ver_major'], serverinfo['ver_minor'])
print "Logged On Users: %s" % serverinfo['logged_on_users']
print "Lanroot: %s" % serverinfo['lanroot']
if serverinfo['platform_id'] & win32netcon.PLATFORM_ID_NT:
print "Platform: PLATFORM_ID_NT (means NT family, not NT4)"
if serverinfo['platform_id'] == win32netcon.PLATFORM_ID_OS2:
print "Platform: PLATFORM_ID_OS2"
if serverinfo['platform_id'] == win32netcon.PLATFORM_ID_DOS:
print "Platform: PLATFORM_ID_DOS"
if serverinfo['platform_id'] == win32netcon.PLATFORM_ID_OSF:
print "Platform: PLATFORM_ID_OSF"
if serverinfo['platform_id'] == win32netcon.PLATFORM_ID_VMS:
print "Platform: PLATFORM_ID_VMS"
except:
print "[E] Couldn't get Workstation Info"
print
print "[+] Server Info (NetServerGetInfo 102)"
print
try:
#print "NetServerGetInfo 100" + str(win32net.NetServerGetInfo(remote_server, 100))
#print "NetServerGetInfo 101" + str(win32net.NetServerGetInfo(remote_server, 101))
serverinfo = win32net.NetServerGetInfo(remote_server, 102)
print "Name: %s" % serverinfo['name']
print "Comment: %s" % serverinfo['comment']
print "OS: %s.%s" % (serverinfo['version_major'], serverinfo['version_minor'])
print "Userpath: %s" % serverinfo['userpath']
print "Hidden: %s" % serverinfo['hidden']
if serverinfo['platform_id'] & win32netcon.PLATFORM_ID_NT:
print "Platform: PLATFORM_ID_NT (means NT family, not NT4)"
if serverinfo['platform_id'] == win32netcon.PLATFORM_ID_OS2:
print "Platform: PLATFORM_ID_OS2"
if serverinfo['platform_id'] == win32netcon.PLATFORM_ID_DOS:
print "Platform: PLATFORM_ID_DOS"
if serverinfo['platform_id'] == win32netcon.PLATFORM_ID_OSF:
print "Platform: PLATFORM_ID_OSF"
if serverinfo['platform_id'] == win32netcon.PLATFORM_ID_VMS:
print "Platform: PLATFORM_ID_VMS"
for sv_type in sv_types:
if serverinfo['type'] & getattr(win32netcon, sv_type):
print "Type: " + sv_type
except:
print "[E] Couldn't get Server Info"
print
print "[+] LsaQueryInformationPolicy"
print
try:
ph = win32security.LsaOpenPolicy(remote_server, win32security.POLICY_VIEW_LOCAL_INFORMATION | win32security.POLICY_LOOKUP_NAMES)
print "PolicyDnsDomainInformation:"
print win32security.LsaQueryInformationPolicy(ph, win32security.PolicyDnsDomainInformation)
print "PolicyDnsDomainInformation:"
print win32security.LsaQueryInformationPolicy(ph, win32security.PolicyPrimaryDomainInformation)
print "PolicyPrimaryDomainInformation:"
print win32security.LsaQueryInformationPolicy(ph, win32security.PolicyAccountDomainInformation)
print "PolicyLsaServerRoleInformation:"
print win32security.LsaQueryInformationPolicy(ph, win32security.PolicyLsaServerRoleInformation)
except:
print "[E] Couldn't LsaOpenPolicy"
# DsBindWithCred isn't available from python!
# IADsComputer looks useful, but also isn't implemented:
# http://msdn.microsoft.com/en-us/library/aa705980%28v=VS.85%29.aspx
# The following always seems to fail:
# need a dc hostname as remote_server
# and domain
#try:
# hds = win32security.DsBind(remote_server, remote_domain)
# print "hds: " + hds
# print "DsListDomainsInSite: "+ str(win32security.DsListDomainsInSite(hds))
#except:
# pass
# domain can be null. i think domainguid can be null. sitename null. flags = 0.
# lists roles recognised by the server (fsmo roles?)
# win32security.DsListRoles(hds)
# list misc info for a server
# win32security.DsListInfoForServer(hds, server)
# but how to get a list of sites?
# win32security.DsListServersInSite(hds, site )
# win32security.DsCrackNames(hds, flags , formatOffered , formatDesired , names )
# ...For example, user objects can be identified by SAM account names (Domain\UserName), user principal name ([email protected]), or distinguished name.
print
print "[+] Getting domain controller info"
print
try:
domain = None # TODO: could call of each domain if we had a list
print "PDC: " + win32net.NetGetDCName(remote_server, domain)
# Try to list some domain controllers for the remote host
# There are better ways of doing this, but they don't seem to be available via python!
dc_seen = {}
for filter in (0, 0x00004000, 0x00000080, 0x00001000, 0x00000400, 0x00000040, 0x00000010):
dc_info = win32security.DsGetDcName(remote_server, None, None, None, filter)
if not dc_info['DomainControllerAddress'] in dc_seen:
print "\n[+] Found DC\n"
for k in dc_info:
print k + ": " + str(dc_info[k])
dc_seen[dc_info['DomainControllerAddress']] = 1
print "\nWARNING: Above is not necessarily a complete list of DCs\n"
#print "Domain controller: " + str(win32security.DsGetDcName(remote_server, None, None, None, 0)) # any dc
#print "Domain controller: " + str(win32security.DsGetDcName(remote_server, None, None, None, 0x00004000)) # not the system we connect to
#print "Domain controller: " + str(win32security.DsGetDcName(remote_server, None, None, None, 0x00000080)) # pdc
#print "Domain controller: " + str(win32security.DsGetDcName(remote_server, None, None, None, 0x00001000)) # writeable
#print "Domain controller: " + str(win32security.DsGetDcName(remote_server, None, None, None, 0x00000400)) # kerberos
#print "Domain controller: " + str(win32security.DsGetDcName(remote_server, None, None, None, 0x00000040)) # gc
#print "Domain controller: " + str(win32security.DsGetDcName(remote_server, None, None, None, 0x00000010)) # directory service
except:
print "[E] Couldn't get DC info"
# This function sounds very much like what lservers.exe does, but the server name must be None
# according to http://msdn.microsoft.com/en-us/library/aa370623%28VS.85%29.aspx. No use to us.
# print win32net.NetServerEnum(remote_server, 100 or 101, win32netcon.SV_TYPE_ALL, "SOMEDOMAIN.COM", 0, 999999)
def audit_user_group():
    """Enumerate local groups, non-local (domain/global) groups and users on
    remote_server via the pywin32 Net* APIs, printing each principal's SID,
    its memberships and -- in verbose mode -- its LSA privileges."""
    # Policy handle used later by LsaEnumerateAccountRights.  If the open
    # fails, 'ph' stays unbound and the verbose privilege lookups below
    # fail inside their own try/except blocks.
    try:
        ph = win32security.LsaOpenPolicy(remote_server, win32security.POLICY_VIEW_LOCAL_INFORMATION | win32security.POLICY_LOOKUP_NAMES)
    except:
        pass
    print
    print "[+] Local Groups"
    print
    # Page through NetLocalGroupEnum using its resume handle; 999999 is the
    # preferred maximum buffer length per call.
    resume = 0
    groups = []
    while True:
        try:
            g, total, resume = win32net.NetLocalGroupEnum(remote_server, 0, resume, 999999)
            groups = groups + g
            if resume == 0:
                break
        except:
            print "[E] NetLocalGroupEnum failed"
            break
    for group in groups:
        members = []
        # NOTE(review): this loop reuses 'resume' left over from the
        # enumeration above (it is 0 on entry because that loop only exits
        # once the handle is exhausted) -- confirm this is intentional.
        while True:
            m, total, resume = win32net.NetLocalGroupGetMembers(remote_server, group['name'], 1, resume, 999999)
            for member in m:
                members.append(member['name'])
            if resume == 0:
                break
        sid, s, i = win32security.LookupAccountName(remote_server, group['name'])
        sid_string = win32security.ConvertSidToStringSid(sid)
        print "Group %s has sid %s" % (group['name'], sid_string)
        for m in members:
            print "Group %s has member: %s" % (group['name'], m)
        if verbose:
            # Privileges are attached to the group's SID in the LSA policy.
            try:
                privs = win32security.LsaEnumerateAccountRights(ph, sid)
                for priv in privs:
                    print "Group %s has privilege: %s" % (group['name'], priv)
            except:
                print "Group %s: privilege lookup failed " % (group['name'])
    print
    print "[+] Non-local Groups"
    print
    # Same pattern for domain ("global") groups via NetGroupEnum.
    resume = 0
    groups = []
    while True:
        try:
            g, total, resume = win32net.NetGroupEnum(remote_server, 0, resume, 999999)
            groups = groups + g
            if resume == 0:
                break
        except:
            print "[E] NetGroupEnum failed"
            break
    for group in groups:
        members = []
        while True:
            try:
                m, total, resume = win32net.NetGroupGetUsers(remote_server, group['name'], 0, resume, 999999)
                for member in m:
                    members.append(member['name'])
                if resume == 0:
                    break
            except:
                print "[E] NetGroupEnum failed"
                break
        sid, s, i = win32security.LookupAccountName(remote_server, group['name'])
        sid_string = win32security.ConvertSidToStringSid(sid)
        print "Group %s has sid %s" % (group['name'], sid_string)
        for m in members:
            print "Group %s has member: %s" % (group['name'], m)
        if verbose:
            try:
                privs = win32security.LsaEnumerateAccountRights(ph, sid)
                for priv in privs:
                    print "Group %s has privilege: %s" % (group['name'], priv)
            except:
                print "Group %s has no privileges" % (group['name'])
    print "\n[+] Users\n"
    resume = 0
    users = []
    # NetUserEnum info level 11 returns full user detail; level 0 only names.
    if verbose:
        level = 11
    else:
        level = 0
    while True:
        try:
            # u, total, resume = win32net.NetUserEnum(remote_server, 11, 0, resume, 999999) # lots of user detail
            # u, total, resume = win32net.NetUserEnum(remote_server, 0, 0, resume, 999999) # just the username
            u, total, resume = win32net.NetUserEnum(remote_server, level, 0, resume, 999999)
            for user in u:
                if verbose:
                    # 'parms' is skipped: it is free-form application data.
                    for k in user:
                        if k != 'parms':
                            print k + "\t: " + str(user[k])
                    print
                users.append(user['name'])
            if resume == 0:
                break
        except:
            print "[E] NetUserEnum failed"
            break
    for user in users:
        gprivs = []
        sid, s, i = win32security.LookupAccountName(remote_server, user)
        sid_string = win32security.ConvertSidToStringSid(sid)
        print "User %s has sid %s" % (user, sid_string)
        groups = win32net.NetUserGetLocalGroups(remote_server, user, 0)
        for group in groups:
            # Accumulate each local group's privileges so the union can be
            # reported against the user below.
            gsid, s, i = win32security.LookupAccountName(remote_server, group)
            try:
                privs = win32security.LsaEnumerateAccountRights(ph, gsid)
                gprivs = list(list(gprivs) + list(privs))
            except:
                pass
            print "User %s is in this local group: %s" % (user, group)
        group_list = win32net.NetUserGetGroups(remote_server, user)
        groups = []
        for g in group_list:
            groups.append(g[0])
        for group in groups:
            print "User %s is in this non-local group: %s" % (user, group)
        if verbose:
            privs = []
            try:
                privs = win32security.LsaEnumerateAccountRights(ph, sid)
            except:
                pass
            # Union of directly-held and group-inherited privileges.
            for priv in list(set(list(gprivs) + list(privs))):
                print "User %s has privilege %s" % (user, priv)
    if verbose:
        print
        print "[+] Privileges"
        print
        # Reverse view: for every known privilege, list who holds it.
        for priv in windows_privileges:
            try:
                for s in win32security.LsaEnumerateAccountsWithUserRight(ph, priv):
                    priv_desc = "NoDesc!"
                    try:
                        priv_desc = win32security.LookupPrivilegeDisplayName(remote_server, priv)
                    except:
                        pass
                    name, domain, type = win32security.LookupAccountSid(remote_server, s)
                    # SID_NAME_USE: 4 = alias/group, 5 = well-known/user per
                    # the mapping assumed here -- TODO confirm constants.
                    type_string = "unknown_type"
                    if type == 4:
                        type_string = "group"
                    if type == 5:
                        type_string = "user"
                    print "Privilege %s (%s) is held by %s\%s (%s)" % (priv, priv_desc, domain, name, type_string)
                    # print "Privilege %s is held by %s\%s (%s)" % (priv, domain, name, type_string)
            except:
                #print "Skipping %s - doesn't exist for this platform" % priv
                pass
# Main
print "windows-privesc-check v%s (http://pentestmonkey.net/windows-privesc-check)\n" % version
# Process Command Line Options
#
# NOTE(review): the short-option string repeats 'I' and the long-option list
# contains "domain" twice (with and without '=') -- confirm against usage().
try:
    opts, args = getopt.getopt(sys.argv[1:], "artSDEPRHUOMAFILIehwiWvo:s:u:p:d:", ["help", "verbose", "all_checks", "registry_checks", "path_checks", "service_checks", "services", "drive_checks", "eventlog_checks", "progfiles_checks", "passpol", "process_checks", "share_checks", "user_groups", "processes", "ignore_trusted", "owner_info", "write_perms_only", "domain", "patch_checks", "admin_users", "host_info", "logged_in", "report_file=", "username=", "password=", "domain=", "server="])
except getopt.GetoptError, err:
    # print help information and exit:
    print str(err) # will print something like "option -a not recognized"
    usage()
    sys.exit(2)
output = None
# Translate each parsed option into the corresponding module-level flag.
for o, a in opts:
    if o in ("-a", "--all_checks"):
        all_checks = 1
    elif o in ("-r", "--registry_checks"):
        registry_checks = 1
    elif o in ("-t", "--path_checks"):
        path_checks = 1
    elif o in ("-S", "--service_checks"):
        service_checks = 1
    elif o in ("-D", "--drive_checks"):
        drive_checks = 1
    elif o in ("-E", "--eventlog_checks"):
        eventlog_checks = 1
    elif o in ("-F", "--progfiles_checks"):
        progfiles_checks = 1
    elif o in ("-R", "--process_checks"):
        process_checks = 1
    elif o in ("-H", "--share_checks"):
        share_checks = 1
    # elif o in ("-T", "--patch_checks"):
    #     patch_checks = 1
    elif o in ("-L", "--logged_in_audit"):
        logged_in_audit = 1
    elif o in ("-U", "--user_group_audit"):
        user_group_audit = 1
    elif o in ("-P", "--passpol"):
        passpol_audit = 1
    elif o in ("-A", "--admin_users_audit"):
        admin_users_audit = 1
    elif o in ("-O", "--process_audit"):
        process_audit = 1
    elif o in ("-i", "--host_info"):
        host_info_audit = 1
    elif o in ("-e", "--services"):
        service_audit = 1
    elif o in ("-h", "--help"):
        usage()
        sys.exit()
    elif o in ("-w", "--write_perms_only"):
        weak_perms_only = 1
    elif o in ("-I", "--ignore_trusted"):
        ignore_trusted = 1
    elif o in ("-W", "--owner_info"):
        owner_info = 1
    elif o in ("-v", "--verbose"):
        # -v may be given more than once to raise verbosity further.
        verbose = verbose + 1
    elif o in ("-o", "--report_file"):
        report_file_name = a
    elif o in ("-s", "--server"):
        remote_server = a
        print "Remote server selected: " + a
    elif o in ("-u", "--username"):
        remote_username = a
    elif o in ("-p", "--password"):
        remote_password = a
    elif o in ("-d", "--domain"):
        remote_domain = a
    else:
        assert False, "unhandled option"
if all_checks:
    # -a/--all_checks implies every individual check and audit below.
    registry_checks = 1
    path_checks = 1
    service_checks = 1
    service_audit = 1
    drive_checks = 1
    eventlog_checks = 1
    progfiles_checks = 1
    process_checks = 1
    share_checks = 1
    user_group_audit = 1
    passpol_audit = 1
    logged_in_audit = 1
    admin_users_audit= 1
    host_info_audit = 1
    patch_checks = 1
    process_audit = 1
# Print usage message unless at least one type of check is selected
if not (
    registry_checks or
    path_checks or
    service_checks or
    service_audit or
    drive_checks or
    eventlog_checks or
    progfiles_checks or
    process_checks or
    share_checks or
    logged_in_audit or
    user_group_audit or
    passpol_audit or
    admin_users_audit or
    host_info_audit or
    process_audit or
    patch_checks
):
    usage()
# Default report name: privesc-report-<hostname>.html in the current dir.
if report_file_name == None:
    report_file_name = "privesc-report-" + socket.gethostname() + ".html"
# Better open the report file now in case there's a permissions problem
REPORT = open(report_file_name,"w")
# Print out scan parameters
print "Audit parameters:"
print "Registry Checks: ....... " + str(registry_checks)
print "PATH Checks: ........... " + str(path_checks)
print "Service Checks: ........ " + str(service_checks)
print "Eventlog Checks: ....... " + str(drive_checks)
print "Program Files Checks: .. " + str(eventlog_checks)
print "Process Checks: ........ " + str(progfiles_checks)
print "Patch Checks: ..........." + str(patch_checks)
print "User/Group Audit: ...... " + str(user_group_audit)
print "Password Policy Audit .. " + str(passpol_audit)
print "Logged-in User Audit ... " + str(logged_in_audit)
print "Admin Users Audit: ..... " + str(admin_users_audit)
print "Host Info Audit: ....... " + str(host_info_audit)
print "Process Audit: ......... " + str(process_audit)
print "Service Audit .......... " + str(service_audit)
print "Ignore Trusted ......... " + str(ignore_trusted)
print "Owner Info ............. " + str(owner_info)
print "Weak Perms Only ........ " + str(weak_perms_only)
print "Verbosity .............. " + str(verbose)
print "Output File: ........... " + report_file_name
print
impersonate(remote_username, remote_password, remote_domain)
# Load win32security
#
# Try to open file and ignore the result.  This gets win32security loaded and
# working.  We can then turn off WOW64 and call repeatedly.  If we turn off
# WOW64 first, win32security will fail to work properly.
try:
    sd = win32security.GetNamedSecurityInfo (
        ".",
        win32security.SE_FILE_OBJECT,
        win32security.OWNER_SECURITY_INFORMATION | win32security.DACL_SECURITY_INFORMATION
    )
except:
    # nothing
    pass
# Load win32net
#
# NetLocalGroupEnum fails with like under Windows 7 64-bit, but not XP 32-bit:
# pywintypes.error: (127, 'NetLocalGroupEnum', 'The specified procedure could not be found.')
dummy = win32net.NetLocalGroupEnum(None, 0, 0, 1000)
# Disable WOW64 - we WANT to see 32-bit areas of the filesystem
#
# Need to wrap in a try because the following call will error on 32-bit windows
try:
    k32.Wow64DisableWow64FsRedirection( ctypes.byref(wow64) )
except:
    on64bitwindows = 0
# WOW64 is now disabled, so we can read file permissions without Windows redirecting us from system32 to syswow64
# Run checks
#
# Each flag was set during option parsing; every enabled check prints its
# section header and then runs.
if registry_checks:
    print_section("Registry Checks")
    check_registry()
if path_checks:
    print_section("PATH Checks")
    check_paths()
if service_checks:
    print_section("Service Checks")
    check_services()
if service_audit:
    print_section("Service Audit")
    audit_services()
if drive_checks:
    print_section("Drive Checks")
    check_drives()
if eventlog_checks:
    print_section("Event Log Checks")
    check_event_logs()
if progfiles_checks:
    print_section("Program Files Checks")
    check_progfiles()
if process_checks:
    print_section("Process Checks")
    check_processes()
if share_checks:
    print_section("Share Checks")
    check_shares()
if logged_in_audit:
    print_section("Logged-in User Audit")
    audit_logged_in()
if user_group_audit:
    print_section("User/Group Audit")
    audit_user_group()
if passpol_audit:
    print_section("Password Policy")
    audit_passpol()
if admin_users_audit:
    print_section("Admin Users Audit")
    audit_admin_users()
if host_info_audit:
    print_section("Host Info Audit")
    audit_host_info()
if process_audit:
    print_section("Process Audit")
    audit_processes()
if patch_checks:
    print_section("Patch Checks")
    check_patches()
# task_name='test_addtask.job'
# ts=pythoncom.CoCreateInstance(taskscheduler.CLSID_CTaskScheduler,None,pythoncom.CLSCTX_INPROC_SERVER,taskscheduler.IID_ITaskScheduler)
# tasks=ts.Enum()
# for task in tasks:
# print task
# print issues
# Generate report
audit_data = {}
audit_data['hostname'] = socket.gethostname()
ver_list = win32api.GetVersionEx(1)
os_ver = str(ver_list[0]) + "." + str(ver_list[1])
# version numbers from http://msdn.microsoft.com/en-us/library/ms724832(VS.85).aspx
if os_ver == "4.0":
os_str = "Windows NT"
if os_ver == "5.0":
os_str = "Windows 2000"
if os_ver == "5.1":
os_str = "Windows XP"
if os_ver == "5.2":
os_str = "Windows 2003"
if os_ver == "6.0":
os_str = "Windows Vista"
if os_ver == "6.0":
os_str = "Windows 2008"
if os_ver == "6.1":
os_str = "Windows 2008 R2"
if os_ver == "6.1":
os_str = "Windows 7"
audit_data['os_name'] = os_str
# print ver_list
# audit_data['os_version'] = str(ver_list[0]) + "." + str(ver_list[1]) + "." + str(ver_list[2]) + " SP" + str(ver_list[5])+ "." + str(ver_list[6])
audit_data['os_version'] = str(ver_list[0]) + "." + str(ver_list[1]) + "." + str(ver_list[2]) + " SP" + str(ver_list[5])
# http://msdn.microsoft.com/en-us/library/ms724429(VS.85).aspx
audit_data['ips'] = local_ips
audit_data['domwkg'] = win32api.GetDomainName()
audit_data['version'] = version
audit_data['datetime'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
audit_data['audit_user'] = os.environ['USERDOMAIN'] + "\\" + os.environ['USERNAME']
audit_data['trusted_users'] = (handle_unicode(p) for p in trusted_principles_fq)
audit_data['trusted_groups'] = (handle_unicode(p) for p in trusted_principles)
audit_data['dangerous_privs'] = 'somedangerous_privs'
REPORT.write(format_issues("html", issue_template, issues))
REPORT.close
print "\n\nReport saved to %s \n" % report_file_name
|
silentsignal/wpc
|
windows-privesc-check.py
|
Python
|
gpl-2.0
| 119,299
|
"""distutils.util
Miscellaneous utility functions -- anything that doesn't fit into
one of the other *util.py modules.
"""
__revision__ = "$Id$"
import sys, os, string, re
from distutils.errors import DistutilsPlatformError
from distutils.dep_util import newer
from distutils.spawn import spawn
from distutils import log
from distutils.errors import DistutilsByteCompileError
def get_platform ():
    """Return a string that identifies the current platform.  This is used
    mainly to distinguish platform-specific build directories and
    platform-specific built distributions.  Typically includes the OS name
    and version and the architecture (as supplied by 'os.uname()'),
    although the exact information included depends on the OS; eg. for IRIX
    the architecture isn't particularly important (IRIX only runs on SGI
    hardware), but for Linux the kernel version isn't particularly
    important.

    Examples of returned values:
       linux-i586
       linux-alpha (?)
       solaris-2.6-sun4u
       irix-5.3
       irix64-6.2

    Windows will return one of:
       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
       win-ia64 (64bit Windows on Itanium)
       win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.
    """
    if os.name == 'nt':
        # sniff sys.version for architecture, e.g. "... [MSC v.1500 64 bit (AMD64)]".
        prefix = " bit ("
        i = string.find(sys.version, prefix)
        if i == -1:
            return sys.platform
        j = string.find(sys.version, ")", i)
        look = sys.version[i+len(prefix):j].lower()
        if look=='amd64':
            return 'win-amd64'
        if look=='itanium':
            return 'win-ia64'
        return sys.platform
    if os.name != "posix" or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform
    # Try to distinguish various flavours of Unix
    (osname, host, release, version, machine) = os.uname()
    # Convert the OS name to lowercase, remove '/' characters
    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
    osname = string.lower(osname)
    osname = string.replace(osname, '/', '')
    machine = string.replace(machine, ' ', '_')
    machine = string.replace(machine, '/', '-')
    if osname[:5] == "linux":
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)
    elif osname[:5] == "sunos":
        if release[0] >= "5": # SunOS 5 == Solaris 2
            osname = "solaris"
            release = "%d.%s" % (int(release[0]) - 3, release[2:])
        # fall through to standard osname-release-machine representation
    elif osname[:4] == "irix": # could be "irix64"!
        return "%s-%s" % (osname, release)
    elif osname[:3] == "aix":
        return "%s-%s.%s" % (osname, version, release)
    elif osname[:6] == "cygwin":
        osname = "cygwin"
        # Cygwin's release looks like "1.5.24(0.156/4/2)"; keep only the
        # leading dotted-numeric part.
        rel_re = re.compile (r'[\d.]+')
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == "darwin":
        #
        # For our purposes, we'll assume that the system version from
        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
        # to. This makes the compatibility story a bit more sane because the
        # machine is going to compile and link as if it were
        # MACOSX_DEPLOYMENT_TARGET.
        from distutils.sysconfig import get_config_vars
        cfgvars = get_config_vars()
        macver = os.environ.get('MACOSX_DEPLOYMENT_TARGET')
        if not macver:
            macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
        if 1:
            # Always calculate the release of the running machine,
            # needed to determine if we can build fat binaries or not.
            macrelease = macver
            # Get the system version. Reading this plist is a documented
            # way to get the system version (see the documentation for
            # the Gestalt Manager)
            try:
                f = open('/System/Library/CoreServices/SystemVersion.plist')
            except IOError:
                # We're on a plain darwin box, fall back to the default
                # behaviour.
                pass
            else:
                m = re.search(
                        r'<key>ProductUserVisibleVersion</key>\s*' +
                        r'<string>(.*?)</string>', f.read())
                f.close()
                if m is not None:
                    macrelease = '.'.join(m.group(1).split('.')[:2])
                # else: fall back to the default behaviour
        if not macver:
            macver = macrelease
        if macver:
            from distutils.sysconfig import get_config_vars
            release = macver
            osname = "macosx"
            if (macrelease + '.') >= '10.4.' and \
                    '-arch' in get_config_vars().get('CFLAGS', '').strip():
                # The universal build will build fat binaries, but not on
                # systems before 10.4
                #
                # Try to detect 4-way universal builds, those have machine-type
                # 'universal' instead of 'fat'.
                machine = 'fat'
                cflags = get_config_vars().get('CFLAGS')
                archs = re.findall('-arch\s+(\S+)', cflags)
                archs = tuple(sorted(set(archs)))
                if len(archs) == 1:
                    machine = archs[0]
                elif archs == ('i386', 'ppc'):
                    machine = 'fat'
                elif archs == ('i386', 'x86_64'):
                    machine = 'intel'
                elif archs == ('i386', 'ppc', 'x86_64'):
                    machine = 'fat3'
                elif archs == ('ppc64', 'x86_64'):
                    machine = 'fat64'
                elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
                    machine = 'universal'
                else:
                    raise ValueError(
                       "Don't know machine value for archs=%r"%(archs,))
            elif machine == 'i386':
                # On OSX the machine type returned by uname is always the
                # 32-bit variant, even if the executable architecture is
                # the 64-bit variant
                if sys.maxint >= 2**32:
                    machine = 'x86_64'
            elif machine in ('PowerPC', 'Power_Macintosh'):
                # Pick a sane name for the PPC architecture.
                machine = 'ppc'
                # See 'i386' case
                if sys.maxint >= 2**32:
                    machine = 'ppc64'
    return "%s-%s-%s" % (osname, release, machine)
# get_platform ()
def convert_path (pathname):
    """Return 'pathname' as a name that will work on the native filesystem,
    i.e. split it on '/' and put it back together again using the current
    directory separator.  Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem.  Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    # Fast path: the native separator already is '/'.
    if os.sep == '/':
        return pathname
    if not pathname:
        return pathname
    # Call-form raise and str methods replace the Python-2-only
    # 'raise E, msg' statement and the deprecated 'string' module
    # functions; behaviour is unchanged and works on Python 2 and 3.
    if pathname[0] == '/':
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname[-1] == '/':
        raise ValueError("path '%s' cannot end with '/'" % pathname)
    paths = pathname.split('/')
    # Drop no-op '.' components before re-joining.
    while '.' in paths:
        paths.remove('.')
    if not paths:
        return os.curdir
    return os.path.join(*paths)
# convert_path ()
def change_root (new_root, pathname):
    """Return 'pathname' with 'new_root' prepended.  If 'pathname' is
    relative, this is equivalent to "os.path.join(new_root,pathname)".
    Otherwise, it requires making 'pathname' relative and then joining the
    two, which is tricky on DOS/Windows and Mac OS.

    Raises DistutilsPlatformError on platforms this function does not
    know about.
    """
    if os.name == 'posix':
        if not os.path.isabs(pathname):
            return os.path.join(new_root, pathname)
        else:
            # Strip the leading '/' so the join lands under new_root.
            return os.path.join(new_root, pathname[1:])
    elif os.name == 'nt':
        (drive, path) = os.path.splitdrive(pathname)
        # BUG FIX: the original indexed path[0], which raises IndexError
        # when splitdrive leaves an empty path (e.g. pathname == "C:").
        if path.startswith('\\'):
            path = path[1:]
        return os.path.join(new_root, path)
    elif os.name == 'os2':
        (drive, path) = os.path.splitdrive(pathname)
        if path.startswith(os.sep):
            path = path[1:]
        return os.path.join(new_root, path)
    elif os.name == 'mac':
        if not os.path.isabs(pathname):
            return os.path.join(new_root, pathname)
        else:
            # Chop off volume name from start of path
            elements = pathname.split(":", 1)
            pathname = ":" + elements[1]
            return os.path.join(new_root, pathname)
    else:
        # Call-form raise replaces the Python-2-only raise statement.
        raise DistutilsPlatformError(
              "nothing known about platform '%s'" % os.name)
_environ_checked = 0
def check_environ ():
    """Ensure that 'os.environ' has all the environment variables we
    guarantee that users can use in config files, command-line options,
    etc.  Currently this includes:
      HOME - user's home directory (Unix only)
      PLAT - description of the current platform, including hardware
             and OS (see 'get_platform()')
    """
    global _environ_checked
    # This is a one-shot initialisation; subsequent calls are no-ops.
    if not _environ_checked:
        if os.name == 'posix' and 'HOME' not in os.environ:
            import pwd
            # Field 5 of the passwd entry is the home directory.
            os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]
        if 'PLAT' not in os.environ:
            os.environ['PLAT'] = get_platform()
        _environ_checked = 1
def subst_vars (s, local_vars):
    """Perform shell/Perl-style variable substitution on 'string'.  Every
    occurrence of '$' followed by a name is considered a variable, and
    variable is substituted by the value found in the 'local_vars'
    dictionary, or in 'os.environ' if it's not in 'local_vars'.
    'os.environ' is first checked/augmented to guarantee that it contains
    certain values: see 'check_environ()'.  Raise ValueError for any
    variables not found in either 'local_vars' or 'os.environ'.
    """
    check_environ()
    def _subst (match, local_vars=local_vars):
        # local_vars wins over the environment for each $name match.
        var_name = match.group(1)
        if var_name in local_vars:
            return str(local_vars[var_name])
        else:
            return os.environ[var_name]
    try:
        return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s)
    # BUG FIX: 'except KeyError, var' is Python-2-only syntax; 'as'
    # works on Python 2.6+ and 3, with identical behaviour.
    except KeyError as var:
        raise ValueError("invalid variable '$%s'" % var)
# subst_vars ()
def grok_environment_error (exc, prefix="error: "):
    """Generate a useful error message from an EnvironmentError (IOError or
    OSError) exception object.  Handles Python 1.5.1 and 1.5.2 styles, and
    does what it can to deal with exception objects that don't have a
    filename (which happens when the error is due to a two-file operation,
    such as 'rename()' or 'link()'.  Returns the error message as a string
    prefixed with 'prefix'.
    """
    # Python 1.5.2-style exceptions carry 'filename' and 'strerror'.
    new_style = hasattr(exc, 'filename') and hasattr(exc, 'strerror')
    if not new_style:
        # Old-style exception: the message is the last tuple element.
        return prefix + str(exc[-1])
    if exc.filename:
        return "%s%s: %s" % (prefix, exc.filename, exc.strerror)
    # Two-file operations in the posix module don't record a filename.
    return "%s%s" % (prefix, exc.strerror)
# grok_environment_error ()
# Needed by 'split_quoted()'
_wordchars_re = _squote_re = _dquote_re = None

def _init_regex():
    """Lazily compile the word/quote regexes used by split_quoted()."""
    global _wordchars_re, _squote_re, _dquote_re
    _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
    _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
    _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')

def split_quoted (s):
    """Split a string up according to Unix shell-like rules for quotes and
    backslashes.  In short: words are delimited by spaces, as long as those
    spaces are not escaped by a backslash, or inside a quoted string.
    Single and double quotes are equivalent, and the quote characters can
    be backslash-escaped.  The backslash is stripped from any two-character
    escape sequence, leaving only the escaped character.  The quote
    characters are stripped from any quoted string.  Returns a list of
    words.
    """
    # This is a nice algorithm for splitting up a single string, since it
    # doesn't require character-by-character examination.  It was a little
    # bit of a brain-bender to get it working right, though...
    #
    # Python-2-only 'raise E, msg' statements and the deprecated
    # string.strip()/string.lstrip() functions have been replaced by their
    # call-form / str-method equivalents; behaviour is unchanged.
    if _wordchars_re is None: _init_regex()
    s = s.strip()
    words = []
    pos = 0
    while s:
        m = _wordchars_re.match(s, pos)
        end = m.end()
        if end == len(s):
            words.append(s[:end])
            break
        if s[end] in string.whitespace: # unescaped, unquoted whitespace: now
            words.append(s[:end])       # we definitely have a word delimiter
            s = s[end:].lstrip()
            pos = 0
        elif s[end] == '\\':            # preserve whatever is being escaped;
                                        # will become part of the current word
            s = s[:end] + s[end+1:]
            pos = end+1
        else:
            if s[end] == "'":           # slurp singly-quoted string
                m = _squote_re.match(s, end)
            elif s[end] == '"':         # slurp doubly-quoted string
                m = _dquote_re.match(s, end)
            else:
                raise RuntimeError(
                      "this can't happen (bad char '%c')" % s[end])
            if m is None:
                raise ValueError(
                      "bad string (mismatched %s quotes?)" % s[end])
            # Drop the surrounding quote characters from the word.
            (beg, end) = m.span()
            s = s[:beg] + s[beg+1:end-1] + s[end:]
            pos = m.end() - 2
            if pos >= len(s):
                words.append(s)
                break
    return words
# split_quoted ()
def execute (func, args, msg=None, verbose=0, dry_run=0):
    """Perform some action that affects the outside world (eg.  by
    writing to the filesystem).  Such actions are special because they
    are disabled by the 'dry_run' flag.  This method takes care of all
    that bureaucracy for you; all you have to do is supply the
    function to call and an argument tuple for it (to embody the
    "external action" being performed), and an optional message to
    print.
    """
    if msg is None:
        msg = "%s%r" % (func.__name__, args)
        # A one-tuple reprs as "(x,)"; drop the stray comma.
        if msg.endswith(',)'):
            msg = msg[0:-2] + ')'
    log.info(msg)
    if dry_run:
        return
    func(*args)
def strtobool (val):
    """Convert a string representation of truth to true (1) or false (0).

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'.  Raises ValueError if
    'val' is anything else.
    """
    # str.lower() replaces the deprecated string.lower(); the call-form
    # raise replaces the Python-2-only raise statement.  Both work
    # identically on Python 2 and 3.
    val = val.lower()
    if val in ('y', 'yes', 't', 'true', 'on', '1'):
        return 1
    elif val in ('n', 'no', 'f', 'false', 'off', '0'):
        return 0
    else:
        raise ValueError("invalid truth value %r" % (val,))
def byte_compile (py_files,
                  optimize=0, force=0,
                  prefix=None, base_dir=None,
                  verbose=1, dry_run=0,
                  direct=None):
    """Byte-compile a collection of Python source files to either .pyc
    or .pyo files in the same directory.  'py_files' is a list of files
    to compile; any files that don't end in ".py" are silently skipped.
    'optimize' must be one of the following:
      0 - don't optimize (generate .pyc)
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out to use direct compilation or not (see
    the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.
    """
    # nothing is done if sys.dont_write_bytecode is True
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')
    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)
    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        # mkstemp is preferred (no filename race); mktemp is the fallback
        # for ancient Pythons without it.
        try:
            from tempfile import mkstemp
            (script_fd, script_name) = mkstemp(".py")
        except ImportError:
            from tempfile import mktemp
            (script_fd, script_name) = None, mktemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")
            script.write("""\
from distutils.util import byte_compile
files = [
""")
            # XXX would be nice to write absolute filenames, just for
            # safety's sake (script should be more robust in the face of
            # chdir'ing before running it).  But this requires abspath'ing
            # 'prefix' as well, and that breaks the hack in build_lib's
            # 'byte_compile()' method that carefully tacks on a trailing
            # slash (os.sep really) to make sure the prefix here is "just
            # right".  This whole prefix business is rather delicate -- the
            # problem is that it's really a directory, but I'm treating it
            # as a dumb string, so trailing slashes and so forth matter.
            #py_files = map(os.path.abspath, py_files)
            #if prefix:
            #    prefix = os.path.abspath(prefix)
            script.write(string.join(map(repr, py_files), ",\n") + "]\n")
            script.write("""
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
""" % (optimize, force, prefix, base_dir, verbose))
            script.close()
        # Re-run the generated script under the right -O flags so the
        # child interpreter's optimization level matches the request.
        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name,), "removing %s" % script_name,
                dry_run=dry_run)
    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile
        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue
            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            cfile = file + (__debug__ and "c" or "o")
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError, \
                          ("invalid prefix: filename %r doesn't start with %r"
                           % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)
            cfile_base = os.path.basename(cfile)
            if direct:
                # 'newer' skips files whose bytecode is already up to date
                # unless 'force' is set.
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s",
                              file, cfile_base)
# byte_compile ()
def rfc822_escape (header):
    """Return a version of the string escaped for inclusion in an
    RFC-822 header, by ensuring there are 8 spaces after each newline.
    """
    # str methods replace the deprecated string.split()/string.join()
    # functions (removed in Python 3); behaviour is unchanged.
    lines = header.split('\n')
    return ('\n' + 8 * ' ').join(lines)
|
2ndy/RaspIM
|
usr/lib/python2.6/distutils/util.py
|
Python
|
gpl-2.0
| 21,928
|
#!/usr/bin/python
# Interactive debug client (Python 2): connects to a scanner's TCP control
# port and relays typed commands, printing any reply.
import socket
# Target address is read interactively; 14882 is presumably the Xerox
# scanner's debug/control port -- TODO confirm against device docs.
HOST = raw_input("enter scanner ip : ")
PORT = 14882
if __name__ == "__main__":
    socks = socket.socket()
    socks.connect((HOST, PORT))
    # Short timeout so recv() returns promptly when the device is silent.
    socks.settimeout(1)
    try:
        while True:
            command = raw_input("# ")
            if command != "":
                # Commands are newline-terminated on the wire.
                socks.send("%s\n" % command)
            try:
                data = socks.recv(1024)
                print "Received", repr(data)
            except socket.timeout:
                # No response within the timeout: just prompt again.
                pass
    except KeyboardInterrupt:
        # Ctrl-C exits the command loop cleanly.
        pass
    except Exception, e:
        print e
    socks.close()
    print "\n"
|
mbouchar/xc2424scan
|
src/xc2424scan/utils/test.py
|
Python
|
gpl-2.0
| 620
|
# -*- coding: utf-8 -*-
from modulefinder import ModuleFinder
from ..path import normpath
def dependencies(fname, root=None):
    """Find all dependencies (i.e. imported modules) from fname without
    running it.

    If `root` is specified, only modules having __file__ attributes
    under this root are included.

    This function is quite slow..
    """
    assert fname.endswith('.py')
    res = set()
    finder = ModuleFinder()
    try:
        finder.run_script(fname)
    except:
        # ModuleFinder chokes on some sources; treat as "no dependencies"
        # rather than propagating the analysis failure.
        return []
    root = normpath(root, slash='/')
    prefix = len(root) + 1  # for trailing slash
    for name, mod in finder.modules.items():
        if name.startswith('_'):
            continue
        # BUG FIX: built-in/frozen modules have no file on disk, so
        # mod.__file__ may be missing or None; skip them instead of
        # crashing inside normpath(None).
        modfile = getattr(mod, '__file__', None)
        if modfile is None:
            continue
        modpath = normpath(modfile, slash='/')
        if modpath.startswith(root):
            res.add(modpath[prefix:])
    return list(sorted(res))
|
thebjorn/dkcoverage
|
dkcoverage/utils/dependencies.py
|
Python
|
gpl-2.0
| 873
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2011 Nick Hall
# Copyright (C) 2011 Gary Burton
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Module that implements the gramplet bar fuctionality.
"""
#-------------------------------------------------------------------------
#
# Set up logging
#
#-------------------------------------------------------------------------
import logging
LOG = logging.getLogger('.grampletbar')
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
import time
import os
import configparser
#-------------------------------------------------------------------------
#
# GNOME modules
#
#-------------------------------------------------------------------------
from gi.repository import Gtk
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
from gramps.gen.const import URL_MANUAL_PAGE, URL_WIKISTRING, VERSION_DIR
from gramps.gen.config import config
from gramps.gen.constfunc import win
from ..managedwindow import ManagedWindow
from ..display import display_help, display_url
from .grampletpane import (AVAILABLE_GRAMPLETS,
GET_AVAILABLE_GRAMPLETS,
GET_GRAMPLET_LIST,
get_gramplet_opts,
get_gramplet_options_by_name,
make_requested_gramplet,
GuiGramplet)
from .undoablebuffer import UndoableBuffer
from ..utils import is_right_click
from ..dialog import QuestionDialog
#-------------------------------------------------------------------------
#
# Constants
#
#-------------------------------------------------------------------------
WIKI_HELP_PAGE = URL_WIKISTRING + URL_MANUAL_PAGE + '_-_Gramplets'
WIKI_HELP_GRAMPLETBAR = URL_WIKISTRING + URL_MANUAL_PAGE + '_-_Main_Window#Gramplet_Bar_Menu'
WIKI_HELP_ABOUT_GRAMPLETS = URL_WIKISTRING + URL_MANUAL_PAGE + '_-_Gramplets#What_is_a_Gramplet'
NL = "\n"
#-------------------------------------------------------------------------
#
# GrampletBar class
#
#-------------------------------------------------------------------------
class GrampletBar(Gtk.Notebook):
    """
    A class which defines the graphical representation of the GrampletBar.

    Each page of the notebook holds one TabGramplet; tabs can be
    reordered, detached into their own window, and the whole layout is
    persisted to a per-view .ini file.
    """
    def __init__(self, dbstate, uistate, pageview, configfile, defaults):
        Gtk.Notebook.__init__(self)
        self.dbstate = dbstate
        self.uistate = uistate
        self.pageview = pageview
        # Per-view configuration is stored as an .ini file in the
        # versioned Gramps directory.
        self.configfile = os.path.join(VERSION_DIR, "%s.ini" % configfile)
        self.defaults = defaults
        self.detached_gramplets = []
        self.empty = False
        self.close_buttons = []
        self.set_group_name("grampletbar")
        self.set_show_border(False)
        self.set_scrollable(True)
        # Drop-down button at the end of the tab row that opens the bar menu.
        book_button = Gtk.Button()
        # Arrow is too small unless in a box
        box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        arrow = Gtk.Arrow(arrow_type=Gtk.ArrowType.DOWN,
                          shadow_type=Gtk.ShadowType.NONE)
        arrow.show()
        box.add(arrow)
        box.show()
        book_button.add(box)
        book_button.set_relief(Gtk.ReliefStyle.NONE)
        book_button.connect('clicked', self.__button_clicked)
        book_button.set_property("tooltip-text", _("Gramplet Bar Menu"))
        book_button.show()
        self.set_action_widget(book_button, Gtk.PackType.END)
        self.connect('page-added', self.__page_added)
        self.connect('page-removed', self.__page_removed)
        self.connect('create-window', self.__create_window)
        # Restore previously saved gramplets, ordered by saved page index.
        config_settings, opts_list = self.__load(defaults)
        opts_list.sort(key=lambda opt: opt["page"])
        for opts in opts_list:
            if opts["name"] in AVAILABLE_GRAMPLETS():
                all_opts = get_gramplet_opts(opts["name"], opts)
                gramplet = make_requested_gramplet(TabGramplet, self, all_opts,
                                                   self.dbstate, self.uistate)
                if gramplet:
                    self.__add_tab(gramplet)
        if len(opts_list) == 0:
            self.empty = True
            self.__create_empty_tab()
        # config_settings is (visible, default_page) from __load().
        if config_settings[0]:
            self.show()
        self.set_current_page(config_settings[1])
        uistate.connect('grampletbar-close-changed', self.cb_close_changed)
        # Connect after gramplets added to prevent making them active
        self.connect('switch-page', self.__switch_page)
    def _get_config_setting(self, configparser, section, setting, fn=None):
        """
        Get a section.setting value from the config parser.
        Takes a configparser instance, a section, a setting, and
        optionally a post-processing function (typically int).
        Always returns a value of the appropriate type.
        """
        # NOTE(review): the parameter name shadows the configparser module.
        value = ""
        try:
            value = configparser.get(section, setting)
            value = value.strip()
            if fn:
                value = fn(value)
        except:
            # On any failure fall back to the type's default by calling fn
            # with no arguments (e.g. int() == 0), or "" when fn is None.
            if fn:
                value = fn()
            else:
                value = ""
        return value
    def __load(self, defaults):
        """
        Load the gramplets from the configuration file.

        Returns ((visible, default_page), list_of_gramplet_option_dicts).
        """
        retval = []
        visible = True
        default_page = 0
        filename = self.configfile
        if filename and os.path.exists(filename):
            cp = configparser.ConfigParser()
            try:
                cp.read(filename, encoding='utf-8')
            except:
                pass
            for sec in cp.sections():
                if sec == "Bar Options":
                    if "visible" in cp.options(sec):
                        visible = self._get_config_setting(cp, sec, "visible") == "True"
                    if "page" in cp.options(sec):
                        default_page = self._get_config_setting(cp, sec, "page", int)
                else:
                    # Every other section describes one gramplet.
                    data = {}
                    for opt in cp.options(sec):
                        if opt.startswith("data["):
                            # "data[N]" options are collected into a dict
                            # keyed by index N and flattened below.
                            temp = data.get("data", {})
                            #temp.append(self._get_config_setting(cp, sec, opt))
                            pos = int(opt[5:-1])
                            temp[pos] = self._get_config_setting(cp, sec, opt)
                            data["data"] = temp
                        else:
                            data[opt] = self._get_config_setting(cp, sec, opt)
                    if "data" in data:
                        data["data"] = [data["data"][key]
                                        for key in sorted(data["data"].keys())]
                    if "name" not in data:
                        data["name"] = "Unnamed Gramplet"
                        data["tname"] = _("Unnamed Gramplet")
                    retval.append(data)
        else:
            # give defaults as currently known
            for name in defaults:
                if name in AVAILABLE_GRAMPLETS():
                    retval.append(GET_AVAILABLE_GRAMPLETS(name))
        return ((visible, default_page), retval)
    def __save(self):
        """
        Save the gramplet configuration.

        Writes a "[Bar Options]" section plus one section per gramplet;
        failures are logged and swallowed so closing the view never fails.
        """
        filename = self.configfile
        try:
            with open(filename, "w", encoding='utf-8') as fp:
                fp.write(";; Gramplet bar configuration file" + NL)
                fp.write((";; Automatically created at %s" %
                          time.strftime("%Y/%m/%d %H:%M:%S")) + NL + NL)
                fp.write("[Bar Options]" + NL)
                fp.write(("visible=%s" + NL) % self.get_property('visible'))
                fp.write(("page=%d" + NL) % self.get_current_page())
                fp.write(NL)
                if self.empty:
                    gramplet_list = []
                else:
                    gramplet_list = [self.get_nth_page(page_num)
                                     for page_num in range(self.get_n_pages())]
                for page_num, gramplet in enumerate(gramplet_list):
                    opts = get_gramplet_options_by_name(gramplet.gname)
                    if opts is not None:
                        base_opts = opts.copy()
                        # Override declared defaults with the gramplet's
                        # current instance state.
                        for key in base_opts:
                            if key in gramplet.__dict__:
                                base_opts[key] = gramplet.__dict__[key]
                        fp.write(("[%s]" + NL) % gramplet.gname)
                        for key in base_opts:
                            if key in ["content", "title", "tname", "row", "column",
                                       "page", "version", "gramps"]: # don't save
                                continue
                            elif key == "data":
                                if not isinstance(base_opts["data"], (list, tuple)):
                                    fp.write(("data[0]=%s" + NL) % base_opts["data"])
                                else:
                                    cnt = 0
                                    for item in base_opts["data"]:
                                        fp.write(("data[%d]=%s" + NL) % (cnt, item))
                                        cnt += 1
                            else:
                                fp.write(("%s=%s" + NL)% (key, base_opts[key]))
                        fp.write(("page=%d" + NL) % page_num)
                        fp.write(NL)
        except IOError:
            LOG.warning("Failed writing '%s'; gramplets not saved" % filename)
        return
    def set_active(self):
        """
        Called when the view is set as active.
        """
        if not self.empty:
            gramplet = self.get_nth_page(self.get_current_page())
            if gramplet and gramplet.pui:
                gramplet.pui.active = True
                # Refresh only if the gramplet marked itself dirty while inactive.
                if gramplet.pui.dirty:
                    gramplet.pui.update()
    def set_inactive(self):
        """
        Called when the view is set as inactive.
        """
        if not self.empty:
            gramplet = self.get_nth_page(self.get_current_page())
            if gramplet and gramplet.pui:
                gramplet.pui.active = False
    def on_delete(self):
        """
        Called when the view is closed.

        Docks any detached gramplets, gives each gramplet a chance to
        persist its state, then saves the bar layout.
        """
        list(map(self.__dock_gramplet, self.detached_gramplets))
        if not self.empty:
            for page_num in range(self.get_n_pages()):
                gramplet = self.get_nth_page(page_num)
                # this is the only place where the gui runs user code directly
                if gramplet.pui:
                    gramplet.pui.on_save()
        self.__save()
    def add_gramplet(self, gname):
        """
        Add a gramplet by name.
        """
        if self.has_gramplet(gname):
            return
        all_opts = get_gramplet_options_by_name(gname)
        gramplet = make_requested_gramplet(TabGramplet, self, all_opts,
                                           self.dbstate, self.uistate)
        if not gramplet:
            LOG.warning("Problem creating '%s'", gname)
            return
        page_num = self.__add_tab(gramplet)
        self.set_current_page(page_num)
    def remove_gramplet(self, gname):
        """
        Remove a gramplet by name.

        Detached gramplets are docked first so their notebook page can be
        removed.
        """
        for gramplet in self.detached_gramplets:
            if gramplet.gname == gname:
                self.__dock_gramplet(gramplet)
                self.remove_page(self.page_num(gramplet))
                return
        for page_num in range(self.get_n_pages()):
            gramplet = self.get_nth_page(page_num)
            if gramplet.gname == gname:
                self.remove_page(page_num)
                return
    def has_gramplet(self, gname):
        """
        Return True if the GrampletBar contains the gramplet, else False.
        """
        return gname in self.all_gramplets()
    def all_gramplets(self):
        """
        Return a list of names of all the gramplets in the GrampletBar.
        """
        if self.empty:
            # NOTE(review): this branch returns the detached gramplet
            # objects themselves rather than their names -- inconsistent
            # with the else branch; confirm callers tolerate this.
            return self.detached_gramplets
        else:
            return [gramplet.gname for gramplet in self.get_children() +
                    self.detached_gramplets]
    def restore(self):
        """
        Restore the GrampletBar to its default gramplets.
        """
        list(map(self.remove_gramplet, self.all_gramplets()))
        list(map(self.add_gramplet, self.defaults))
        self.set_current_page(0)
    def __create_empty_tab(self):
        """
        Create an empty tab to be displayed when the GrampletBar is empty.
        """
        tab_label = Gtk.Label(label=_('Gramplet Bar'))
        tab_label.show()
        msg = _('Select the down arrow on the right corner for adding, removing or restoring gramplets.')
        content = Gtk.Label(label=msg)
        content.set_halign(Gtk.Align.START)
        content.set_line_wrap(True)
        content.set_size_request(150, -1)
        content.show()
        self.append_page(content, tab_label)
        return content
    def __add_tab(self, gramplet):
        """
        Add a tab to the notebook for the given gramplet.

        Returns the new page number.
        """
        label = self.__create_tab_label(gramplet)
        page_num = self.append_page(gramplet, label)
        return page_num
    def __create_tab_label(self, gramplet):
        """
        Create a tab label consisting of a label and a close button.
        """
        tablabel = TabLabel(gramplet, self.__delete_clicked)
        if hasattr(gramplet.pui, "has_data"):
            tablabel.set_has_data(gramplet.pui.has_data)
        else: # just a function; always show yes it has data
            tablabel.set_has_data(True)
        if config.get('interface.grampletbar-close'):
            tablabel.use_close(True)
        else:
            tablabel.use_close(False)
        return tablabel
    def cb_close_changed(self):
        """
        Close button preference changed.
        """
        for gramplet in self.get_children():
            tablabel = self.get_tab_label(gramplet)
            # The empty tab uses a plain Gtk.Label with no close button.
            if not isinstance(tablabel, Gtk.Label):
                tablabel.use_close(config.get('interface.grampletbar-close'))
    def __delete_clicked(self, button, gramplet):
        """
        Called when the delete button is clicked.
        """
        page_num = self.page_num(gramplet)
        self.remove_page(page_num)
    def __switch_page(self, notebook, unused, new_page):
        """
        Called when the user has switched to a new GrampletBar page.

        Deactivates the gramplet on the old page and activates (and, if
        dirty, refreshes) the one on the new page.
        """
        old_page = notebook.get_current_page()
        if old_page >= 0:
            gramplet = self.get_nth_page(old_page)
            if gramplet and gramplet.pui:
                gramplet.pui.active = False
        gramplet = self.get_nth_page(new_page)
        if not self.empty:
            if gramplet and gramplet.pui:
                gramplet.pui.active = True
                if gramplet.pui.dirty:
                    gramplet.pui.update()
    def __page_added(self, notebook, unused, new_page):
        """
        Called when a new page is added to the GrampletBar.
        """
        gramplet = self.get_nth_page(new_page)
        if self.empty:
            if isinstance(gramplet, TabGramplet):
                self.empty = False
                # Remove the placeholder "empty" tab (whichever slot it
                # ended up in relative to the new page).
                if new_page == 0:
                    self.remove_page(1)
                else:
                    self.remove_page(0)
            else:
                return
        gramplet.pane = self
        label = self.__create_tab_label(gramplet)
        self.set_tab_label(gramplet, label)
        self.set_tab_reorderable(gramplet, True)
        self.set_tab_detachable(gramplet, True)
        if gramplet in self.detached_gramplets:
            # The gramplet was re-docked: restore its original position.
            self.detached_gramplets.remove(gramplet)
            self.reorder_child(gramplet, gramplet.page)
    def __page_removed(self, notebook, unused, page_num):
        """
        Called when a page is removed from the GrampletBar.
        """
        if self.get_n_pages() == 0:
            self.empty = True
            self.__create_empty_tab()
    def __create_window(self, grampletbar, gramplet, x_pos, y_pos):
        """
        Called when a gramplet tab is dragged out of the notebook;
        creates a DetachedWindow to host it and returns the window's
        notebook for GTK to move the page into.
        """
        gramplet.page = self.page_num(gramplet)
        self.detached_gramplets.append(gramplet)
        win = DetachedWindow(grampletbar, gramplet, x_pos, y_pos)
        gramplet.detached_window = win
        return win.get_notebook()
    def __dock_gramplet(self, gramplet):
        """
        Dock a detached gramplet.
        """
        # Closing the detached window moves the gramplet back into this bar.
        gramplet.detached_window.close()
        gramplet.detached_window = None
    def __button_clicked(self, button):
        """
        Called when the drop-down button is clicked.

        Builds and pops up the Gramplet Bar menu.
        """
        self.menu = Gtk.Menu()
        menu = self.menu
        ag_menu = Gtk.MenuItem(label=_('Add a gramplet'))
        nav_type = self.pageview.navigation_type()
        skip = self.all_gramplets()
        gramplet_list = GET_GRAMPLET_LIST(nav_type, skip)
        gramplet_list.sort()
        self.__create_submenu(ag_menu, gramplet_list, self.__add_clicked)
        ag_menu.show()
        menu.append(ag_menu)
        # Only offer "Remove" here when tabs have no close buttons.
        if not (self.empty or config.get('interface.grampletbar-close')):
            rg_menu = Gtk.MenuItem(label=_('Remove a gramplet'))
            gramplet_list = [(gramplet.title, gramplet.gname)
                             for gramplet in self.get_children() +
                                             self.detached_gramplets]
            gramplet_list.sort()
            self.__create_submenu(rg_menu, gramplet_list,
                                  self.__remove_clicked)
            rg_menu.show()
            menu.append(rg_menu)
        rd_menu = Gtk.MenuItem(label=_('Restore default gramplets'))
        rd_menu.connect("activate", self.__restore_clicked)
        rd_menu.show()
        menu.append(rd_menu)
        # Separator.
        rs_menu = Gtk.SeparatorMenuItem()
        rs_menu.show()
        menu.append(rs_menu)
        rh_menu = Gtk.MenuItem(label=_('Gramplet Bar Help'))
        rh_menu.connect("activate", self.on_help_grampletbar_clicked)
        rh_menu.show()
        menu.append(rh_menu)
        rg_menu = Gtk.MenuItem(label=_('About Gramplets'))
        rg_menu.connect("activate", self.on_help_gramplets_clicked)
        rg_menu.show()
        menu.append(rg_menu)
        menu.show_all()
        menu.popup(None, None, cb_menu_position, button, 0, 0)
    def __create_submenu(self, main_menu, gramplet_list, callback_func):
        """
        Create a submenu of the context menu.
        """
        if main_menu:
            # NOTE(review): the fetched submenu is immediately discarded;
            # a fresh menu is always built and attached below.
            submenu = main_menu.get_submenu()
            submenu = Gtk.Menu()
            for entry in gramplet_list:
                item = Gtk.MenuItem(label=entry[0])
                item.connect("activate", callback_func, entry[1])
                item.show()
                submenu.append(item)
            main_menu.set_submenu(submenu)
    def __add_clicked(self, menu, gname):
        """
        Called when a gramplet is added from the context menu.
        """
        self.add_gramplet(gname)
    def __remove_clicked(self, menu, gname):
        """
        Called when a gramplet is removed from the context menu.
        """
        self.remove_gramplet(gname)
    def __restore_clicked(self, menu):
        """
        Called when restore defaults is clicked from the context menu.
        """
        QuestionDialog(
            _("Restore to defaults?"),
            _("The gramplet bar will be restored to contain its default "
              "gramplets. This action cannot be undone."),
            _("OK"),
            self.restore,
            parent=self.uistate.window)
    def get_config_funcs(self):
        """
        Return a list of configuration functions.

        One function per gramplet that exposes GUI options; used by the
        Configure View dialog.
        """
        funcs = []
        if self.empty:
            gramplets = []
        else:
            gramplets = self.get_children()
        for gramplet in gramplets + self.detached_gramplets:
            gui_options = gramplet.make_gui_options()
            if gui_options:
                funcs.append(self.__build_panel(gramplet.title, gui_options))
        return funcs
    def __build_panel(self, title, gui_options):
        """
        Return a configuration function that returns the title of a page in
        the Configure View dialog and a gtk container defining the page.
        """
        # Closure captures title and gui_options for the dialog to call later.
        def gramplet_panel(configdialog):
            return title, gui_options
        return gramplet_panel
    def on_help_grampletbar_clicked(self, dummy):
        """ Button: Display the relevant portion of Gramps manual"""
        display_url(WIKI_HELP_GRAMPLETBAR)
    def on_help_gramplets_clicked(self, dummy):
        """ Button: Display the relevant portion of Gramps manual"""
        display_url(WIKI_HELP_ABOUT_GRAMPLETS)
#-------------------------------------------------------------------------
#
# TabGramplet class
#
#-------------------------------------------------------------------------
class TabGramplet(Gtk.ScrolledWindow, GuiGramplet):
    """
    Class that handles the plugin interfaces for the GrampletBar.

    A scrolled, read-only text view hosting one gramplet as a notebook page.
    """
    def __init__(self, pane, dbstate, uistate, title, **kwargs):
        """
        Internal constructor for GUI portion of a gramplet.
        """
        Gtk.ScrolledWindow.__init__(self)
        GuiGramplet.__init__(self, pane, dbstate, uistate, title, **kwargs)
        # The scrolled window doubles as its own container.
        self.scrolledwindow = self
        self.textview = Gtk.TextView()
        self.textview.set_editable(False)
        self.textview.set_wrap_mode(Gtk.WrapMode.WORD)
        # Undo-capable buffer backs the text view.
        self.buffer = UndoableBuffer()
        self.text_length = 0
        self.textview.set_buffer(self.buffer)
        self.textview.connect("key-press-event", self.on_key_press_event)
        self.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        self.add(self.textview)
        self.show_all()
        self.track = []
    def get_title(self):
        """Return the gramplet's title."""
        return self.title
    def get_container_widget(self):
        """
        Return the top level container widget.
        """
        return self
#-------------------------------------------------------------------------
#
# DetachedWindow class
#
#-------------------------------------------------------------------------
class DetachedWindow(ManagedWindow):
    """
    Class for showing a detached gramplet.

    Hosts a single gramplet, dragged out of the GrampletBar, in its own
    dialog; closing the window docks the gramplet back.
    """
    def __init__(self, grampletbar, gramplet, x_pos, y_pos):
        """
        Construct the window.
        """
        self.title = gramplet.title + " " + _("Gramplet")
        self.grampletbar = grampletbar
        self.gramplet = gramplet
        ManagedWindow.__init__(self, gramplet.uistate, [], self.title)
        dlg = Gtk.Dialog(transient_for=gramplet.uistate.window,
                         destroy_with_parent = True)
        dlg.add_button(_('_Close'), Gtk.ResponseType.CLOSE)
        self.set_window(dlg, None, self.title)
        # Restore the position/size the gramplet had when last detached.
        self.window.move(x_pos, y_pos)
        self.window.set_default_size(gramplet.detached_width,
                                     gramplet.detached_height)
        self.window.add_button(_('_Help'), Gtk.ResponseType.HELP)
        self.window.connect('response', self.handle_response)
        # Tab-less notebook: GTK moves the dragged page into it.
        self.notebook = Gtk.Notebook()
        self.notebook.set_show_tabs(False)
        self.notebook.set_show_border(False)
        self.notebook.connect('page-added', self.page_added)
        self.notebook.show()
        self.window.vbox.pack_start(self.notebook, True, True, 0)
        self.show()
    def page_added(self, notebook, gramplet, page_num):
        """
        Called when the gramplet is added to the notebook. This takes the
        focus from the help button (bug #6306).
        """
        gramplet.grab_focus()
    def handle_response(self, object, response):
        """
        Callback for taking care of button clicks.
        """
        # NOTE(review): the parameter name 'object' shadows the builtin.
        if response == Gtk.ResponseType.CLOSE:
            self.close()
        elif response == Gtk.ResponseType.HELP:
            # translated name:
            if self.gramplet.help_url:
                if self.gramplet.help_url.startswith("http://"):
                    display_url(self.gramplet.help_url)
                else:
                    display_help(self.gramplet.help_url)
            else:
                display_help(WIKI_HELP_PAGE,
                             self.gramplet.tname.replace(" ", "_"))
    def get_notebook(self):
        """
        Return the notebook.
        """
        return self.notebook
    def build_menu_names(self, obj):
        """
        Part of the Gramps window interface.
        """
        return (self.title, 'Gramplet')
    def get_title(self):
        """
        Returns the window title.
        """
        return self.title
    def close(self, *args):
        """
        Dock the detached gramplet back in the GrampletBar from where it came.
        """
        # Remember the detached geometry so re-detaching restores it.
        size = self.window.get_size()
        self.gramplet.detached_width = size[0]
        self.gramplet.detached_height = size[1]
        self.gramplet.detached_window = None
        self.notebook.remove(self.gramplet)
        self.grampletbar.add(self.gramplet)
        ManagedWindow.close(self, *args)
#-------------------------------------------------------------------------
#
# TabLabel class
#
#-------------------------------------------------------------------------
class TabLabel(Gtk.Box):
    """
    Create a tab label consisting of a label and a close button.
    """
    def __init__(self, gramplet, callback):
        Gtk.Box.__init__(self)
        self.text = gramplet.title
        self.set_spacing(4)
        self.label = Gtk.Label()
        self.label.set_tooltip_text(gramplet.tname)
        self.label.show()
        self.closebtn = Gtk.Button()
        image = Gtk.Image()
        image.set_from_icon_name('window-close', Gtk.IconSize.MENU)
        # callback receives (button, gramplet) when the close button is hit.
        self.closebtn.connect("clicked", callback, gramplet)
        self.closebtn.set_image(image)
        self.closebtn.set_relief(Gtk.ReliefStyle.NONE)
        self.pack_start(self.label, True, True, 0)
        self.pack_end(self.closebtn, False, False, 0)
    def set_has_data(self, has_data):
        """
        Set the label to indicate if the gramplet has data.

        The title is rendered bold when data is present.
        """
        if has_data:
            # NOTE(review): self.text is not markup-escaped; a title
            # containing '<' or '&' would produce invalid Pango markup.
            self.label.set_text("<b>%s</b>" % self.text)
            self.label.set_use_markup(True)
        else:
            self.label.set_text(self.text)
    def use_close(self, use_close):
        """
        Display the close button according to user preference.
        """
        if use_close:
            self.closebtn.show()
        else:
            self.closebtn.hide()
def cb_menu_position(*args):
    """
    Determine the (x, y) screen position for the popup menu.

    Returns (x, y, push_in) as expected by a Gtk MenuPositionFunc.
    """
    # Normally called with (menu, button); broken introspection cannot
    # handle MenuPositionFunc annotations correctly and may deliver the
    # button at index 3 instead.
    button = args[1] if len(args) == 2 else args[3]
    allocation = button.get_allocation()
    _, x_pos, y_pos = button.get_window().get_origin()
    x_pos = x_pos + allocation.x
    y_pos = y_pos + allocation.y + allocation.height
    return (x_pos, y_pos, False)
|
gramps-project/gramps
|
gramps/gui/widgets/grampletbar.py
|
Python
|
gpl-2.0
| 28,336
|
#!/usr/bin/env python
#Run the modularized application
from ufb import app, db
if __name__ == "__main__":
    # Development entry point: run the Flask app with debugging enabled.
    app.debug = True
    # Create any missing database tables before serving requests.
    db.create_all(app=app)
    # NOTE(review): debug=True here is redundant with app.debug above.
    app.run(debug=True)
|
mstrisoline/ufb
|
run.py
|
Python
|
gpl-2.0
| 181
|
# buttons code by Ryan Kulla, [email protected]
import gl
import pygame.font
from pygame.display import update
def imgv_button(screen, msg, x, y, where):
    """Render and draw a labelled button, honoring the global hover state.

    Returns the button's padded rect (from do_button) for hit testing.
    """
    font = pygame.font.Font(gl.FONT_NAME, 10)
    # The two branches previously duplicated the render/do_button calls;
    # only the color pair depends on the hover state.
    if gl.BEING_HOVERED:
        text_color, bg_color = gl.BUTTON_TEXTHOVERCOLOR, gl.BUTTON_HOVERCOLOR
    else:
        text_color, bg_color = gl.BUTTON_TEXTCOLOR, gl.BUTTON_BGCOLOR
    ren = font.render(msg, 1, text_color, bg_color)
    return do_button(screen, ren, where, x, y)
def do_button(screen, ren, where, x, y):
    """Position, blit and update a rendered button label.

    where selects an anchor ("topleft", "midtop", "topright", or None);
    x/y, when given, adjust the anchored position. Returns the padded rect.
    """
    # Inflate to give the label some padding around the text.
    ren_rect = ren.get_rect().inflate(20, 10)
    if where == "topleft":
        ren_rect.topleft = screen.get_rect().topleft
        # Use identity comparison with None (PEP 8), not != / ==.
        if x is not None:
            ren_rect[0] = x
        if y is not None:
            ren_rect[1] = y
    if where == "midtop":
        ren_rect.midtop = screen.get_rect().midtop
        if y is not None:
            ren_rect[1] = y
    if where == "topright":
        ren_rect.topright = screen.get_rect().topright
        if x is not None:
            # x is an offset from the right edge here.
            ren_rect[0] = ren_rect[0] - x
        if y is not None:
            ren_rect[1] = y
    if where is None:
        if x is not None:
            ren_rect[0] = x
        if y is not None:
            ren_rect[1] = ren_rect[1] + y
    # Blit at the un-padded size, but refresh the full padded area.
    screen.blit(ren, ren_rect.inflate(-20, -10))
    update(ren_rect)
    return ren_rect
def hover_button(rect, cursor, screen, msg, x, y, where):
    """Redraw a button, setting the global hover flag from the cursor position.

    Both branches previously made the identical imgv_button call; only the
    gl.BEING_HOVERED flag (read inside imgv_button) differs.
    """
    gl.BEING_HOVERED = 1 if rect.collidepoint(cursor) else 0
    imgv_button(screen, msg, x, y, where)
def close_button(screen):
    """Draw a bold "X" close button near the top-right corner of the screen.

    Returns (rect, font) so the caller can hit-test and re-render.
    """
    font = pygame.font.Font(gl.FONT_NAME, 15)
    font.set_bold(1)
    rendered = font.render("X", 1, gl.CLOSE_BUTTONCOLOR)
    rect = rendered.get_rect()
    rect[0] = screen.get_width() - 20  # 20px inset from the right edge
    screen.blit(rendered, rect)
    pygame.display.update(rect)
    return (rect, font)
|
rkulla/imgv
|
imgv/buttons.py
|
Python
|
gpl-2.0
| 1,950
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from types import *
# Compare two values, refusing to compare when either is a string.
# NOTE: Python 2 code -- uses print statements and StringType from the
# deprecated `types` module (star-imported above).
def typer(x,y):
    # type(...) is StringType matches only str, not unicode.
    if type(x) is StringType or type(y) is StringType :
        print u'получена строка'  # "received a string"
    else:
        if x > y:
            print u'больше'  # "greater"
        elif x < y:
            print u'меньше'  # "less"
        else:
            print u'равно'  # "equal"
# Demo calls exercising string and numeric comparisons.
typer("12", 4)
typer("12","4")
typer(12, 4)
typer(4, 45)
typer(4, 4)
|
pybursa/homeworks
|
a_karnauh/hw1/6.py
|
Python
|
gpl-2.0
| 354
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Person.vcard_enabled'
db.add_column(u'aldryn_people_person', 'vcard_enabled',
self.gf('django.db.models.fields.BooleanField')(default=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Person.vcard_enabled'
db.delete_column(u'aldryn_people_person', 'vcard_enabled')
models = {
u'aldryn_people.group': {
'Meta': {'object_name': 'Group'},
'address': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'default': "''", 'max_length': '75', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
},
u'aldryn_people.grouptranslation': {
'Meta': {'unique_together': "[('language_code', 'master')]", 'object_name': 'GroupTranslation', 'db_table': "u'aldryn_people_group_translation'"},
'company_description': ('djangocms_text_ckeditor.fields.HTMLField', [], {'blank': 'True'}),
'company_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['aldryn_people.Group']"})
},
u'aldryn_people.peopleplugin': {
'Meta': {'object_name': 'PeoplePlugin', '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'group_by_group': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'people': ('sortedm2m.fields.SortedManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['aldryn_people.Person']", 'null': 'True', 'blank': 'True'}),
'show_links': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'style': ('django.db.models.fields.CharField', [], {'default': "'standard'", 'max_length': '50'})
},
u'aldryn_people.person': {
'Meta': {'object_name': 'Person'},
'email': ('django.db.models.fields.EmailField', [], {'default': "''", 'max_length': '75', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['aldryn_people.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mobile': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'phone': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'vcard_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'visual': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['filer.Image']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
u'aldryn_people.persontranslation': {
'Meta': {'unique_together': "[('language_code', 'master')]", 'object_name': 'PersonTranslation', 'db_table': "u'aldryn_people_person_translation'"},
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'function': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['aldryn_people.Person']"})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': u"orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': u"orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['filer.File']},
'_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['aldryn_people']
|
Venturi/cms
|
env/lib/python2.7/site-packages/aldryn_people/south_migrations/0013_auto__add_field_person_vcard_enabled.py
|
Python
|
gpl-2.0
| 14,400
|
###############################################################################
# This file is part of openWNS (open Wireless Network Simulator)
# _____________________________________________________________________________
#
# Copyright (C) 2004-2009
# Chair of Communication Networks (ComNets)
# Kopernikusstr. 5, D-52074 Aachen, Germany
# phone: ++49-241-80-27910,
# fax: ++49-241-80-22242
# email: [email protected]
# www: http://www.openwns.org
# _____________________________________________________________________________
#
# openWNS is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License version 2 as published by the
# Free Software Foundation;
#
# openWNS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import openwns.module
import openwns.pyconfig
import openwns.node
import openwns.Buffer
import openwns.ARQ
import openwns.CRC
import openwns.Probe
import openwns.FUN
import openwns.logger
import openwns.SAR
import openwns.Tools
import openwns.Multiplexer
import glue.Glue
import glue.Trigger
import glue.Routing
import glue.BERMeasurementReporting
class CSMACAComponent(glue.Glue.Component):
    """Component with CSMA/CA MAC

    This configuration contains (in addition to lowerConvergence and
    upperConvergence) a dropping buffer with configurable size, a
    Stop-and-Wait ARQ and CRC in order to throw away broken
    packets. Furthermore a CSMA/CA MAC controls the medium access"""

    # Stop-and-Wait ARQ functional unit; kept as an attribute so its
    # command name can be referenced after construction.
    arq = None

    # bufferSize default is 500 KBytes expressed in bits (500*1024*8).
    def __init__(self, node, name, phyDataTransmission, phyNotification, phyDataTransmissionFeedbackName, bufferSize = 500*1024*8):
        super(CSMACAComponent, self).__init__(node, name, phyDataTransmission, phyNotification)
        # probes
        # NOTE(review): perProbe is created but never added to the FUN
        # below -- confirm whether this is intentional.
        perProbe = openwns.Probe.ErrorRate(
            name = "errorRate",
            prefix = "glue.packet",
            errorRateProvider = "lowerConvergence",
            commandName = "packetErrorRate")
        # create Buffer, ARQ and CRC
        # Separate dropping buffers for the unicast and broadcast paths,
        # each with its own loss/size probes.
        unicastBuffer = openwns.FUN.Node("unicastBuffer", openwns.Buffer.Dropping(
            sizeUnit = 'Bit',
            size = bufferSize,
            lossRatioProbeName = 'glue.unicastBufferLoss',
            sizeProbeName = 'glue.unicastBufferSize'))
        broadcastBuffer = openwns.FUN.Node("broadcastBuffer", openwns.Buffer.Dropping(
            sizeUnit = 'Bit',
            size = bufferSize,
            lossRatioProbeName = 'glue.broadcastBufferLoss',
            sizeProbeName = 'glue.broadcastBufferSize'))
        self.arq = openwns.FUN.Node("arq", glue.Glue.StopAndWait(
            phyDataTransmissionFeedbackName = phyDataTransmissionFeedbackName,
            phyNotification = phyNotification,
            parentLogger = self.logger,
            # We wait at least SIFS+SlotTime
            shortResendTimeout = 25E-6,
            longResendTimeout = 44E-6))
        # CRC with 32 Bit (802.11)
        crc = openwns.FUN.Node("crc", openwns.CRC.CRC("lowerConvergence", lossRatioProbeName='glue.crcLoss', CRCsize = 4*8))
        # 24 Byte header (802.11)
        overhead = openwns.Tools.Overhead(overhead = 24*8, commandName = "overhead")
        # MAC needs the ARQ command name to coordinate retransmissions.
        csmaCAMAC = glue.Glue.CSMACAMAC(commandName = "csmaCAMAC", stopAndWaitARQName = self.arq.commandName, phyNotification = self.phyNotification, parentLogger = self.logger)
        self.lowerConvergence = openwns.FUN.Node(
            "lowerConvergence",
            glue.Glue.Lower2Copper(unicastRouting = self.unicastUpperConvergence.commandName,
                                   broadcastRouting = self.broadcastUpperConvergence.commandName,
                                   blocking = False,
                                   parentLogger = self.logger,
                                   enabled = self.loggerEnabled))
        # add Buffer, ARQ and CRC to fun
        self.fun.add(unicastBuffer)
        self.fun.add(broadcastBuffer)
        self.fun.add(self.arq)
        self.fun.add(crc)
        self.fun.add(csmaCAMAC)
        self.fun.add(overhead)
        self.fun.add(self.lowerConvergence)
        # connect unicast path: buffer -> ARQ -> dispatcher
        self.unicastUpperConvergence.connect(unicastBuffer)
        unicastBuffer.connect(self.arq)
        self.arq.connect(self.dispatcher)
        # connect broadcast path: buffer -> dispatcher (no ARQ for broadcast)
        self.broadcastUpperConvergence.connect(broadcastBuffer)
        broadcastBuffer.connect(self.dispatcher)
        # connect common path: dispatcher -> CRC -> MAC -> overhead -> PHY
        self.dispatcher.connect(crc)
        crc.connect(csmaCAMAC)
        csmaCAMAC.connect(overhead)
        overhead.connect(self.lowerConvergence)
|
creasyw/IMTAphy
|
modules/dll/glue/PyConfig/glue/support/CSMACA.py
|
Python
|
gpl-2.0
| 4,953
|
import unittest
import Milter
import sample
import template
import mime
import zipfile
from Milter.test import TestBase
from Milter.testctx import TestCtx
class TestMilter(TestBase,sample.sampleMilter):
    """Test double combining the Milter test harness with the sample milter."""
    def __init__(self):
        # Initialise both bases explicitly; neither cooperates via super().
        TestBase.__init__(self)
        sample.sampleMilter.__init__(self)
class BMSMilterTestCase(unittest.TestCase):
    """Run the sample/template milters over canned mail and virus samples.

    Virus fixtures are shipped password-protected inside test/virus.zip so
    the host's own virus scanner leaves them alone.

    Uses ``assertEqual``/``assertNotEqual`` instead of the deprecated
    ``assertEquals`` alias and the weaker ``assertTrue(a == b)`` form.
    """

    def setUp(self):
        self.zf = zipfile.ZipFile('test/virus.zip','r')
        self.zf.setpassword(b'denatured')

    def tearDown(self):
        self.zf.close()
        self.zf = None

    def testTemplate(self,fname='test2'):
        """The template milter passes a clean message through repeatedly."""
        ctx = TestCtx()
        Milter.factory = template.myMilter
        ctx._setsymval('{auth_authen}','batman')
        ctx._setsymval('{auth_type}','batcomputer')
        ctx._setsymval('j','mailhost')
        count = 10
        while count > 0:
            rc = ctx._connect(helo='milter-template.example.org')
            self.assertEqual(rc,Milter.CONTINUE)
            with open('test/'+fname,'rb') as fp:
                rc = ctx._feedFile(fp)
            milter = ctx.getpriv()
            self.assertFalse(ctx._bodyreplaced,"Message body replaced")
            ctx._close()
            count -= 1

    def testHeader(self,fname='utf8'):
        """A clean message survives the sample milter with its body intact."""
        ctx = TestCtx()
        Milter.factory = sample.sampleMilter
        ctx._setsymval('{auth_authen}','batman')
        ctx._setsymval('{auth_type}','batcomputer')
        ctx._setsymval('j','mailhost')
        rc = ctx._connect()
        self.assertEqual(rc,Milter.CONTINUE)
        with open('test/'+fname,'rb') as fp:
            rc = ctx._feedFile(fp)
        milter = ctx.getpriv()
        self.assertFalse(ctx._bodyreplaced,"Message body replaced")
        fp = ctx._body
        with open('test/'+fname+".tstout","wb") as ofp:
            ofp.write(fp.getvalue())
        ctx._close()

    def testCtx(self,fname='virus1'):
        """A virus sample fed through TestCtx gets its body defanged."""
        ctx = TestCtx()
        Milter.factory = sample.sampleMilter
        ctx._setsymval('{auth_authen}','batman')
        ctx._setsymval('{auth_type}','batcomputer')
        ctx._setsymval('j','mailhost')
        rc = ctx._connect()
        self.assertEqual(rc,Milter.CONTINUE)
        with self.zf.open(fname) as fp:
            rc = ctx._feedFile(fp)
        milter = ctx.getpriv()
        self.assertEqual(milter.user,'batman')
        self.assertNotEqual(milter.auth_type,'batcomputer',"setsymlist failed")
        self.assertEqual(rc,Milter.ACCEPT)
        self.assertTrue(ctx._bodyreplaced,"Message body not replaced")
        fp = ctx._body
        with open('test/'+fname+".tstout","wb") as f:
            f.write(fp.getvalue())
        fp.seek(0)
        msg = mime.message_from_file(fp)
        s = msg.get_payload(1).get_payload()
        milter.log(s)
        ctx._close()

    def testDefang(self,fname='virus1'):
        """Same defang scenario as testCtx, driven via the TestBase harness."""
        milter = TestMilter()
        milter.setsymval('{auth_authen}','batman')
        milter.setsymval('{auth_type}','batcomputer')
        milter.setsymval('j','mailhost')
        rc = milter.connect()
        self.assertEqual(rc,Milter.CONTINUE)
        with self.zf.open(fname) as fp:
            rc = milter.feedFile(fp)
        self.assertEqual(milter.user,'batman',"getsymval failed")
        # setsymlist not working in TestBase, so auth_type is not checked here.
        self.assertEqual(rc,Milter.ACCEPT)
        self.assertTrue(milter._bodyreplaced,"Message body not replaced")
        fp = milter._body
        with open('test/'+fname+".tstout","wb") as f:
            f.write(fp.getvalue())
        fp.seek(0)
        msg = mime.message_from_file(fp)
        s = msg.get_payload(1).get_payload()
        milter.log(s)
        milter.close()

    def testParse(self,fname='spam7'):
        """A plain spam message passes through with no body replacement."""
        milter = TestMilter()
        milter.connect('somehost')
        rc = milter.feedMsg(fname)
        self.assertEqual(rc,Milter.ACCEPT)
        self.assertFalse(milter._bodyreplaced,"Milter needlessly replaced body.")
        fp = milter._body
        with open('test/'+fname+".tstout","wb") as f:
            f.write(fp.getvalue())
        milter.close()

    def testDefang2(self):
        """One connection: a clean message, then two virus samples defanged."""
        milter = TestMilter()
        milter.connect('somehost')
        rc = milter.feedMsg('samp1')
        self.assertEqual(rc,Milter.ACCEPT)
        self.assertFalse(milter._bodyreplaced,"Milter needlessly replaced body.")
        with self.zf.open("virus3") as fp:
            rc = milter.feedFile(fp)
        self.assertEqual(rc,Milter.ACCEPT)
        self.assertTrue(milter._bodyreplaced,"Message body not replaced")
        fp = milter._body
        with open("test/virus3.tstout","wb") as f:
            f.write(fp.getvalue())
        with self.zf.open("virus6") as fp:
            rc = milter.feedFile(fp)
        self.assertEqual(rc,Milter.ACCEPT)
        self.assertTrue(milter._bodyreplaced,"Message body not replaced")
        self.assertTrue(milter._headerschanged,"Message headers not adjusted")
        fp = milter._body
        with open("test/virus6.tstout","wb") as f:
            f.write(fp.getvalue())
        milter.close()
def suite():
    """Build the test suite for this module (all 'test*' methods)."""
    return unittest.makeSuite(BMSMilterTestCase, 'test')
if __name__ == '__main__':
unittest.main()
|
sdgathman/pymilter
|
testsample.py
|
Python
|
gpl-2.0
| 5,060
|
#!/usr/bin/env python
import subprocess
import sys
from gi.repository import GLib, Gio
def main():
    """Inhibit the screensaver, run the wrapped command, then uninhibit.

    Talks to the org.freedesktop.ScreenSaver D-Bus service on the session
    bus; the wrapped command is taken from sys.argv[1:].
    """
    bus = Gio.bus_get_sync(Gio.BusType.SESSION, None)
    proxy = Gio.DBusProxy.new_sync(bus, Gio.DBusProxyFlags.NONE, None,
                                   'org.freedesktop.ScreenSaver', '/ScreenSaver',
                                   'org.freedesktop.ScreenSaver', None)
    # '(ss)' = application name + human-readable reason; returns a cookie
    # that must be passed back to UnInhibit to release the inhibition.
    cookie = proxy.Inhibit('(ss)', sys.argv[1],
                           "Wrapping this command in a screensaver inhibitor")
    print('Inhibited the screensaver')
    try:
        subprocess.call(sys.argv[1:])
    finally:
        # Always release the inhibition, even if the command fails.
        proxy.UnInhibit('(u)', cookie)
        print('UnInhibited the screensaver')
if __name__ == '__main__':
    if len(sys.argv) >= 2:
        main()
    else:
        # Imported lazily; only needed to pretty-print the program name.
        import os.path
        print("usage: {} <program-to-wrap> [arguments to pass to program]"
              .format(os.path.basename(sys.argv[0])))
|
CMaiku/inhibit-screensaver
|
inhibit-screensaver.py
|
Python
|
gpl-2.0
| 851
|
# -*- encoding: utf-8 -*- #
############################################################################
# Module Writen to OpenERP, Open Source Management Solution #
# Copyright (C) Vauxoo (<http://vauxoo.com>). #
# All Rights Reserved #
###############Credits######################################################
# Coded by: Sabrina Romero ([email protected]) #
# Planified by: Nhomar Hernandez ([email protected]) #
# Finance by: COMPANY NAME <EMAIL-COMPANY> #
# Audited by: author NAME LASTNAME <[email protected]> #
############################################################################
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
import base64
import openerp.netsvc as netsvc
import logging
_logger = logging.getLogger(__name__)
class invoice_report_per_journal(osv.TransientModel):
    """
    OpenERP Wizard: invoice.report.per.journal

    Renders an invoice using the report configured on its journal and
    falls back to the default account.invoice report when that fails.
    """
    _name = "invoice.report.per.journal"

    def get_journal_object(self, cr, uid, context=None):
        """Return the journal browse record of the active invoice.

        Raises osv.except_osv when the invoice has no journal configured.
        """
        record_brw = self.pool.get(context['active_model']).browse(
            cr, uid, context['active_ids'][0])
        if not record_brw.journal_id:
            # Bug fix: 'except_osv' was referenced as a bare name (NameError);
            # it lives in the imported ``osv`` module.
            raise osv.except_osv(_('ERROR !'), _(
                'There is no journal configured for this invoice.'))
        return record_brw.journal_id

    def _get_journal(self, cr, uid, context=None):
        """Default for the 'journal' field: name of the invoice's journal."""
        return self.get_journal_object(cr, uid, context=context).name

    def _prepare_service(self, cr, uid, report, context=None):
        """Run *report* for the active records; return (data, format)."""
        service = netsvc.LocalService('report.' + report.report_name)
        (result, format) = service.create(cr, uid, context[
            'active_ids'], {'model': context['active_model']}, {})
        return (result, format)

    def _get_report(self, cr, uid, context=None):
        """Default for 'report_format': base64-encoded report rendering."""
        report = self.get_journal_object(
            cr, uid, context=context).invoice_report_id
        try:
            (result, format) = self._prepare_service(cr, uid, report, context=context)
        except Exception:
            if report:
                _logger.warning("Error occurred in the report, the report set to the journal will be ignored.")
            # Fall back to the first report registered for account.invoice.
            rep_id = self.pool.get("ir.actions.report.xml").search(
                cr, uid, [('model', '=', 'account.invoice'),], order="id",
                context=context)[0]
            report_ = self.pool.get(
                "ir.actions.report.xml").browse(cr, uid, rep_id, context=context)
            (result, format) = self._prepare_service(cr, uid, report_, context=context)
        try:
            # If a companion "<report> txt" window action exists, prefer its
            # plain-text output over the binary rendering.
            act_id = self.pool.get('ir.actions.act_window').search(cr, uid, [('name','=', report.name + ' txt')], context=context)[0]
            if act_id:
                act_brw = self.pool.get('ir.actions.act_window').browse(cr, uid, act_id, context=context)
                wiz_obj = self.pool.get(act_brw.res_model)
                wiz_id = wiz_obj.create(cr, uid, {}, context=context)
                wiz_brw = wiz_obj.browse(cr, uid, wiz_id, context=context)
                result = base64.decodestring(wiz_brw.fname_txt)
        except Exception:
            if report:
                _logger.info("txt report not defined for the report assigned to journal.")
        return base64.encodestring(result)

    def _get_report_name(self, cr, uid, context=None):
        """Name of the report to print, with the same fallback as _get_report."""
        report = self.get_journal_object(cr, uid,
                                         context=context).invoice_report_id
        try:
            (result, format) = self._prepare_service(cr, uid, report, context=context)
        except Exception:
            if report:
                _logger.warning("Error occurred in the report, the report set to the journal will be ignored.")
            rep_id = self.pool.get("ir.actions.report.xml").search(
                cr, uid, [('model', '=', 'account.invoice'),], order="id",
                context=context)[0]
            report = self.pool.get(
                "ir.actions.report.xml").browse(cr, uid, rep_id, context=context)
        return report.report_name

    def print_invoice(self, cr, uid, ids, context=None):
        """Button handler: trigger the report action for the active invoices."""
        return {'type': 'ir.actions.report.xml',
                'report_name': self._get_report_name(cr, uid, context=context),
                'datas': {'ids': context['active_ids']}}

    _columns = {
        # Bug fix: 'requied' was a silently-ignored typo of 'required'.
        'journal': fields.char('Journal', 64, readonly=True, required=True),
        'report_format': fields.binary("Report", readonly=True, required=True)
    }
    _defaults = {
        'journal': _get_journal,
        'report_format': _get_report,
    }
|
3dfxsoftware/cbss-addons
|
invoice_report_per_journal/wizard/invoice_report_per_journal.py
|
Python
|
gpl-2.0
| 5,756
|
# Version string exposed to users; the '.cwop' suffix marks this fork.
__version__ = '19.10.0.cwop'
# Build/release counter and the git commit this release was cut from.
_release = '1665'
_commit = 'd22c19c'
|
3v1n0/pywws
|
src/pywws/__init__.py
|
Python
|
gpl-2.0
| 67
|
##
# Copyright 2012-2016 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for iompi compiler toolchain (includes Intel compilers (icc, ifort) and OpenMPI.
@author: Stijn De Weirdt (Ghent University)
@author: Kenneth Hoste (Ghent University)
"""
from easybuild.toolchains.iccifort import IccIfort
from easybuild.toolchains.mpi.openmpi import OpenMPI
class Iompi(IccIfort, OpenMPI):
    """
    Compiler toolchain with Intel compilers (icc/ifort) and OpenMPI.
    """
    # Toolchain name as used in easyconfig files.
    NAME = 'iompi'
    # iompi extends the compiler-only iccifort toolchain with OpenMPI.
    SUBTOOLCHAIN = IccIfort.NAME
|
hpcleuven/easybuild-framework
|
easybuild/toolchains/iompi.py
|
Python
|
gpl-2.0
| 1,514
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial migration: create the Car table for used-car listings.

    NOTE(review): numeric-looking attributes (car_time, mileage, car_price)
    are stored as CharFields -- presumably raw scraped strings; confirm
    before relying on them for arithmetic or ordering.
    """

    # First migration of the app, so no dependencies.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Car',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('city', models.CharField(max_length=50)),
                ('brand', models.CharField(max_length=50)),
                ('types', models.CharField(max_length=50)),
                ('car_time', models.CharField(max_length=30)),
                ('mileage', models.CharField(max_length=30)),
                ('car_price', models.CharField(max_length=30)),
                ('image_url', models.CharField(max_length=200)),
                ('car_url', models.CharField(max_length=200)),
                ('model', models.CharField(max_length=300)),
                ('transmission_mode', models.CharField(max_length=50)),
                ('have_accident', models.CharField(max_length=10)),
            ],
        ),
    ]
|
TheDavidGithub/mysite
|
car/migrations/0001_initial.py
|
Python
|
gpl-2.0
| 1,116
|
from tests.support import RpgTestCase
from rpg import Base
class GuessTest(RpgTestCase):
    """Tests for Base.guess_name package-name guessing."""

    def setUp(self):
        self.base = Base()

    def test_guess_name(self):
        # A plain directory keeps its name unchanged.
        self.base._input_name = self.test_project_dir
        self.assertEqual(str(self.base.guess_name()), str(self.test_project_dir))
        # Archive suffixes are stripped, including compound ones (.tar.gz).
        self.base._input_name = "vec.zip"
        self.assertEqual(str(self.base.guess_name()), "vec")
        self.base._input_name = "vec.tar.gz"
        self.assertEqual(str(self.base.guess_name()), "vec")
        # Only the outermost archive suffix is removed.
        self.base._input_name = "vec.zip.zip"
        self.assertEqual(str(self.base.guess_name()), "vec.zip")

    def test_guess_name_fail(self):
        # A non-existent path yields an empty guess.
        self.base._input_name = self.test_project_dir / "NotADir"
        self.assertEqual(str(self.base.guess_name()), str(""))
|
regeciovad/rpg
|
tests/unit/test_guess.py
|
Python
|
gpl-2.0
| 794
|
# encoding: utf-8
# module PyKDE4.kio
# from /usr/lib/python3/dist-packages/PyKDE4/kio.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdeui as __PyKDE4_kdeui
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
class KUrlComboBox(__PyKDE4_kdeui.KComboBox):
    """Auto-generated stub for KDE's KUrlComboBox (URL-history combo box).

    Signatures are unknown to the stub generator; all bodies are
    placeholders and exist only for IDE introspection.
    """
    def addDefaultUrl(self, *args, **kwargs): # real signature unknown
        pass

    def maxItems(self, *args, **kwargs): # real signature unknown
        pass

    def mouseMoveEvent(self, *args, **kwargs): # real signature unknown
        pass

    def mousePressEvent(self, *args, **kwargs): # real signature unknown
        pass

    def removeUrl(self, *args, **kwargs): # real signature unknown
        pass

    def setCompletionObject(self, *args, **kwargs): # real signature unknown
        pass

    def setDefaults(self, *args, **kwargs): # real signature unknown
        pass

    def setMaxItems(self, *args, **kwargs): # real signature unknown
        pass

    def setUrl(self, *args, **kwargs): # real signature unknown
        pass

    def setUrls(self, *args, **kwargs): # real signature unknown
        pass

    def urlActivated(self, *args, **kwargs): # real signature unknown
        pass

    def urls(self, *args, **kwargs): # real signature unknown
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    # Enum-like class constants mirrored from the C++ API.
    Both = 0
    Directories = 1
    Files = -1
    Mode = None # (!) real value is ''
    OverLoadResolving = None # (!) real value is ''
    RemoveBottom = 1
    RemoveTop = 0
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247971765/PyKDE4/kio/KUrlComboBox.py
|
Python
|
gpl-2.0
| 1,586
|
#! /usr/bin/python
## pysieved - Python managesieve server
## Copyright (C) 2007 Neale Pickett
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or (at
## your option) any later version.
## This program is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
## USA
import __init__
import warnings
class ScriptStorage(__init__.ScriptStorage):
    """In-memory sieve script storage used by the 'accept' test plugin.

    Scripts are kept in a plain dict keyed by name; at most one script
    may be marked active at a time.
    """

    def __init__(self):
        self.scripts = {}    # name -> script content
        self.active = None   # name of the active script, or None

    def __setitem__(self, k, v):
        self.scripts[k] = v

    def __getitem__(self, k):
        return self.scripts[k]

    def __delitem__(self, k):
        # Deleting the active script would leave a dangling active name.
        if self.active == k:
            raise ValueError('Script is active')
        del self.scripts[k]

    def __iter__(self):
        for k in self.scripts:
            yield k

    def has_key(self, k):
        # Fix: dict.has_key() does not exist on Python 3; 'in' works on both.
        # Method kept for the dict-style API expected by callers.
        return k in self.scripts

    def is_active(self, k):
        return self.active == k

    def set_active(self, k):
        """Mark script *k* active; pass None to deactivate all scripts."""
        # Fix: identity comparison with None per PEP 8 (was 'k != None').
        if k is not None and k not in self.scripts:
            raise KeyError('Unknown script')
        self.active = k
class PysievedPlugin(__init__.PysievedPlugin):
    """Test-only plugin that accepts every request.

    Each hook emits a warning (unless disabled via the [Accept] 'warn'
    option) because this module performs no real authentication.
    """
    def init(self, config):
        # 'warn' defaults to True; set it to False to silence the warnings.
        self.warn = config.getboolean('Accept', 'warn', True)

    def auth(self, params):
        # Accept any credentials.
        if self.warn:
            warnings.warn('The "accept" module is for testing only!')
        return True

    def lookup(self, params):
        # Every user's script directory is /tmp.
        if self.warn:
            warnings.warn('The "accept" module is for testing only!')
        return '/tmp'

    def create_storage(self, params):
        if self.warn:
            warnings.warn('The "accept" module is for testing only!')
        return ScriptStorage()
|
miracle2k/pysieved
|
plugins/accept.py
|
Python
|
gpl-2.0
| 2,161
|
# coding: utf-8
# In[12]:
import os
from shutil import copyfile
import subprocess
from save_embedded_graph27 import main_binary as embed_main
from spearmint_ghsom import main as ghsom_main
import numpy as np
import pickle
from time import time
def save_obj(obj, name):
    """Pickle *obj* to '<name>.pkl' using the highest pickle protocol."""
    with open('{}.pkl'.format(name), 'wb') as handle:
        pickle.dump(obj, handle, pickle.HIGHEST_PROTOCOL)
def load_obj(name):
    """Unpickle and return the object stored at '<name>.pkl'."""
    with open('{}.pkl'.format(name), 'rb') as handle:
        return pickle.load(handle)
#root dir
os.chdir("C:\Miniconda3\Jupyter\GHSOM_simplex_dsd")
#save directory
dir = os.path.abspath("parameter_tests")
#number of times to repeat
num_repeats = 10
#number of nodes in communitiy
s1 = 32
#number of links to same community
z1 = 16
#number of nodes in micro community
minc = s1
maxc = s1
#make save directory
if not os.path.isdir(dir):
os.mkdir(dir)
#change to dir
os.chdir(dir)
#network file names -- output of network generator
network = "network.dat"
first_level = "community.dat"
#community labels
labels = 'firstlevelcommunity'
#mixing parameter
z2 = 16
#node degree
k = z1 + z2
maxk = k
#mixing factors
mu = float(z2) / k
num_communities = [3, 4, 5, 6]
parameter_settings = [0.5, 0.6, 0.7, 0.8, 0.9, 1]
overall_nmi_scores = np.zeros((len(num_communities), len(parameter_settings), num_repeats))
for i in range(len(num_communities)):
# for k1 in num_communities:
k1 = num_communities[i]
#number of nodes in the network
N = k1 * s1
#create directory
dir_string = os.path.join(dir, str(k1))
if not os.path.isdir(dir_string):
os.mkdir(dir_string)
#change working directory
os.chdir(dir_string)
for j in range(len(parameter_settings)):
# for p in parameter_settings:
p = parameter_settings[j]
#ghsom parameters
params = {'w': 0.0001,
'eta': 0.0001,
'sigma': 1,
'e_sg': p,
'e_en': 0.8}
#create directory
dir_string_p = os.path.join(dir_string, str(p))
if not os.path.isdir(dir_string_p):
os.mkdir(dir_string_p)
#change working directory
os.chdir(dir_string_p)
if os.path.isfile('nmi_scores.csv'):
print 'already completed {}/{}, loading scores and continuing'.format(k1, p)
nmi_scores = np.genfromtxt('nmi_scores.csv', delimiter=',')
print nmi_xcores
overall_nmi_scores[i,j,:] = nmi_scores
continue
#copy executable
ex = "benchmark.exe"
if not os.path.isfile(ex):
source = "C:\\Users\\davem\\Documents\\PhD\\Benchmark Graph Generators\\binary_networks\\benchmark.exe"
copyfile(source, ex)
#make benchmark parameter file
filename = "benchmark_flags_{}_{}.dat".format(k1,p)
if not os.path.isfile(filename):
with open(filename,"w") as f:
f.write("-N {} -k {} -maxk {} -minc {} -maxc {} -mu {}".format(N, k, maxk, minc, maxc, mu))
print 'written flag file: {}'.format(filename)
#cmd strings
change_dir_cmd = "cd {}".format(dir_string_p)
generate_network_cmd = "benchmark -f {}".format(filename)
#output of cmd
output_file = open("cmd_output.out", 'w')
#record NMI scores
if not os.path.isfile('nmi_scores.pkl'):
print 'creating new nmi scores array'
nmi_scores = np.zeros(num_repeats)
else:
print 'loading nmi score progress'
nmi_scores = load_obj('nmi_scores')
#record running times
if not os.path.isfile('running_times.pkl'):
print 'creating new running time array'
running_times = np.zeros(num_repeats)
else:
print 'loading running time progress'
running_times = load_obj('running_times')
print
#generate networks
for r in range(1, num_repeats+1):
network_rename = "{}_{}".format(r,network)
first_level_rename = "{}_{}".format(r,first_level)
gml_filename = 'embedded_network_{}.gml'.format(r)
if not os.path.isfile(network_rename):
process = subprocess.Popen(change_dir_cmd + " && " + generate_network_cmd,
stdout=output_file,
stderr=output_file,
shell=True)
process.wait()
print 'generated graph {}'.format(r)
os.rename(network, network_rename)
os.rename(first_level, first_level_rename)
print 'renamed graph {}'.format(r)
if not os.path.isfile(gml_filename):
##embed graph
embed_main(network_rename, first_level_rename)
print 'embedded graph {} as {} in {}'.format(r, gml_filename, os.getcwd())
##score for this network
if not np.all(nmi_scores[r-1]):
start_time = time()
print 'starting ghsom for: {}/{}/{}'.format(k1, p, gml_filename)
nmi_score, communities_detected = ghsom_main(params, gml_filename, labels)
nmi_scores[r-1] = nmi_score
running_time = time() - start_time
print 'running time of algorithm: {}'.format(running_time)
running_times[r-1] = running_time
#save
save_obj(nmi_scores, 'nmi_scores')
save_obj(running_times, 'running_times')
print 'saved nmi score for network {}: {}'.format(gml_filename, nmi_score)
print
##output nmi scores to csv file
print 'writing nmi scores and running times to file'
np.savetxt('nmi_scores.csv',nmi_scores,delimiter=',')
np.savetxt('running_times.csv',running_times,delimiter=',')
print
print 'DONE'
print 'OVERALL NMI SCORES'
print overall_nmi_scores
# In[9]:
for i in range(len(num_communities)):
for j in range(len(parameter_settings)):
scores = overall_nmi_scores[i,j]
# print scores
# idx = np.argsort(scores)[::-1]
# print parameter_settings[idx[0]]
print np.mean(scores)
print np.std(scores) / num_repeats
print
# In[ ]:
|
DavidMcDonald1993/ghsom
|
parameter_tests.py
|
Python
|
gpl-2.0
| 6,711
|
import os, sys
from Products.Archetypes.interfaces.layer import ILayerContainer
from Products.Archetypes.atapi import *
from Products.ATContentTypes.tests.utils import dcEdit
from Products.ATContentTypes.tests.utils import EmptyValidator
from Products.ATContentTypes.tests.utils import EmailValidator
# Under the Zope test runner the framework bootstrap must be loaded first.
if __name__ == '__main__':
    execfile(os.path.join(sys.path[0], 'framework.py'))
from uwoshgrantstestcase import UwoshgrantsTestCase
from Products.CMFCore.WorkflowCore import WorkflowException
class TestUwoshgrantsProposalWorkflow(UwoshgrantsTestCase):
    """Workflow tests for the Proposal content type.

    NOTE(review): several tests below reference ``pro`` although the line
    creating it is commented out; they only "pass" because the resulting
    NameError is swallowed by the bare except clauses. Confirm intent
    before re-enabling.
    """

    def createProosal(self):
        # (sic) method-name typo is kept; callers use this spelling.
        self.login(self._default_user)
        self.portal.invokeFactory(type_name="Proposal", id="testproposalsubmit")
        return self.portal['testproposalsubmit']

    def test_defaults_should_be_correctly_set_and_file_attached(self):
        pro = self.createProosal()
        self.fill_out_proposal(pro)
        pro.invokeFactory(type_name="File", id="10_it_organizations-1.pdf")
        #self.portal_workflow.doActionFor(pro, 'submit')
        #self.portal_workflow.doActionFor(pro, 'sendToGroup')

    def test_transition_submit(self):
        # NOTE(review): 'pro' is undefined here (its creation is commented
        # out), so doActionFor raises NameError and the except branch runs.
        #pro = self.createProosal()
        #self.fill_out_proposal(pro)
        #pro.invokeFactory(type_name="File", id="10_it_organizations-1.pdf")
        try:
            self.portal_workflow.doActionFor( pro, 'submit')
            self.assertEquals(True, False)
        except:
            print "submit failed"
            pass

    def test_transition_sendToGroup(self):
        # NOTE(review): same undefined-'pro' situation as above.
        #pro = self.createProosal()
        #self.fill_out_proposal(pro)
        self.login('director1')
        ##pro.invokeFactory(type_name="File", id="10_it_organizations-1.pdf")
        try:
            pro.setFacultyReviewer(['reviewer1','reviewer2'])
            #import pdb;pdb.set_trace()
            self.portal_workflow.doActionFor( pro, 'sendToGroup')
            self.assertEquals(True, False)
        except:
            print "sendToGroup failed"
            #pass

    # The following tests are disabled via a class-level string literal.
    """
    def test_no_other_roles_should_be_able_to_do_action(self):
        pro = self.createProosal()
        self.login('director1')
        pro.setFacultyReviewer(['Reviewer One','Reviewer Two'])
        self.logout()
        for user in self._all_users:
            if user != 'director1':
                self.login(user)
                self.assertRaises(WorkflowException, self.portal_workflow.doActionFor, pro, 'sendToGroup')
                self.logout()
        try:
            self.portal_workflow.doActionFor( pro, 'sendToGroup')
            self.assertEquals(True, False)
        except WorkflowException, e:
            print "sendToGroup failed",e

    def test_transition_sendToPanel(self):
        pro = self.createProosal()
        self.fill_out_proposal(pro)
        self.login('director1')
        #import pdb;pdb.set_trace()
        #pro.invokeFactory(type_name="File", id="10_it_organizations-33.pdf")
        self.login('director1')
        pro.setFacultyReviewer(['Reviewer One','Reviewer Two'])
        self.portal_workflow.doActionFor( pro, 'sendToPanel')

    def test_transition_sendToProposer(self):
        pro = self.createProosal()
        self.fill_out_proposal(pro)
        self.login('director1')
        #pro.invokeFactory(type_name="File", id="10_it_organizations-4.pdf")
        #self.login('director1')
        #pro.setFacultyReviewer([1,2])
        pro.setProposalApproved(True)
        self.portal_workflow.doActionFor( pro, 'sendToProposer')
    """
def test_suite():
    """Assemble the unittest suite for this module's workflow tests."""
    from unittest import TestSuite, makeSuite
    return TestSuite([makeSuite(TestUwoshgrantsProposalWorkflow)])
if __name__ == '__main__':
framework()
|
uwosh/uwosh.grants
|
tests/testUwoshgrantsProposalWorkflow.py
|
Python
|
gpl-2.0
| 3,768
|
import urllib2
from bs4 import BeautifulSoup
def isSemanticTag(tag):
    """Return True if *tag* is one of the HTML5 semantic elements.

    *tag* is any object with a ``name`` attribute (e.g. a BeautifulSoup
    Tag); suitable as a filter function for ``soup.find_all``.

    The original eight-branch if/elif chain is replaced by a single
    set-membership test -- same result, one expression.
    """
    return tag.name in {'header', 'nav', 'section', 'article',
                        'aside', 'figcaption', 'figure', 'footer'}
# Fetch the page and report every HTML5 semantic tag found in it.
# NOTE: Python 2 script (urllib2, print statements).
req = urllib2.Request('http://uopbustimetable.appspot.com/home.action')
response = urllib2.urlopen(req)
the_page = response.read()
soup = BeautifulSoup(the_page)
#print(soup.prettify())
# For each semantic tag, print a run of blank lines as a visual separator,
# then the tag's name and its children.
for tag in soup.find_all(isSemanticTag):
    print ''
    print ''
    print ''
    print ''
    print ''
    print ''
    print ''
    print ''
    print(tag.name)
    print(tag.contents)
|
AdamHansrod/SemanticTagFinder
|
HTML5TagFinder.py
|
Python
|
gpl-2.0
| 884
|
# Copyright 2008-2010 by Peter Cock. All rights reserved.
#
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Bio.AlignIO support for the "nexus" file format.
You are expected to use this module via the Bio.AlignIO functions (or the
Bio.SeqIO functions if you want to work directly with the gapped sequences).
See also the Bio.Nexus module (which this code calls internally),
as this offers more than just accessing the alignment or its
sequences as SeqRecord objects.
"""
from __future__ import print_function
import sys
# Add path to Bio
sys.path.append('../..')
from Bio.SeqRecord import SeqRecord
from Bio.Nexus import Nexus
from Bio.Align import MultipleSeqAlignment
from Bio.AlignIO.Interfaces import AlignmentWriter
from Bio import Alphabet
__docformat__ = "restructuredtext en"
# You can get a couple of example files here:
# http://www.molecularevolution.org/resources/fileformats/
# This is a generator function!
def NexusIterator(handle, seq_count=None):
    """Returns SeqRecord objects from a Nexus file.

    Thus uses the Bio.Nexus module to do the hard work.

    You are expected to call this function via Bio.SeqIO or Bio.AlignIO
    (and not use it directly).

    NOTE - We only expect ONE alignment matrix per Nexus file,
    meaning this iterator will only yield one MultipleSeqAlignment.

    Raises ValueError if *seq_count* is given and does not match the
    number of taxa found in the file.
    """
    n = Nexus.Nexus(handle)
    if not n.matrix:
        # No alignment found.  Under PEP 479 (Python 3.7+), raising
        # StopIteration inside a generator becomes a RuntimeError, so the
        # generator must be ended with a plain return instead.
        return
    # Bio.Nexus deals with duplicated names by adding a '.copy' suffix.
    # The original names and the modified names are kept in these two lists:
    assert len(n.unaltered_taxlabels) == len(n.taxlabels)
    if seq_count and seq_count != len(n.unaltered_taxlabels):
        raise ValueError("Found %i sequences, but seq_count=%i"
                         % (len(n.unaltered_taxlabels), seq_count))
    # TODO - Can we extract any annotation too?
    records = (SeqRecord(n.matrix[new_name], id=new_name,
                         name=old_name, description="")
               for old_name, new_name
               in zip(n.unaltered_taxlabels, n.taxlabels))
    # All done
    yield MultipleSeqAlignment(records, n.alphabet)
class NexusWriter(AlignmentWriter):
    """Nexus alignment writer.

    Note that Nexus files are only expected to hold ONE alignment
    matrix.

    You are expected to call this class via the Bio.AlignIO.write() or
    Bio.SeqIO.write() functions.
    """
    def write_file(self, alignments):
        """Use this to write an entire file containing the given alignments.

        Arguments:
         - alignments - A list or iterator returning MultipleSeqAlignment objects.
           This should hold ONE and only one alignment.

        Returns the number of alignments written (0 or 1); raises
        ValueError if more than one alignment is supplied.
        """
        align_iter = iter(alignments)  # Could have been a list
        try:
            first_alignment = next(align_iter)
        except StopIteration:
            first_alignment = None
        if first_alignment is None:
            # Nothing to write!
            return 0
        # Check there is only one alignment...
        try:
            second_alignment = next(align_iter)
        except StopIteration:
            second_alignment = None
        if second_alignment is not None:
            raise ValueError("We can only write one Alignment to a Nexus file.")
        # Good. Actually write the single alignment,
        self.write_alignment(first_alignment)
        return 1  # we only support writing one alignment!
    def write_alignment(self, alignment):
        # Creates an empty Nexus object, adds the sequences,
        # and then gets Nexus to prepare the output.
        if len(alignment) == 0:
            raise ValueError("Must have at least one sequence")
        columns = alignment.get_alignment_length()
        if columns == 0:
            raise ValueError("Non-empty sequences are required")
        # Seed Nexus with a minimal header carrying only the datatype;
        # the real ntax/nchar are filled in by write_nexus_data below.
        minimal_record = "#NEXUS\nbegin data; dimensions ntax=0 nchar=0; " \
            + "format datatype=%s; end;" \
            % self._classify_alphabet_for_nexus(alignment._alphabet)
        n = Nexus.Nexus(minimal_record)
        n.alphabet = alignment._alphabet
        for record in alignment:
            n.add_sequence(record.id, str(record.seq))
        # For smaller alignments, don't bother to interleave.
        # For larger alginments, interleave to avoid very long lines
        # in the output - something MrBayes can't handle.
        # TODO - Default to always interleaving?
        n.write_nexus_data(self.handle, interleave=(columns > 1000))
    def _classify_alphabet_for_nexus(self, alphabet):
        """Returns 'protein', 'dna', 'rna' based on the alphabet (PRIVATE).

        Raises an exception if this is not possible."""
        # Get the base alphabet (underneath any Gapped or StopCodon encoding)
        a = Alphabet._get_base_alphabet(alphabet)
        """condition loop below was edited by Ambuj Kumar in order to make
        it align with ConCat"""
        # NOTE(review): classifying by substring-matching on str(type(a)) is
        # fragile -- it breaks if class names change; the upstream Biopython
        # version uses isinstance checks instead.  Kept as-is deliberately.
        if 'Alphabet.Alphabet' not in str(type(a)) and 'Alphabet.ProteinAlphabet' not in str(type(a)) and 'Alphabet.DNAAlphabet' not in str(type(a)) and 'Alphabet.RNAAlphabet' not in str(type(a)) and 'Alphabet.Gapped' not in str(type(a)):
            raise TypeError("Invalid alphabet")
        elif 'Protein' in str(type(a)):
            return "protein"
        elif 'DNA' in str(type(a)):
            return "dna"
        elif 'RNA' in str(type(a)):
            return "rna"
        else:
            # Must be something like NucleotideAlphabet or
            # just the generic Alphabet (default for fasta files)
            raise ValueError("Need a DNA, RNA or Protein alphabet")
if __name__ == "__main__":
    # Ad-hoc smoke tests run only when this module is executed directly.
    from Bio._py3k import StringIO
    print("Quick self test")
    print("")
    print("Repeated names without a TAXA block")
    # Note the duplicated taxon CYS1_DICDI -- exercises the '.copy' renaming.
    handle = StringIO("""#NEXUS
[TITLE: NoName]
begin data;
dimensions ntax=4 nchar=50;
format interleave datatype=protein gap=- symbols="FSTNKEYVQMCLAWPHDRIG";
matrix
CYS1_DICDI -----MKVIL LFVLAVFTVF VSS------- --------RG IPPEEQ----
ALEU_HORVU MAHARVLLLA LAVLATAAVA VASSSSFADS NPIRPVTDRA ASTLESAVLG
CATH_HUMAN ------MWAT LPLLCAGAWL LGV------- -PVCGAAELS VNSLEK----
CYS1_DICDI -----MKVIL LFVLAVFTVF VSS------- --------RG IPPEEQ---X
;
end;
""")
    for a in NexusIterator(handle):
        print(a)
        for r in a:
            print("%r %s %s" % (r.seq, r.name, r.id))
    print("Done")
    print("")
    print("Repeated names with a TAXA block")
    handle = StringIO("""#NEXUS
[TITLE: NoName]
begin taxa
CYS1_DICDI
ALEU_HORVU
CATH_HUMAN
CYS1_DICDI;
end;
begin data;
dimensions ntax=4 nchar=50;
format interleave datatype=protein gap=- symbols="FSTNKEYVQMCLAWPHDRIG";
matrix
CYS1_DICDI -----MKVIL LFVLAVFTVF VSS------- --------RG IPPEEQ----
ALEU_HORVU MAHARVLLLA LAVLATAAVA VASSSSFADS NPIRPVTDRA ASTLESAVLG
CATH_HUMAN ------MWAT LPLLCAGAWL LGV------- -PVCGAAELS VNSLEK----
CYS1_DICDI -----MKVIL LFVLAVFTVF VSS------- --------RG IPPEEQ---X
;
end;
""")
    for a in NexusIterator(handle):
        print(a)
        for r in a:
            print("%r %s %s" % (r.seq, r.name, r.id))
    print("Done")
    print("")
    print("Reading an empty file")
    assert 0 == len(list(NexusIterator(StringIO())))
    print("Done")
    print("")
    print("Writing...")
    # 'a' is the last alignment parsed above; round-trip it through the writer.
    handle = StringIO()
    NexusWriter(handle).write_file([a])
    handle.seek(0)
    print(handle.read())
    # Writing more than one alignment must be rejected.
    handle = StringIO()
    try:
        NexusWriter(handle).write_file([a, a])
        assert False, "Should have rejected more than one alignment!"
    except ValueError:
        pass
|
Ambuj-UF/ConCat-1.0
|
src/Utils/Bio/AlignIO/NexusIO.py
|
Python
|
gpl-2.0
| 7,881
|
#! /usr/bin/env python
#-*- coding: utf-8 -*-
#################################################################
# Copyright (C) 2015 Sean Guo. All rights reserved.
#
# > File Name: < set_English.py >
# > Author: < Sean Guo >
# > Mail: < [email protected] >
# > Created Time: < 2015/03/30 >
# > Last Changed:
# > Description:
#################################################################
from naoqi import ALProxy
# Connection settings for the target Nao robot.
robot_ip = "192.168.1.100"
robot_port = 9559 # default port : 9559
# Proxy to the robot's text-to-speech service.
tts = ALProxy("ALTextToSpeech", robot_ip, robot_port)
tts.setLanguage("English")
tts.say("Hello, world! I am Nao robot!")
# Switching the language pack takes quite a long time, so try not to switch
# languages while the program is running.
|
SeanXP/Nao-Robot
|
python/language/set_English.py
|
Python
|
gpl-2.0
| 753
|
#!/usr/bin/env python
#Copyright 2004,2008 Sebastian Hagen
# This file is part of gonium.
#
# gonium is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# gonium is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import os
import fcntl
class PidFile:
    """A pid-file guarded by an fcntl advisory lock."""

    def __init__(self, filename: bytes = None):
        """Open pid-file.

        When *filename* is omitted it defaults to
        ``basename(sys.argv[0]) + b'.pid'`` as a bytes path.
        """
        if filename is None:
            prog = sys.argv[0]
            if isinstance(prog, str):
                # Get rid of silly unicode names
                prog = prog.encode()
            filename = os.path.basename(prog) + b'.pid'
        # Reuse an existing file in place, otherwise create a fresh one.
        # The feature allowing for calling open() on bytes filenames was added
        # somewhere between CPython 3.0-rc1 and -rc3. This version is written
        # for 3.0 final, so using it should be fine.
        mode = 'r+b' if os.path.exists(filename) else 'wb'
        self.filename = filename
        self.file = open(filename, mode)

    def lock(self, else_die: bool = False):
        """Acquire lock on pid file; if successful, write our pid to it.

        If *else_die* is True, an IOError from an already-held lock is
        turned into a SystemExit instead of propagating.
        """
        try:
            fcntl.lockf(self.file.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError:
            if else_die:
                print('Our pid-file {0} is already locked, aborting.'.format(self.filename,))
                sys.exit(0)
            raise
        # Replace any stale content with just our pid.
        self.file.seek(0)
        self.file.write(ascii(os.getpid()).encode('ascii'))
        self.file.truncate()

    def unlock(self):
        """Release lock on pid file."""
        fcntl.lockf(self.file.fileno(), fcntl.LOCK_UN)
|
sh01/gonium
|
src/pid_filing.py
|
Python
|
gpl-2.0
| 2,162
|
#!/usr/bin/env python
import os
from Game3View import Game3View
from HomeView import HomeView
import HomeController
import sys
from gi.repository import Gtk
from gi.repository import Gdk
from random import randint
from random import shuffle
class Game3Controller:
    """Controller for game 3: match the displayed definition to one of five
    words.  Wires the Game3View buttons to handlers and tracks all round,
    level, score and skip state."""
    def __init__(self, view, parent):
        self.view = view
        self.parent = parent
        #calls the proper method when the button is clicked
        self.view.skip.connect_object("clicked", self.skip_press, "SKIP")
        # Each word button passes its index (as a string) to check_correct.
        self.view.word1.connect_object("clicked", self.check_correct, "0")
        self.view.word2.connect_object("clicked", self.check_correct, "1")
        self.view.word3.connect_object("clicked", self.check_correct, "2")
        self.view.word4.connect_object("clicked", self.check_correct, "3")
        self.view.word5.connect_object("clicked", self.check_correct, "4")
        self.back_button_signal = self.view.navBar.button.connect("clicked", self.home_page)
        # Fields of the controller
        self.numGuesses = 1
        self.level = 1
        self.score = 0
        self.skipsLeft = 3
        self.definitions = []
        self.Words = []
        self.roundList = []    # the 5 words shown this round
        self.picked = []       # indices into Words/definitions for this round
        self.def_array = []    # index of the word behind each button position
        self.totalScore = 0
        self.isNext = False    # True while the skip button is acting as "NEXT"
        self.gotPoints = False
        self.nextLevel = False
        self.view.skip.set_label("SKIP\n(" + str(self.skipsLeft) + " Left)")
        self.generate_level()
    #loads the words and definitions, then sets up the level
    def generate_level(self):
        self.get_correct(self.level)
        self.load_level_definitions(self.level)
        self.make_round()
    #resets the resultLabel and skip button to initial value, and resets gotPoints
    #to false. Sets up the words to display on the buttons and definition to display
    def make_round(self):
        self.view.resultLabel.set_text("")
        self.view.skip.set_label("SKIP\n(" + str(self.skipsLeft) + " Left)")
        self.numGuesses = 1
        self.gotPoints = False
        self.roundList = []
        self.picked = []
        self.def_array = []
        #gets 5 unique words for the round, and the correspoinding defintions
        while len(self.roundList) < 5:
            x = randint(0,len(self.Words)-1)
            if x not in self.picked:
                self.roundList.append(self.Words[x])
                self.def_array.append(x)
                self.picked.append(x)
        # Shuffle so the definition shown (picked[0]) is a random one of the 5.
        shuffle(self.picked)
        self.view.def1.set_markup("<span size='14000'><b> Definition: " + self.definitions[self.picked[0]] + "</b></span>")
        self.view.word1.set_label(self.roundList[0])
        self.view.word2.set_label(self.roundList[1])
        self.view.word3.set_label(self.roundList[2])
        self.view.word4.set_label(self.roundList[3])
        self.view.word5.set_label(self.roundList[4])
    #negates the skipLeft decrease if the label is currently next
    #makes a new round while skips are left and increments variables accordingly
    #When it is the end of the level, resets the screen
    def skip_press(self,widget):
        if self.isNext:
            self.skipsLeft += 1
            self.isNext = False
        if self.skipsLeft > 0:
            self.make_round()
            self.skipsLeft = self.skipsLeft - 1
            self.totalScore +=10
            self.view.skip.set_label("SKIP\n(" + str(self.skipsLeft) + " Left)")
        else:
            self.view.resultLabel.set_text("No Skips Left!")
        if self.nextLevel:
            #puts the widgets back on the screen for the next level
            self.nextLevel = False
            self.view.label.set_text("LEVEL " + str(self.level))
            self.view.word1.show()
            self.view.word2.show()
            self.view.word3.show()
            self.view.word4.show()
            self.view.word5.show()
            self.view.def1.show()
            self.generate_level()
    #if def matches word, updates variables accordingly and deletes the word and def from the array
    #when there are less than 5 words left, end the level
    def check_correct(self,widget):
        #checks if number matches the number at int(widget) index
        if self.numGuesses == 0:
            self.endLevel()
            self.view.label.set_markup("<span size='10000'><b>Incorrect. Too many guesses</b></span>")
            self.skipsLeft += 1
            #self.nextLevel = False
        if self.picked[0] == self.def_array[int(widget)] and self.isNext==False:
            self.view.resultLabel.set_markup("<span size='10000'><b>CORRECT!</b></span>")
            self.updateScore(10)
            self.view.skip.set_label("NEXT")
            self.isNext = True
            self.gotPoints = True
            # Remove the solved pair so it cannot be picked again this level.
            del self.definitions[self.picked[0]]
            del self.Words[self.picked[0]]
        else:
            if self.gotPoints == False:
                if self.numGuesses > 0:
                    self.view.resultLabel.set_markup("<span size='10000'><b>INCORRECT! " + str(self.numGuesses) + " left.</b></span>")
                self.numGuesses -= 1
        #the player answered enough correctly to move on.
        if len(self.definitions) <= 5:
            self.level += 1
            self.totalScore +=10
            self.endLevel()
    #hides the variables to display the results from the level
    def endLevel(self):
        self.view.word1.hide()
        self.view.word2.hide()
        self.view.word3.hide()
        self.view.word4.hide()
        self.view.word5.hide()
        #need the self.level-1 since we already incremented it
        self.view.label.set_text("Level " +str(self.level-1) + " completed. You have scored " + str(self.score) + " out of " + str(self.totalScore) + " points.")
        self.view.def1.hide()
        self.view.resultLabel.set_text("")
        self.view.skip.set_label("Continue")
        self.nextLevel = True
    # This function takes in a file name and load all the words from the corresponding file
    # NOTE(review): strips the last character of each line, assuming a trailing
    # newline -- a final line without one would be truncated; the file handle
    # is also never closed.  TODO confirm/fix upstream.
    def load_file(self, filename):
        file = open(filename)
        word = file.readline()
        wordlist = []
        while len(word) > 0:
            wordlist.append(word[:len(word)-1])
            word = file.readline()
        return wordlist
    # This function takes in a file name and load all the words from the corresponding file
    def get_correct(self, level):
        self.Words = self.load_file("Game2-CorrectLevel" + str(level))
    # This function takes in a file name and load all the words from the corresponding file
    def load_level_definitions(self, level):
        self.definitions = self.load_file("CorrectlySpelled - Definitions" + str(level))
    #increates the score when points have no already been awarded
    def updateScore(self, increment):
        self.score += increment
        self.view.scoreLabel.set_text("SCORE: " + str(self.score))
    def home_page(self, button):
        """Navigate back to the home screen by swapping in the home view."""
        self.view = HomeView(self.parent)
        self.controller = HomeController.HomeController(self.view, self.parent)
|
nguyenla/Spelling
|
Game3Controller.py
|
Python
|
gpl-2.0
| 7,003
|
# Generated by Django 2.2.13 on 2021-09-28 11:02
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.13: adds the Band.hidden_in_catalog flag,
    # used to hide from the public listing bands that are not local but whose
    # profile exists for festival/series pages.  Do not edit by hand.

    dependencies = [
        ('bands', '0048_band_profile_thumb'),
    ]

    operations = [
        migrations.AddField(
            model_name='band',
            name='hidden_in_catalog',
            field=models.BooleanField(default=False, help_text='Ocultar el perfil del listado, para bandas que no son de Alcala pero se crea su perfil para ciclos y festivales', verbose_name='Oculto en el listado principal'),
        ),
    ]
|
InsulaCoworking/MusicCity
|
bands/migrations/0049_band_hidden_in_catalog.py
|
Python
|
gpl-2.0
| 567
|
#!/usr/bin/env python
from setuptools import setup, find_packages
# Packaging metadata for the netapp_metrics distribution; consumed by
# pip/setuptools when building or installing the package.
setup(
    name='netapp_metrics',
    version='0.1.1',
    description='NetApp OnTAP API wrapper',
    long_description='API wrapper for NetApp OnTAP API that understands '
                     'between cluster mode (C-mode) and non-cluster (7-mode) mode',
    url='https://github.com/allyunion/netapp_metrics',
    author='Jason Y. Lee',
    author_email='[email protected]',
    license='GPLv2',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Libraries',
        'License :: OSI Approved :: GNU General Public License v2',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
    keywords='netapp ontap api metrics wrapper development',
    # Ship every package except docs/tests scaffolding.
    packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
)
|
allyunion/netapp_metrics
|
setup.py
|
Python
|
gpl-2.0
| 1,200
|
#
# Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import os
import mutlib
from mysql.utilities.common.server import Server
from mysql.utilities.exception import UtilError, MUTLibError
class test(mutlib.System_test):
    """clone server parameters

    This test exercises the parameters for mysqlserverclone
    """
    def check_prerequisites(self):
        # One running server is enough for all the clone cases below.
        return self.check_num_servers(1)
    def setup(self):
        # No setup needed
        self.new_server = None
        return True
    def _test_server_clone(self, cmd_str, comment, kill=True, capture_all=False):
        """Run one mysqlserverclone invocation and record its output.

        cmd_str     -- base command line (port/datadir options appended here)
        comment     -- label written into the results
        kill        -- if True, connect to the spawned server and stop it
        capture_all -- if True, record all output lines, not just '#' lines
        Raises MUTLibError when the clone command or the follow-up
        connection fails.
        """
        self.results.append(comment+"\n")
        port1 = int(self.servers.get_next_port())
        cmd_str += " --new-port=%d " % port1
        full_datadir = os.path.join(os.getcwd(), "temp_%s" % port1)
        cmd_str += " --new-data=%s --delete " % full_datadir
        res = self.exec_util(cmd_str, "start.txt")
        for line in open("start.txt").readlines():
            # Don't save lines that have [Warning] or don't start with #
            index = line.find("[Warning]")
            if capture_all or (index <= 0 and line[0] == '#'):
                self.results.append(line)
        if res:
            raise MUTLibError("%s: failed" % comment)
        # Create a new instance
        conn = {
            "user" : "root",
            "passwd" : "root",
            "host" : "localhost",
            "port" : port1,
            "unix_socket" : full_datadir + "/mysql.sock"
        }
        if os.name != "posix":
            # Windows has no unix sockets; fall back to TCP only.
            conn["unix_socket"] = None
        server_options = {
            'conn_info' : conn,
            'role' : "cloned_server_2",
        }
        self.new_server = Server(server_options)
        if self.new_server is None:
            return False
        if kill:
            # Connect to the new instance
            try:
                self.new_server.connect()
            except UtilError, e:
                self.new_server = None
                raise MUTLibError("Cannot connect to spawned server.")
            self.servers.stop_server(self.new_server)
        self.servers.clear_last_port()
        return True
    def run(self):
        """Execute every clone-parameter test case in sequence."""
        self.res_fname = "result.txt"
        base_cmd = "mysqlserverclone.py --server=%s --root-password=root " % \
                   self.build_connection_string(self.servers.get_server(0))
        test_cases = [
            # (comment, command options, kill running server)
            ("show help", " --help ", False, True),
            ("write command to file", " --write-command=startme.sh ",
             True, False),
            ("write command to file shortcut", " -w startme.sh ", True, False),
            ("verbosity = -v", " -v ", True, False),
            ("verbosity = -vv", " -vv ", True, False),
            ("verbosity = -vvv", " -vvv ", True, False),
            ("-vvv and write command to file shortcut",
             " -vvv -w startme.sh ", True, False),
        ]
        test_num = 1
        for row in test_cases:
            new_comment = "Test case %d : %s" % (test_num, row[0])
            if not self._test_server_clone(base_cmd + row[1],
                                           new_comment, row[2], row[3]):
                raise MUTLibError("%s: failed" % new_comment)
            test_num += 1
        # Mask machine-specific paths/versions so results compare stably.
        self.replace_result("# -uroot", "# -uroot [...]\n")
        self.replace_result("# mysqld:",
                            "# mysqld: XXXXXXXXXXXX\n")
        self.replace_result("# mysqladmin:",
                            "# mysqladmin: XXXXXXXXXXXX\n")
        self.replace_result("# mysql_system_tables.sql:",
                            "# mysql_system_tables.sql: XXXXXXXXXXXX\n")
        self.replace_result("# mysql_system_tables_data.sql:",
                            "# mysql_system_tables_data.sql: XXXXXXXXXXXX\n")
        self.replace_result("# mysql_test_data_timezone.sql:",
                            "# mysql_test_data_timezone.sql: XXXXXXXXXXXX\n")
        self.replace_result("# fill_help_tables.sql:",
                            "# fill_help_tables.sql: XXXXXXXXXXXX\n")
        self.remove_result("# trying again...")
        return True
    def get_result(self):
        return self.compare(__name__, self.results)
    def record(self):
        return self.save_result_file(__name__, self.results)
    def _remove_file(self, filename):
        # Best-effort delete: missing files are fine during cleanup.
        try:
            os.unlink(filename)
        except:
            pass
    def cleanup(self):
        files = [self.res_fname, "start.txt", "startme.sh"]
        for file in files:
            self._remove_file(file)
        return True
|
dannykopping/mysql-utilities
|
mysql-test/t/clone_server_parameters.py
|
Python
|
gpl-2.0
| 5,484
|
from django.conf.urls import patterns, url
import views as views
# All routes are currently disabled; this app exposes no URLs.
urlpatterns = patterns('',
    #url(r'^$', views.index, name='index'),
    #url(r'index.html', views.index, name='index')
)
|
jxp360/golfapp
|
golfapp/apps/piffycup/urls.py
|
Python
|
gpl-2.0
| 191
|
""" This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Special thanks for help with this resolver go out to t0mm0, jas0npc,
mash2k3, Mikey1234,voinage and of course Eldorado. Cheers guys :)
"""
import re
from t0mm0.common.net import Net
from urlresolver.plugnplay.interfaces import UrlResolver
from urlresolver.plugnplay.interfaces import PluginSettings
from urlresolver.plugnplay import Plugin
from urlresolver import common
import xbmc
from lib import jsunpack
net = Net()
USER_AGENT='Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:30.0) Gecko/20100101 Firefox/30.0'
class vidto(Plugin, UrlResolver, PluginSettings):
    """URL resolver plugin for the vidto.me file host."""
    implements = [UrlResolver, PluginSettings]
    name = "vidto"
    domains = [ "vidto.me" ]
    def __init__(self):
        # Plugin priority is user-configurable; default to 100.
        p = self.get_setting('priority') or 100
        self.priority = int(p)
        self.net = Net()
    def get_media_url(self, host, media_id):
        """Resolve a vidto.me media id to a direct stream URL.

        Submits the host's interstitial form, unpacks the packed JS player
        config, and returns the highest-resolution file link found.  On any
        failure, logs and returns the plugin's 'unresolvable' marker.
        """
        try:
            web_url = self.get_url(host, media_id)
            headers = {
                'Referer': web_url,
                'User-Agent': USER_AGENT
            }
            html = self.net.http_GET(web_url).content
            # Collect the hidden form fields needed to re-POST the page.
            data = {}
            r = re.findall(r'type="hidden" name="(.+?)" value="(.+?)"', html)
            if r:
                for name, value in r:
                    data[name] = value
            data['referer'] = web_url
            data['imhuman']='Proceed to video'
            xbmc.sleep(6000) # don't replace with countdown, crashes on linux
            html = net.http_POST(web_url, data, headers=headers).content
            match = re.search('(eval\(function.*)\s*</script>', html, re.DOTALL)
            if match:
                packed_data = match.group(1)
                js_data = jsunpack.unpack(packed_data)
                # Pick the stream with the highest "<N>p" quality label.
                max_label=0
                stream_url = ''
                for match in re.finditer('label:\s*"(\d+)p"\s*,\s*file:\s*"([^"]+)', js_data):
                    label, link = match.groups()
                    if int(label)>max_label:
                        stream_url = link
                        max_label = int(label)
                if stream_url:
                    return stream_url
                else:
                    raise Exception("File Link Not Found")
            else:
                raise Exception("Packed Data Not Found")
        except Exception, e:
            common.addon.log('**** Vidto Error occured: %s' % e)
            common.addon.show_small_popup('Error', str(e), 5000, '')
            return self.unresolvable(code=0, msg='Exception: %s' % e)
    def get_url(self, host, media_id):
        """Build the watch-page URL for a media id."""
        return 'http://vidto.me/%s.html' % media_id
    def get_host_and_id(self, url):
        """Split a vidto URL into (host, media_id), or return False."""
        r = re.search('//(.+?)/(?:embed-)?([0-9A-Za-z]+)',url)
        if r:
            return r.groups()
        else:
            return False
    def valid_url(self, url, host):
        """Return truthy when this plugin can handle *url*/*host*."""
        if self.get_setting('enabled') == 'false': return False
        return (re.match('http://(www.)?vidto.me/' +
                         '[0-9A-Za-z]+', url) or 'vidto.me' in host)
|
VioletRed/script.module.urlresolver
|
lib/urlresolver/plugins/vidto.py
|
Python
|
gpl-2.0
| 3,698
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from fotos import views as fotos
from index import views as inicio
# URL routing for the qpasacix project: Django admin, photo album views,
# and the index/share landing pages.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'qpasacix.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^albumes/', fotos.obtienealbumes, name='obtienealbumes'),
    url(r'^album/(\d+)/', fotos.muestraalbum, name='muestraalbum'),
    url(r'^index/', inicio.inicio, name='inicio'),
    url(r'^comparte/', inicio.compartir, name='compartir'),
)
|
christianmtr/qpasacix
|
qpasacix/urls.py
|
Python
|
gpl-2.0
| 584
|
# -*- coding: utf-8 -*-
#
# (DC)² - DataCenter Deployment Control
# Copyright (C) 2010, 2011, 2012, 2013, 2014 Stephan Adig <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
class KerberosError(Exception):
    """Base class for Kerberos-related authentication failures."""
class KerberosTicketExpired(KerberosError):
    """Raised when the Kerberos ticket has expired and must be renewed."""
|
sadig/DC2
|
components/dc2-lib/dc2/lib/exceptions/authentication.py
|
Python
|
gpl-2.0
| 948
|
#coding:utf-8
import urllib
import sys
# Compare SEO keyword/pageview dumps from 2014 and 2015 (tab-separated
# "keyword<TAB>pv" lines), then report distribution statistics for shared
# and year-only keywords.  NOTE: Python 2 script (print statements).
file2014 = open('2014Kresult.txt','r').readlines()
file2015 = open('2015Kresult.txt','r').readlines()
result = open('2015result.txt','w')
dic2014 = {}
dic2015 = {}
dic = {}
uvCount = 0
pvCount = 0
ec = 0
c = 0
pv2014 = 0
pv2015 = 0
# Aggregate 2014 keywords, de-duplicating and summing their pageviews.
for line in file2014:
    try:
        kw, pv = line.split('\t')
        if kw in dic2014:
            dic2014[kw] = dic2014[kw] + int(pv[:-1])
        else:
            dic2014[kw] = int(pv[:-1])
        pv2014 = pv2014 + int(pv[:-1])
    except:
        print line
# Aggregate 2015 keywords, de-duplicating and summing their pageviews.
for line in file2015:
    try:
        kw, pv = line.split('\t')
        if kw in dic2015:
            dic2015[kw] = dic2015[kw] + int(pv[:-1])
        else:
            dic2015[kw] = int(pv[:-1])
        pv2015 = pv2015 + int(pv[:-1])
    except:
        print line
print pv2014, pv2015
# Compute the intersection and the two set differences between the years.
dif2015 = list(set(dic2015) - set(dic2014))
dif2014 = list(set(dic2014) - set(dic2015))
same = list(set(dic2014)&set(dic2015))
print '2015 and 2014:', len(dic2015), len(dic2014)
print 'same:',len(same), 'dif2014:', len(dif2014), 'dif2015:',len(dif2015)
# Location vocabularies used to classify keywords by place granularity.
citys = open('cdsx/city.txt','r').readlines()
districts = open('cdsx/district.txt','r').readlines()
streets = open('cdsx/street.txt','r').readlines()
xiaoqus = open('cdsx/xiaoqu.txt','r').readlines()
zufangKw = ['租房','出租']
dis = []
city = []
street = []
xiaoqu = []
for line in citys:
    dis.append(line[:-1])
for line in districts:
    city.append(line[:-1])
for line in streets:
    street.append(line[:-1])
for line in xiaoqus:
    xiaoqu.append(line[:-1])
'''
def dicGenerator(dim=[]):
    dic = {}
    dimLength = len(dim)
    if dimLength == 0:
        return dic
    else:
        for i in xrange(0,dimLength):
            pass
'''
# Pre-build a nested stats dict: year -> same/dif -> brand flag -> count/sum.
dic = {}
kwType = ['same','dif']
years = [2014,2015]
statType = ['count','sum']
isBrand = ['isBrand', 'notBrand']
for y in years:
    dic[y] = {}
    for k in kwType:
        dic[y][k] = {}
        for b in isBrand:
            dic[y][k][b] = {}
            for t in statType:
                dic[y][k][b][t] = {}
count2014 = 0
count2015 = 0
sum2014 = 0
sum2015 = 0
c2014 = 0
c2015 = 0
s2014 = 0
s2015 = 0
# Distribution stats over the shared keywords (edit the vocabulary used
# below -- city / district / street -- to change granularity).
for kw in same:
    '''sum2014 = sum2014 + dic2014[kw]
    sum2015 = sum2015 + dic2015[kw]
    if kw.find('赶集') != -1:
        c2014 = c2014 + 1
        s2014 = s2014 + dic2014[kw]
        s2015 = s2015 + dic2015[kw]
        continue'''
    #result.write(kw + '\t' + str(dic2014[kw]) + '\n')
    #pass
    for k in zufangKw:
        for p in city:
            if kw.find(p) != -1 and kw.find(k) != -1:
                count2014 = count2014 + 1
                sum2014 = sum2014 + dic2014[kw]
                sum2015 = sum2015 + dic2015[kw]
#print sum2015,'isBrand(count 2014uv 2015uv)',c2014,s2014,s2015
print 'notBrand(count 2014uv 2015uv)',count2014,sum2014,sum2015
# NOTE(review): everything below is unreachable because of this exit.
sys.exit()
# Distribution stats over the 2015-only keywords.
for kw in dif2015:
    #if kw.find('赶集') != -1:
    result.write(kw + '\t' + str(dic2015[kw]) + '\n')
    '''c2015 = c2015 + 1
    s2015 = s2015 + dic2015[kw]
    '''
    '''for k in zufangKw:
        for p in street:
            if kw.find(p) != -1 and kw.find(k) != -1:
                count2015 = count2015 + 1
                sum2015 = sum2015 + dic2015[kw]'''
print count2014,count2015
print sum2014,sum2015
#print c2014,s2014,c2015,s2015
'''for kw in dic:
    result.write(kw + '\t' + str(dic[kw]) + '\n')'''
|
hfutsuchao/Python2.6
|
SEOKeywordsAnalysis/KWComp.py
|
Python
|
gpl-2.0
| 3,699
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.