| repo_name (string, 5–92 chars) | path (string, 4–232 chars) | copies (string, 19 classes) | size (string, 4–7 chars) | content (string, 721–1.04M chars) | license (string, 15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51–99.9) | line_max (int64, 15–997) | alpha_frac (float64, 0.25–0.97) | autogenerated (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|
| 
	agabert/zeus | 
	stages/keystone/fabfile.py | 
	1 | 
	5050 | 
	
import os
from zeus.config import ConfigManager
from zeus.common import FabricManager
from zeus.common import PasswordManager
from zeus.configmanagement import ConfigEditor
from zeus.ubuntu import RepoManager
from zeus.services import ServiceControl
from fabric.api import parallel, roles, run, env
metadata = ConfigManager(os.environ["CONFIGFILE"])
passwords = PasswordManager(os.environ["PASSWORDCACHE"]).passwords
FabricManager.setup(metadata.roles_ports)
#
# http://docs.openstack.org/mitaka/install-guide-ubuntu/keystone-install.html
#
@parallel
@roles('openstack_keystone')
def keystone():
    run("""
echo "manual" > /etc/init/keystone.override
""")
    RepoManager.install("keystone")
    run("""
systemctl stop keystone; echo
systemctl disable keystone; echo
""")
    RepoManager.install("apache2")
    RepoManager.install("libapache2-mod-wsgi")
    ConfigEditor.setKey(
        "/etc/keystone/keystone.conf",
        "DEFAULT",
        "admin_token",
        passwords["ADMIN_TOKEN"])
    ConfigEditor.setKey(
        "/etc/keystone/keystone.conf",
        "database",
        "connection",
        "mysql+pymysql://keystone:%s@%s/keystone" % (
            passwords["KEYSTONE_DBPASS"],
            metadata.servers[metadata.roles['openstack_mysql'][0]]['ip']))
    ConfigEditor.setKey(
        "/etc/keystone/keystone.conf",
        "token",
        "provider",
        "fernet")
    run("""
su -s /bin/sh -c "keystone-manage db_sync" keystone
""")
    run("""
keystone-manage fernet_setup --keystone-user keystone --keystone-group keystone
""")
    run("""
uudecode -o /etc/apache2/apache2.conf <<EOF
begin-base64-encoded 644 YXBhY2hlMi5jb25m
U2VydmVyTmFtZSBYWFhYWFhYWFgKTXV0ZXggZmlsZToke0FQQUNIRV9MT0NL
X0RJUn0gZGVmYXVsdApQaWRGaWxlICR7QVBBQ0hFX1BJRF9GSUxFfQpUaW1l
b3V0IDMwMApLZWVwQWxpdmUgT24KTWF4S2VlcEFsaXZlUmVxdWVzdHMgMTAw
CktlZXBBbGl2ZVRpbWVvdXQgNQpVc2VyICR7QVBBQ0hFX1JVTl9VU0VSfQpH
cm91cCAke0FQQUNIRV9SVU5fR1JPVVB9Ckhvc3RuYW1lTG9va3VwcyBPZmYK
RXJyb3JMb2cgJHtBUEFDSEVfTE9HX0RJUn0vZXJyb3IubG9nCkxvZ0xldmVs
IHdhcm4KSW5jbHVkZU9wdGlvbmFsIG1vZHMtZW5hYmxlZC8qLmxvYWQKSW5j
bHVkZU9wdGlvbmFsIG1vZHMtZW5hYmxlZC8qLmNvbmYKSW5jbHVkZSBwb3J0
cy5jb25mCjxEaXJlY3RvcnkgLz4KCU9wdGlvbnMgRm9sbG93U3ltTGlua3MK
CUFsbG93T3ZlcnJpZGUgTm9uZQoJUmVxdWlyZSBhbGwgZGVuaWVkCjwvRGly
ZWN0b3J5Pgo8RGlyZWN0b3J5IC91c3Ivc2hhcmU+CglBbGxvd092ZXJyaWRl
IE5vbmUKCVJlcXVpcmUgYWxsIGdyYW50ZWQKPC9EaXJlY3Rvcnk+CjxEaXJl
Y3RvcnkgL3Zhci93d3cvPgoJT3B0aW9ucyBJbmRleGVzIEZvbGxvd1N5bUxp
bmtzCglBbGxvd092ZXJyaWRlIE5vbmUKCVJlcXVpcmUgYWxsIGdyYW50ZWQK
PC9EaXJlY3Rvcnk+CkFjY2Vzc0ZpbGVOYW1lIC5odGFjY2Vzcwo8RmlsZXNN
YXRjaCAiXlwuaHQiPgoJUmVxdWlyZSBhbGwgZGVuaWVkCjwvRmlsZXNNYXRj
aD4KTG9nRm9ybWF0ICIldjolcCAlaCAlbCAldSAldCBcIiVyXCIgJT5zICVP
IFwiJXtSZWZlcmVyfWlcIiBcIiV7VXNlci1BZ2VudH1pXCIiIHZob3N0X2Nv
bWJpbmVkCkxvZ0Zvcm1hdCAiJWggJWwgJXUgJXQgXCIlclwiICU+cyAlTyBc
IiV7UmVmZXJlcn1pXCIgXCIle1VzZXItQWdlbnR9aVwiIiBjb21iaW5lZApM
b2dGb3JtYXQgIiVoICVsICV1ICV0IFwiJXJcIiAlPnMgJU8iIGNvbW1vbgpM
b2dGb3JtYXQgIiV7UmVmZXJlcn1pIC0+ICVVIiByZWZlcmVyCkxvZ0Zvcm1h
dCAiJXtVc2VyLWFnZW50fWkiIGFnZW50CkluY2x1ZGVPcHRpb25hbCBjb25m
LWVuYWJsZWQvKi5jb25mCkluY2x1ZGVPcHRpb25hbCBzaXRlcy1lbmFibGVk
LyouY29uZgo=
====
EOF
""")
    hostname = env.host_string.split(":")[0]
    run("""
sed -i 's,XXXXXXXXX,%s,g;' /etc/apache2/apache2.conf
""" % hostname)
    run("""
uudecode -o /etc/apache2/sites-available/wsgi-keystone.conf<<EOF
begin-base64-encoded 644 d3NnaS1rZXlzdG9uZS5jb25m
Ckxpc3RlbiA1MDAwCkxpc3RlbiAzNTM1NwoKPFZpcnR1YWxIb3N0ICo6NTAw
MD4KICAgIFdTR0lEYWVtb25Qcm9jZXNzIGtleXN0b25lLXB1YmxpYyBwcm9j
ZXNzZXM9NSB0aHJlYWRzPTEgdXNlcj1rZXlzdG9uZSBncm91cD1rZXlzdG9u
ZSBkaXNwbGF5LW5hbWU9JXtHUk9VUH0KICAgIFdTR0lQcm9jZXNzR3JvdXAg
a2V5c3RvbmUtcHVibGljCiAgICBXU0dJU2NyaXB0QWxpYXMgLyAvdXNyL2Jp
bi9rZXlzdG9uZS13c2dpLXB1YmxpYwogICAgV1NHSUFwcGxpY2F0aW9uR3Jv
dXAgJXtHTE9CQUx9CiAgICBXU0dJUGFzc0F1dGhvcml6YXRpb24gT24KICAg
IEVycm9yTG9nRm9ybWF0ICIle2N1fXQgJU0iCiAgICBFcnJvckxvZyAvdmFy
L2xvZy9hcGFjaGUyL2tleXN0b25lLmxvZwogICAgQ3VzdG9tTG9nIC92YXIv
bG9nL2FwYWNoZTIva2V5c3RvbmVfYWNjZXNzLmxvZyBjb21iaW5lZAoKICAg
IDxEaXJlY3RvcnkgL3Vzci9iaW4+CiAgICAgICAgUmVxdWlyZSBhbGwgZ3Jh
bnRlZAogICAgPC9EaXJlY3Rvcnk+CjwvVmlydHVhbEhvc3Q+Cgo8VmlydHVh
bEhvc3QgKjozNTM1Nz4KICAgIFdTR0lEYWVtb25Qcm9jZXNzIGtleXN0b25l
LWFkbWluIHByb2Nlc3Nlcz01IHRocmVhZHM9MSB1c2VyPWtleXN0b25lIGdy
b3VwPWtleXN0b25lIGRpc3BsYXktbmFtZT0le0dST1VQfQogICAgV1NHSVBy
b2Nlc3NHcm91cCBrZXlzdG9uZS1hZG1pbgogICAgV1NHSVNjcmlwdEFsaWFz
IC8gL3Vzci9iaW4va2V5c3RvbmUtd3NnaS1hZG1pbgogICAgV1NHSUFwcGxp
Y2F0aW9uR3JvdXAgJXtHTE9CQUx9CiAgICBXU0dJUGFzc0F1dGhvcml6YXRp
b24gT24KICAgIEVycm9yTG9nRm9ybWF0ICIle2N1fXQgJU0iCiAgICBFcnJv
ckxvZyAvdmFyL2xvZy9hcGFjaGUyL2tleXN0b25lLmxvZwogICAgQ3VzdG9t
TG9nIC92YXIvbG9nL2FwYWNoZTIva2V5c3RvbmVfYWNjZXNzLmxvZyBjb21i
aW5lZAoKICAgIDxEaXJlY3RvcnkgL3Vzci9iaW4+CiAgICAgICAgUmVxdWly
ZSBhbGwgZ3JhbnRlZAogICAgPC9EaXJlY3Rvcnk+CjwvVmlydHVhbEhvc3Q+
Cgo=
====
EOF
a2enmod wsgi
ln -sf /etc/apache2/sites-available/wsgi-keystone.conf /etc/apache2/sites-enabled
rm -f /var/lib/keystone/keystone.db
""")
    ServiceControl.relaunch("apache2")
    ServiceControl.check("wsgi:keystone")
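# For reference, the ConfigEditor.setKey calls above leave keystone.conf with
# entries along these lines (values are illustrative; the real token, password
# and MySQL host come from PASSWORDCACHE and the config metadata):
#
#   [DEFAULT]
#   admin_token = <ADMIN_TOKEN>
#
#   [database]
#   connection = mysql+pymysql://keystone:<KEYSTONE_DBPASS>@<mysql-ip>/keystone
#
#   [token]
#   provider = fernet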
 | 
	apache-2.0 | 8,810,368,617,381,399,000 | 33.827586 | 81 | 0.852475 | false | 
| 
	GregorCH/ipet | 
	ipet/parsing/StatisticReader_CustomReader.py | 
	1 | 
	6339 | 
	"""
The MIT License (MIT)
Copyright (c) 2018 Zuse Institute Berlin, www.zib.de
Permissions are granted as stated in the license file you have obtained
with this software. If you find the library useful for your purpose,
please refer to README.md for how to cite IPET.
@author: Gregor Hendel
"""
from .StatisticReader import StatisticReader
import re
import builtins
import logging
from ipet import misc
from ipet.concepts.IPETNode import IpetNode
logger = logging.getLogger(__name__)
class CustomReader(StatisticReader):
    """
    Reader to be initialised interactively through IPET or from an interactive python shell
    """
    name = 'CustomReader'
    regexp = 'Custom'
    datakey = 'Custom'
    data = None
    METHOD_FIRST = 1
    METHOD_LAST = 2
    METHOD_SUM = 3
    METHOD_MIN = 4
    METHOD_MAX = 5
    METHOD_COUNT = 6
    str2method = {
                  "first" : METHOD_FIRST,
                  "last" : METHOD_LAST,
                  "sum" : METHOD_SUM,
                  "min" : METHOD_MIN,
                  "max" : METHOD_MAX,
                  "count" : METHOD_COUNT
                  }
    requiredoptions = {
            "datatype" : ["float", "int"],
            "method" : list(str2method.keys())
        }
    def __init__(self, name = None, regpattern = None, datakey = None, index = 0, datatype = "float", method = "last", active = True):
        """
        Constructor of a custom reader that parses additional simple solver output from the log file context
        Parameters:
        -----------
        name : a name to distinguish this reader from the others
        regpattern : A string or regular expression pattern to detect lines from which output should be read
        datakey : The data key under which the parsed datum gets stored for every problem
        index : The zero-based index of the number in the specified line (only numbers count)
        datatype : choose 'int' or 'float'
        method : how to treat multiple occurrences of this data within one problem; 'count' occurrences or parse 'first', 'last', 'sum', 'min' or 'max'
        """
        IpetNode.__init__(self, active)
        if regpattern is None:
            raise ValueError("Error: No 'regpattern' specified for reader with name %s" % str(name))
        if name in [None, ""]:
            self.name = datakey + "Reader"
            self.username = False
        else:
            self.name = name
            self.username = True
        self.set_datakey(datakey)
        self.set_index(index)
        self.regpattern = regpattern
        self.set_regpattern(regpattern)
        self.method = method
        self.methodint = self.METHOD_LAST
        self.set_method(method)
        self.set_datatype(datatype)
    def getEditableAttributes(self):
        return ['name', 'regpattern', 'datakey', 'index', 'datatype', 'method'] + IpetNode.getEditableAttributes(self)
    def getRequiredOptionsByAttribute(self, attr):
        return self.requiredoptions.get(attr, IpetNode.getRequiredOptionsByAttribute(self, attr))
    def extractStatistic(self, line):
        if self.regexp.search(line):
            logging.debug("Custom Reader {} found match in line \n{}".format(self.name, line.strip()))
            logging.debug("Numerical expression matches: {}".format(", ".join(misc.numericExpression.findall(line))))
            previousdata = self.testrun.getCurrentProblemData(self.datakey)
            if self.methodint == CustomReader.METHOD_COUNT:
                if previousdata is None:
                    self.addData(self.datakey, 1)
                else:
                    self.addData(self.datakey, previousdata + 1)
                return
            try:
                data = misc.getNumberAtIndex(line, self.index)
                data = self.datatypemethod(data)
                if self.methodint == CustomReader.METHOD_FIRST:
                    if previousdata is None:
                        self.addData(self.datakey, data)
                elif self.methodint == CustomReader.METHOD_LAST:
                    self.addData(self.datakey, data)
                elif self.methodint == CustomReader.METHOD_SUM:
                    if previousdata is None:
                        previousdata = 0
                    self.addData(self.datakey, data + previousdata)
                elif self.methodint == CustomReader.METHOD_MIN:
                    if previousdata is None:
                        self.addData(self.datakey, data)
                    elif data < previousdata:
                        self.addData(self.datakey, data)
                elif self.methodint == CustomReader.METHOD_MAX:
                    if previousdata is None:
                        self.addData(self.datakey, data)
                    elif data > previousdata:
                        self.addData(self.datakey, data)
            except Exception:
                logger.warning("Reader %s could not retrieve data at index %d from matching line '%s'", self.getName(), self.index, line)
        return None
    def setDataType(self, sometype):
        """
        recognizes data types (e.g., 'float' or 'int') and sets reader data type to this value
        """
        try:
            self.datatypemethod = getattr(builtins, sometype)
            self.datatype = sometype
        except (AttributeError, TypeError):
            logger.debug("Could not recognize data type %s, using float", sometype)
            self.datatypemethod = float
            self.datatype = 'float'
    def set_datatype(self, datatype):
        self.setDataType(datatype)
    def set_method(self, method):
        self.methodint = self.str2method.get(method, self.methodint)
        self.method = method
    def set_regpattern(self, regpattern):
        self.regexp = re.compile(regpattern)
        self.regpattern = regpattern
    def set_name(self, name):
        if name == self.getName():
            return
        if name in ["", None]:
            self.name = self.datakey + "Reader"
            self.username = False
        else:
            self.name = name
            self.username = True
    def set_datakey(self, datakey):
        self.datakey = datakey
        if not self.username:
            self.name = self.datakey + "Reader"
    def set_index(self, index):
        self.index = int(index)
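# Minimal usage sketch (not part of the class; the pattern and datakey below
# are made up for illustration, only the CustomReader API above is real):
#
#   reader = CustomReader(regpattern="Total time:", datakey="totaltime",
#                         index=0, datatype="float", method="last")
#
# When wired into an IPET test run, every log line matching "Total time:"
# has the first number on the line parsed as a float; with method="last",
# the final occurrence per problem is kept under the key "totaltime".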
 | 
	mit | -7,036,275,254,161,661,000 | 31.84456 | 151 | 0.581953 | false | 
| 
	jackfirth/flask-negotiate | 
	setup.py | 
	1 | 
	1312 | 
	"""
flask-negotiate2
================
Content negotiation utility for Flask apps. Fork of Flask-Negotiate by
Matt Wright (github.com/mattupstate)
Resources
---------
- `Documentation <http://packages.python.org/flask-negotiate2/>`_
- `Issue Tracker <http://github.com/jackfirth/flask-negotiate/issues>`_
- `Code <http://github.com/jackfirth/flask-negotiate/>`_
"""
from setuptools import setup
setup(
    name='flask-negotiate2',
    version='0.2.0',
    url='https://github.com/jackfirth/flask-negotiate',
    license='MIT',
    author='Matthew Wright',
    author_email='[email protected]',
    description='Content negotiation utility for Flask apps',
    long_description=__doc__,
    py_modules=['flask_negotiate2'],
    zip_safe=False,
    include_package_data=True,
    platforms='any',
    install_requires=['flask'],
    test_suite='nose.collector',
    tests_require=['nose'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ]
)
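# Usage sketch, assuming this fork keeps the decorators of the original
# Flask-Negotiate (the route and view names are illustrative):
#
#   from flask_negotiate2 import consumes, produces
#
#   @app.route('/things')
#   @consumes('application/json')   # respond 415 unless the request is JSON
#   @produces('application/json')   # respond 406 unless the client accepts JSON
#   def things():
#       ...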
 | 
	mit | -3,190,932,591,787,335,000 | 28.155556 | 71 | 0.645579 | false | 
| 
	NoneGroupTeam/Let-s-Go | 
	webapp/app/views.py | 
	1 | 
	18587 | 
	from django.contrib.auth import authenticate, logout, login
from django.contrib.auth import get_user_model
from django.http import HttpResponse
from app.models import Label, Place, Guide, Question, Answer
from hashlib import md5
import json
import re
LOGIN_OK_CODE = 200
LOGIN_OK = 'Login success'
LOGOUT_OK_CODE = 201
LOGOUT_OK = 'Logout success'
REG_OK_CODE = 202
REG_OK = 'Register success'
QUERY_OK_CODE = 203
QUERY_OK = ''
ADD_OK_CODE = 203
ADD_OK = 'Add success'
GET_OK_CODE = 204
GET_OK = ''
HAD_LOGIN_CODE = 301
HAD_LOGIN = 'Already logged in'
NOT_LOGIN_CODE = 301
NOT_LOGIN = 'Not logged in'
NOT_ACTIVE_CODE = 401
NOT_ACTIVE = 'User not active'
NOT_MATCH_CODE = 402
NOT_MATCH = 'Username and password do not match'
DATE_ERR_CODE = 411
DATE_ERR = 'Datetime is not allowed'
GENDER_ERR_CODE = 412
GENDER_ERR = 'Gender is not allowed'
PHONE_ERR_CODE = 413
PHONE_ERR = 'Phone number is not allowed'
EMAIL_ERR_CODE = 414
EMAIL_ERR = 'Email is not allowed'
PHONE_EX_CODE = 421
PHONE_EX = 'Phone has already been registered'
EMAIL_EX_CODE = 422
EMAIL_EX = 'Email has already been registered'
UNAME_EX_CODE = 423
UNAME_EX = 'Username has already been registered'
NAME_EX_CODE = 424
NAME_EX = 'This name already exists'
KEY_ERR_CODE = 425
KEY_ERR = 'Key error'
ID_ERR_CODE = 426
ID_ERR = 'ID error'
TITLE_ERR_CODE = 427
TITLE_ERR = 'Title error'
PLACE_ERR_CODE = 428
PLACE_ERR = 'Place error'
LABEL_ERR_CODE = 429
LABEL_ERR = 'Label error'
NAME_ERR_CODE = 430
NAME_ERR = 'Name error'
NAME_NEX_CODE = 431
NAME_NEX = 'Name does not exist'
INVALIED_CODE = 501
INVALIED = 'Method not supported'
UN_ERROR_CODE = 502
UN_ERROR = 'Something went wrong'
def index(request):
    return HttpResponse("hello.")
def test(request):
    return HttpResponse("test ok")
def JSON(**kwargs):
    return json.dumps(kwargs)
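# Example: JSON(code=200, status=True, message='Login success') produces the
# string '{"code": 200, "status": true, "message": "Login success"}' (key
# order may vary by Python version).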
def user_logout(request):
    if request.user.is_authenticated():
        logout(request)
        data = JSON(code=LOGOUT_OK_CODE, status=True, message=LOGOUT_OK)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=True, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def user_login(request):
    if request.user.is_authenticated():
        data = JSON(code=HAD_LOGIN_CODE, status=True, message=HAD_LOGIN)
        return HttpResponse(data, content_type="application/json")
    if request.method == 'POST':
        username = request.POST.get('username')
        password = request.POST.get('password')
        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                message = JSON(user_id=user.id, username=user.username)
                data = JSON(code=LOGIN_OK_CODE, status=True, message=message)
                login(request, user)
            else:
                data = JSON(code=NOT_ACTIVE_CODE, status=False,
                            message=NOT_ACTIVE)
        else:
            data = JSON(code=NOT_MATCH_CODE, status=False, message=NOT_MATCH)
    else:
        data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    return HttpResponse(data, content_type="application/json")
def user_register(request):
    if request.user.is_authenticated():
        data = JSON(code=HAD_LOGIN_CODE, status=False, message=HAD_LOGIN)
    elif not request.method == 'POST':
        data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        username = request.POST.get('username')
        password = request.POST.get('password')
        email = request.POST.get('email')
        phone = request.POST.get('phone')
        gender = request.POST.get('gender')
        birthday = request.POST.get('birthday')
        # check format
        if re.match(r'(\d{4}([-/\.])\d{2}\2\d{2})', birthday) is None:
            data = JSON(code=DATE_ERR_CODE, status=False, message=DATE_ERR)
        elif gender not in {'1', '0'}:
            data = JSON(code=GENDER_ERR_CODE, status=False, message=GENDER_ERR)
        elif re.match(r'(\+\d{1,3})?1\d{10}', phone) is None:
            data = JSON(code=PHONE_ERR_CODE, status=False, message=PHONE_ERR)
        elif re.match(r'[^@\s]+@([^@\s]+\.)+[^@\s]+', email) is None:
            data = JSON(code=EMAIL_ERR_CODE, status=False, message=EMAIL_ERR)
        # database search
        else:
            all_user = get_user_model().objects
            if all_user.filter(phone=phone).count() != 0:
                data = JSON(CODE=PHONE_EX_CODE, status=False, message=PHONE_EX)
            elif all_user.filter(email=email).count() != 0:
                data = JSON(CODE=EMAIL_EX_CODE, status=False, message=EMAIL_EX)
            elif all_user.filter(username=username).count() != 0:
                data = JSON(CODE=UNAME_EX_CODE, status=False, message=UNAME_EX)
            else:
                app_user = get_user_model()
                try:
                    birthday = birthday.replace('.', '-').replace('/', '-')
                    user = app_user.objects.create_user(username=username,
                                                        password=password,
                                                        email=email,
                                                        phone=phone,
                                                        gender=gender,
                                                        birthday=birthday)
                    message = JSON(user_id=user.id, username=user.username)
                    data = JSON(code=REG_OK_CODE, status=True, message=message)
                except Exception as e:
                    print(e)
                    data = JSON(code=UN_ERROR_CODE, status=False,
                                message=UN_ERROR)
    return HttpResponse(data, content_type="application/json")
def guide_add(request):
    if request.user.is_authenticated():
        if request.method == 'POST':
            title = request.POST.get('title')
            content = request.POST.get('content')
            place = request.POST.get('place')
            label = request.POST.getlist('label[]')
            start_time = request.POST.get('start_time')
            end_time = request.POST.get('end_time')
            if len(title) == 0:
                data = JSON(code=TITLE_ERR_CODE, status=False,
                            message=TITLE_ERR)
            elif len(place) == 0:
                data = JSON(code=PLACE_ERR_CODE, status=False,
                            message=PLACE_ERR)
            elif re.match(r'(\d{4}([-/\.])\d{2}\2\d{2})', start_time) is None:
                data = JSON(code=DATE_ERR_CODE, status=False, message=DATE_ERR)
            elif re.match(r'(\d{4}([-/\.])\d{2}\2\d{2})', end_time) is None:
                data = JSON(code=DATE_ERR_CODE, status=False, message=DATE_ERR)
            elif start_time > end_time:
                data = JSON(code=DATE_ERR_CODE, status=False, message=DATE_ERR)
            elif not Place.objects.filter(id=place):
                data = JSON(code=PLACE_ERR_CODE, status=False,
                            message=PLACE_ERR)
            else:
                label = Label.objects.filter(id__in=label)
                a = Guide(name=title, user=request.user,
                          place=Place.objects.get(id=place), content=content,
                          start_time=start_time, end_time=end_time)
                a.save()
                a.label.add(*label)
                data = JSON(code=ADD_OK_CODE, status=True, message=ADD_OK)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def guide_id(request, _id):
    if request.user.is_authenticated():
        try:
            guide = Guide.objects.filter(id=_id)[0]
            labels = []
            for l in guide.label.all():
                labels.append(l.name)
            submit = str(guide.submit.strftime('%Y-%m-%d %H:%M:%S'))
            result = {'title': guide.name, 'username': guide.user.username,
                      'place': guide.place.name, 'labels': labels,
                      'start_time': str(guide.start_time),
                      'end_time': str(guide.end_time),
                      'content': guide.content, 'submit': submit,
                      'pageview': guide.pageview}
            guide.pageview += 1
            guide.save()
            data = JSON(code=GET_OK_CODE, status=True, message=result)
        except IndexError:
            data = JSON(code=ID_ERR_CODE, status=False, message=ID_ERR)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def guide_list(request):
    if request.user.is_authenticated():
        if request.method == 'POST':
            start = int(request.POST.get('start'))
            offset = int(request.POST.get('offset'))
            try:
                ans = Guide.objects.order_by('-id')[start:start + offset]
            except IndexError:
                ans = []
            result = []
            for i in ans:
                labels = []
                for l in i.label.all():
                    labels.append(l.name)
                m = md5()
                m.update(i.user.email.encode())
                img = 'http://gravatar.eqoe.cn/avatar/%s?size=48&default=identicon&rating=pg' % (m.hexdigest())
                _ = {'id': i.id, 'username': i.user.username, 'title': i.name,
                     'place': i.place.name, 'pageview': i.pageview,
                     'labels': labels, 'img': img}
                result.append(_)
            data = JSON(code=QUERY_OK_CODE, status=True, message=result)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def question_add(request):
    if request.user.is_authenticated():
        if request.method == 'POST':
            title = request.POST.get('title')
            content = request.POST.get('content')
            place = request.POST.get('place')
            label = request.POST.getlist('label[]')
            if len(title) == 0:
                data = JSON(code=TITLE_ERR_CODE, status=False,
                            message=TITLE_ERR)
            elif len(place) == 0:
                data = JSON(code=PLACE_ERR_CODE, status=False,
                            message=PLACE_ERR)
            elif not Place.objects.filter(id=place):
                data = JSON(code=PLACE_ERR_CODE, status=False,
                            message=PLACE_ERR)
            else:
                label = Label.objects.filter(id__in=label)
                a = Question(title=title, user=request.user,
                             place=Place.objects.get(id=place),
                             content=content)
                a.save()
                a.label.add(*label)
                data = JSON(code=ADD_OK_CODE, status=True, message=ADD_OK)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def question_id(request, _id):
    if request.user.is_authenticated():
        try:
            question = Question.objects.filter(id=_id)[0]
            labels = []
            for l in question.label.all():
                labels.append(l.name)
            answers = []
            for i in Answer.objects.filter(question=question).order_by('-submit'):
                m = md5()
                m.update(i.user.email.encode())
                img = 'http://gravatar.eqoe.cn/avatar/%s?size=48&default=identicon&rating=pg' % (m.hexdigest())
                _submit = str(i.submit.strftime('%Y-%m-%d %H:%M:%S'))
                _ = {'id': i.id, 'username': i.user.username, 'img': img,
                     'content': i.content, 'submit': _submit}
                answers.append(_)
            submit = str(question.submit.strftime('%Y-%m-%d %H:%M:%S'))
            result = {'title': question.title,
                      'username': question.user.username,
                      'place': question.place.name, 'labels': labels,
                      'content': question.content, 'submit': submit,
                      'answer': answers}
            data = JSON(code=GET_OK_CODE, status=True, message=result)
        except IndexError:
            data = JSON(code=ID_ERR_CODE, status=False, message=ID_ERR)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def question_comment(request, _id):
    if request.user.is_authenticated():
        if request.method == 'POST':
            content = request.POST.get('content')
            try:
                question = Question.objects.filter(id=_id)[0]
                answer = Answer(user=request.user, question=question,
                                content=content)
                answer.save()
                data = JSON(code=ADD_OK_CODE, status=True, message=ADD_OK)
            except IndexError:
                data = JSON(code=ID_ERR_CODE, status=False, message=ID_ERR)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def question_list(request):
    if request.user.is_authenticated():
        if request.method == 'POST':
            start = int(request.POST.get('start'))
            offset = int(request.POST.get('offset'))
            try:
                ans = Question.objects.order_by('-id')[start:start + offset]
            except IndexError:
                ans = []
            result = []
            for i in ans:
                labels = []
                for l in i.label.all():
                    labels.append(l.name)
                m = md5()
                m.update(i.user.email.encode())
                ans_count = len(Answer.objects.filter(question=i))
                img = 'http://gravatar.eqoe.cn/avatar/%s?size=48&default=identicon&rating=pg' % (m.hexdigest())
                _ = {'id': i.id, 'username': i.user.username, 'title': i.title,
                     'place': i.place.name, 'answer': ans_count,
                     'labels': labels, 'img': img}
                result.append(_)
            data = JSON(code=QUERY_OK_CODE, status=True, message=result)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def __id(request, _id, model):
    if request.user.is_authenticated():
        try:
            ans = model.objects.filter(id=_id)[0].name
            data = JSON(code=QUERY_OK_CODE, status=True, message=ans)
        except IndexError:
            data = JSON(code=ID_ERR_CODE, status=False, message=ID_ERR)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def label_id(request, _id):
    return __id(request, _id, Label)
def place_id(request, _id):
    return __id(request, _id, Place)
def __list(request, model):
    if request.user.is_authenticated():
        ans = list(model.objects.values('id', 'name'))
        data = JSON(code=QUERY_OK_CODE, status=True, message=ans)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def place_list(request):
    return __list(request, Place)
def label_list(request):
    return __list(request, Label)
def user_add_place(request):
    if request.user.is_authenticated():
        if request.method == 'POST':
            if 'name' in request.POST:
                name = request.POST.get('name')
                if len(name) == 0:
                    data = JSON(code=NAME_ERR_CODE, status=False,
                                message=NAME_ERR)
                elif not Place.objects.filter(name=name):
                    data = JSON(code=NAME_NEX_CODE, status=False,
                                message=NAME_NEX)
                else:
                    request.user.place.add(Place.objects.get(name=name))
                    data = JSON(code=ADD_OK_CODE, status=True, message=ADD_OK)
            else:
                data = JSON(code=KEY_ERR_CODE, status=False, message=KEY_ERR)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def __add(request, model):
    if request.user.is_authenticated():
        if request.method == 'POST':
            if 'name' in request.POST:
                name = request.POST.get('name')
                if len(name) == 0:
                    data = JSON(code=NAME_ERR_CODE, status=False,
                                message=NAME_ERR)
                elif model.objects.filter(name=name):
                    data = JSON(code=NAME_EX_CODE, status=False,
                                message=NAME_EX)
                else:
                    add = model(name=name)
                    add.save()
                    data = JSON(code=ADD_OK_CODE, status=True, message=ADD_OK)
            else:
                data = JSON(code=KEY_ERR_CODE, status=False, message=KEY_ERR)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def label_add(request):
    return __add(request, Label)
def place_add(request):
    return __add(request, Place)
def user_info(request):
    if request.user.is_authenticated():
        user = request.user
        places = []
        for p in user.place.all():
            places.append(p.name)
        result = {'username': user.username, 'id': user.id,
                  'places': places, 'birthday': str(user.birthday),
                  'gender': user.gender}
        data = JSON(code=GET_OK_CODE, status=True, message=result)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
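# Hypothetical URL wiring for these views (not part of this file; the
# patterns and names below are illustrative only):
#
#   urlpatterns = [
#       url(r'^user/login/$', views.user_login),
#       url(r'^user/logout/$', views.user_logout),
#       url(r'^user/register/$', views.user_register),
#       url(r'^guide/(?P<_id>\d+)/$', views.guide_id),
#       url(r'^question/(?P<_id>\d+)/$', views.question_id),
#   ]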
 | 
	gpl-3.0 | -4,711,005,114,202,626,000 | 38.63113 | 111 | 0.555334 | false | 
| 
	AutorestCI/azure-sdk-for-python | 
	azure-mgmt-servicebus/azure/mgmt/servicebus/operations/operations.py | 
	1 | 
	3676 | 
	# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from .. import models
class Operations(object):
    """Operations operations.
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: Client API version. Constant value: "2017-04-01".
    """
    models = models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.api_version = "2017-04-01"
        self.config = config
    def list(
            self, custom_headers=None, raw=False, **operation_config):
        """Lists all of the available ServiceBus REST API operations.
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: An iterator like instance of Operation
        :rtype:
         ~azure.mgmt.servicebus.models.OperationPaged[~azure.mgmt.servicebus.models.Operation]
        :raises:
         :class:`ErrorResponseException<azure.mgmt.servicebus.models.ErrorResponseException>`
        """
        def internal_paging(next_link=None, raw=False):
            if not next_link:
                # Construct URL
                url = '/providers/Microsoft.ServiceBus/operations'
                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                raise models.ErrorResponseException(self._deserialize, response)
            return response
        # Deserialize response
        deserialized = models.OperationPaged(internal_paging, self._deserialize.dependencies)
        if raw:
            header_dict = {}
            client_raw_response = models.OperationPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
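# Usage sketch (assumes a configured ServiceBusManagementClient bound to
# `client`; the variable name is illustrative):
#
#   for op in client.operations.list():
#       print(op.name)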
 | 
	mit | -6,970,386,953,209,789,000 | 37.291667 | 144 | 0.616703 | false | 
| 
	quantopian/zipline | 
	tests/test_clock.py | 
	1 | 
	5702 | 
	from datetime import time
from unittest import TestCase
import pandas as pd
from trading_calendars import get_calendar
from trading_calendars.utils.pandas_utils import days_at_time
from zipline.gens.sim_engine import (
    MinuteSimulationClock,
    SESSION_START,
    BEFORE_TRADING_START_BAR,
    BAR,
    SESSION_END
)
class TestClock(TestCase):
    @classmethod
    def setUpClass(cls):
        cls.nyse_calendar = get_calendar("NYSE")
        # July 15, 2016 is a Friday, so there are 3 sessions in this range
        # (the 15th, 18th, and 19th)
        cls.sessions = cls.nyse_calendar.sessions_in_range(
            pd.Timestamp("2016-07-15"),
            pd.Timestamp("2016-07-19")
        )
        trading_o_and_c = cls.nyse_calendar.schedule.loc[cls.sessions]
        cls.opens = trading_o_and_c['market_open']
        cls.closes = trading_o_and_c['market_close']
    def test_bts_before_session(self):
        clock = MinuteSimulationClock(
            self.sessions,
            self.opens,
            self.closes,
            days_at_time(self.sessions, time(6, 17), "US/Eastern"),
            False
        )
        all_events = list(clock)
        def _check_session_bts_first(session_label, events, bts_dt):
            minutes = self.nyse_calendar.minutes_for_session(session_label)
            self.assertEqual(393, len(events))
            self.assertEqual(events[0], (session_label, SESSION_START))
            self.assertEqual(events[1], (bts_dt, BEFORE_TRADING_START_BAR))
            for i in range(2, 392):
                self.assertEqual(events[i], (minutes[i - 2], BAR))
            self.assertEqual(events[392], (minutes[-1], SESSION_END))
        _check_session_bts_first(
            self.sessions[0],
            all_events[0:393],
            pd.Timestamp("2016-07-15 6:17", tz='US/Eastern')
        )
        _check_session_bts_first(
            self.sessions[1],
            all_events[393:786],
            pd.Timestamp("2016-07-18 6:17", tz='US/Eastern')
        )
        _check_session_bts_first(
            self.sessions[2],
            all_events[786:],
            pd.Timestamp("2016-07-19 6:17", tz='US/Eastern')
        )
    def test_bts_during_session(self):
        self.verify_bts_during_session(
            time(11, 45), [
                pd.Timestamp("2016-07-15 11:45", tz='US/Eastern'),
                pd.Timestamp("2016-07-18 11:45", tz='US/Eastern'),
                pd.Timestamp("2016-07-19 11:45", tz='US/Eastern')
            ],
            135
        )
    def test_bts_on_first_minute(self):
        self.verify_bts_during_session(
            time(9, 30), [
                pd.Timestamp("2016-07-15 9:30", tz='US/Eastern'),
                pd.Timestamp("2016-07-18 9:30", tz='US/Eastern'),
                pd.Timestamp("2016-07-19 9:30", tz='US/Eastern')
            ],
            1
        )
    def test_bts_on_last_minute(self):
        self.verify_bts_during_session(
            time(16, 00), [
                pd.Timestamp("2016-07-15 16:00", tz='US/Eastern'),
                pd.Timestamp("2016-07-18 16:00", tz='US/Eastern'),
                pd.Timestamp("2016-07-19 16:00", tz='US/Eastern')
            ],
            390
        )
    def verify_bts_during_session(self, bts_time, bts_session_times, bts_idx):
        def _check_session_bts_during(session_label, events, bts_dt):
            minutes = self.nyse_calendar.minutes_for_session(session_label)
            self.assertEqual(393, len(events))
            self.assertEqual(events[0], (session_label, SESSION_START))
            for i in range(1, bts_idx):
                self.assertEqual(events[i], (minutes[i - 1], BAR))
            self.assertEqual(
                events[bts_idx],
                (bts_dt, BEFORE_TRADING_START_BAR)
            )
            for i in range(bts_idx + 1, 391):
                self.assertEqual(events[i], (minutes[i - 2], BAR))
            self.assertEqual(events[392], (minutes[-1], SESSION_END))
        clock = MinuteSimulationClock(
            self.sessions,
            self.opens,
            self.closes,
            days_at_time(self.sessions, bts_time, "US/Eastern"),
            False
        )
        all_events = list(clock)
        _check_session_bts_during(
            self.sessions[0],
            all_events[0:393],
            bts_session_times[0]
        )
        _check_session_bts_during(
            self.sessions[1],
            all_events[393:786],
            bts_session_times[1]
        )
        _check_session_bts_during(
            self.sessions[2],
            all_events[786:],
            bts_session_times[2]
        )
    def test_bts_after_session(self):
        clock = MinuteSimulationClock(
            self.sessions,
            self.opens,
            self.closes,
            days_at_time(self.sessions, time(19, 5), "US/Eastern"),
            False
        )
        all_events = list(clock)
        # since 19:05 Eastern is after the NYSE is closed, we don't emit
        # BEFORE_TRADING_START.  therefore, each day has SESSION_START,
        # 390 BARs, and then SESSION_END
        def _check_session_bts_after(session_label, events):
            minutes = self.nyse_calendar.minutes_for_session(session_label)
            self.assertEqual(392, len(events))
            self.assertEqual(events[0], (session_label, SESSION_START))
            for i in range(1, 391):
                self.assertEqual(events[i], (minutes[i - 1], BAR))
            self.assertEqual(events[-1], (minutes[389], SESSION_END))
        for i in range(0, 2):
            _check_session_bts_after(
                self.sessions[i],
                all_events[(i * 392): ((i + 1) * 392)]
            )
 | 
	apache-2.0 | 564,505,686,380,273,340 | 30.677778 | 79 | 0.539635 | false | 
| 
	AutorestCI/azure-sdk-for-python | 
	azure-mgmt-network/azure/mgmt/network/v2016_12_01/operations/routes_operations.py | 
	1 | 
	17924 | 
	# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.exceptions import DeserializationError
from msrestazure.azure_operation import AzureOperationPoller
from .. import models
class RoutesOperations(object):
    """RoutesOperations operations.
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: Client API version. Constant value: "2016-12-01".
    """
    models = models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.api_version = "2016-12-01"
        self.config = config
    def _delete_initial(
            self, resource_group_name, route_table_name, route_name, custom_headers=None, raw=False, **operation_config):
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}/routes/{routeName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
            'routeName': self._serialize.url("route_name", route_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.delete(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)
        if response.status_code not in [200, 202, 204]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
    def delete(
            self, resource_group_name, route_table_name, route_name, custom_headers=None, raw=False, **operation_config):
        """Deletes the specified route from a route table.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_table_name: The name of the route table.
        :type route_table_name: str
        :param route_name: The name of the route.
        :type route_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :return: An instance of AzureOperationPoller that returns None or
         ClientRawResponse if raw=true
        :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
         ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        raw_result = self._delete_initial(
            resource_group_name=resource_group_name,
            route_table_name=route_table_name,
            route_name=route_name,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )
        if raw:
            return raw_result
        # Construct and send request
        def long_running_send():
            return raw_result.response
        def get_long_running_status(status_link, headers=None):
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            header_parameters = {}
            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
            return self._client.send(
                request, header_parameters, stream=False, **operation_config)
        def get_long_running_output(response):
            if response.status_code not in [200, 202, 204]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            if raw:
                client_raw_response = ClientRawResponse(None, response)
                return client_raw_response
        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)
    def get(
            self, resource_group_name, route_table_name, route_name, custom_headers=None, raw=False, **operation_config):
        """Gets the specified route from a route table.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_table_name: The name of the route table.
        :type route_table_name: str
        :param route_name: The name of the route.
        :type route_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: Route or ClientRawResponse if raw=true
        :rtype: ~azure.mgmt.network.v2016_12_01.models.Route or
         ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}/routes/{routeName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
            'routeName': self._serialize.url("route_name", route_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Route', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def _create_or_update_initial(
            self, resource_group_name, route_table_name, route_name, route_parameters, custom_headers=None, raw=False, **operation_config):
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}/routes/{routeName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
            'routeName': self._serialize.url("route_name", route_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(route_parameters, 'Route')
        # Construct and send request
        request = self._client.put(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, stream=False, **operation_config)
        if response.status_code not in [200, 201]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Route', response)
        if response.status_code == 201:
            deserialized = self._deserialize('Route', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def create_or_update(
            self, resource_group_name, route_table_name, route_name, route_parameters, custom_headers=None, raw=False, **operation_config):
        """Creates or updates a route in the specified route table.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_table_name: The name of the route table.
        :type route_table_name: str
        :param route_name: The name of the route.
        :type route_name: str
        :param route_parameters: Parameters supplied to the create or update
         route operation.
        :type route_parameters: ~azure.mgmt.network.v2016_12_01.models.Route
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :return: An instance of AzureOperationPoller that returns Route or
         ClientRawResponse if raw=true
        :rtype:
         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2016_12_01.models.Route]
         or ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        raw_result = self._create_or_update_initial(
            resource_group_name=resource_group_name,
            route_table_name=route_table_name,
            route_name=route_name,
            route_parameters=route_parameters,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )
        if raw:
            return raw_result
        # Construct and send request
        def long_running_send():
            return raw_result.response
        def get_long_running_status(status_link, headers=None):
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            header_parameters = {}
            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
            return self._client.send(
                request, header_parameters, stream=False, **operation_config)
        def get_long_running_output(response):
            if response.status_code not in [200, 201]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            deserialized = self._deserialize('Route', response)
            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                return client_raw_response
            return deserialized
        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)
    def list(
            self, resource_group_name, route_table_name, custom_headers=None, raw=False, **operation_config):
        """Gets all routes in a route table.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_table_name: The name of the route table.
        :type route_table_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: An iterator like instance of Route
        :rtype:
         ~azure.mgmt.network.v2016_12_01.models.RoutePaged[~azure.mgmt.network.v2016_12_01.models.Route]
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}/routes'
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, stream=False, **operation_config)
            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            return response
        # Deserialize response
        deserialized = models.RoutePaged(internal_paging, self._deserialize.dependencies)
        if raw:
            header_dict = {}
            client_raw_response = models.RoutePaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
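    # A minimal usage sketch (hedged: `network_client` is an assumed,
    # already-authenticated client; the names are illustrative):
    #
    #   for route in network_client.routes.list('myResourceGroup', 'myRouteTable'):
    #       print(route.name)  # paging is handled transparently by RoutePaged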
 | 
	mit | -8,097,506,177,059,138,000 | 43.81 | 158 | 0.632671 | false | 
| 
	google-research/google-research | 
	neural_guided_symbolic_regression/utils/generate_empirical_distribution_df_test.py | 
	1 | 
	6620 | 
	# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for generate_empirical_distribution_df."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
import pandas as pd
import tensorflow.compat.v1 as tf
from neural_guided_symbolic_regression.models import grammar_utils
from neural_guided_symbolic_regression.utils import generate_empirical_distribution_df
class GenerateEmpiricalDistributionDfHelperTest(parameterized.TestCase):
  @parameterized.parameters([
      ([1, 4], 5),
      ([1, 4, 3], 5),
      ([1, 4, 3, 5, 2, 6], 3),
      ([1, 4, 3, 5, 2, 6, 8], 3)
  ])
  def test_get_number_valid_next_step(
      self,
      prod_rules_sequence_indices,
      expected):
    grammar = grammar_utils.load_grammar(
        grammar_path='third_party/google_research/google_research/'
        'neural_guided_symbolic_regression/grammar/'
        'univariate_one_constant_grammar.txt')
    number_valid_next_step = (
        generate_empirical_distribution_df.get_number_valid_next_step(
            prod_rules_sequence_indices, grammar))
    self.assertEqual(number_valid_next_step, expected)
class GenerateEmpiricalDistributionDfMainTest(parameterized.TestCase):
  def setUp(self):
    super(GenerateEmpiricalDistributionDfMainTest, self).setUp()
    # Production rule sequence of ( 1 ) is 1,6,7,6,9.
    # Production rule sequence of ( x ) is 1,6,7,6,8.
    self.expression_df = pd.DataFrame(
        {'expression_string': ['( 1 )', '( x )'],
         'leading_at_0': [0, 1],
         'leading_at_inf': [0, 1]})
    self.grammar = grammar_utils.load_grammar(
        grammar_path='third_party/google_research/google_research/'
        'neural_guided_symbolic_regression/grammar/'
        'univariate_one_constant_grammar.txt')
    self.max_length = 11
  def test_get_partial_sequence_df(self):
    partial_sequence_df = (
        generate_empirical_distribution_df.get_partial_sequence_df(
            self.expression_df, self.grammar, self.max_length))
    expected_partial_sequence_indices = ['1', '1_6', '1_6_7', '1_6_7_6',
                                         '1', '1_6', '1_6_7', '1_6_7_6']
    self.assertListEqual(
        list(partial_sequence_df['partial_sequence_indices'].values),
        expected_partial_sequence_indices)
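  # Each expression contributes the proper prefixes of its production rule
  # sequence; e.g. [1, 6, 7, 6, 9] yields '1', '1_6', '1_6_7', '1_6_7_6',
  # with the rule that follows each prefix serving as the prediction target.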
  @parameterized.parameters([
      (None,
       'partial_sequence_indices',
       ['1', 0, 0],
       [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]),
      (None,
       'partial_sequence_indices',
       ['1_6', 0, 0],
       [0, 0, 0, 0, 0, 0, 0, 1, 0, 0]),
      (None,
       'partial_sequence_indices',
       ['1_6_7', 0, 0],
       [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]),
      (None,
       'partial_sequence_indices',
       ['1_6_7_6', 0, 0],
       [0, 0, 0, 0, 0, 0, 0, 0, 0, 1]),
      (2,
       'tail_partial_sequence_indices',
       ['1', 0, 0],
       [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]),
      (2,
       'tail_partial_sequence_indices',
       ['1_6', 0, 0],
       [0, 0, 0, 0, 0, 0, 0, 1, 0, 0]),
      (2,
       'tail_partial_sequence_indices',
       ['6_7', 0, 0],
       [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]),
      (2,
       'tail_partial_sequence_indices',
       ['7_6', 0, 0],
       [0, 0, 0, 0, 0, 0, 0, 0, 0, 1]),
  ])
  def test_get_empirical_distribution_df(self,
                                         tail_length,
                                         level_name,
                                         multi_index_to_check,
                                         expected_probabilities):
    properties = ['leading_at_0', 'leading_at_inf']
    num_production_rules = len(self.grammar.prod_rules)
    partial_sequence_df = (
        generate_empirical_distribution_df.get_partial_sequence_df(
            self.expression_df, self.grammar, self.max_length))
    empirical_distribution_df = (
        generate_empirical_distribution_df.get_empirical_distribution_df(
            partial_sequence_df, properties, num_production_rules, tail_length))
    levels = [level_name] + properties
    np.testing.assert_array_almost_equal(
        empirical_distribution_df.xs(multi_index_to_check,
                                     level=levels).values[0],
        expected_probabilities)
  def test_get_empirical_distribution_df_without_condition(self):
    num_production_rules = len(self.grammar.prod_rules)
    partial_sequence_df = (
        generate_empirical_distribution_df.get_partial_sequence_df(
            self.expression_df, self.grammar, self.max_length))
    empirical_distribution_df = (
        generate_empirical_distribution_df.get_empirical_distribution_df(
            partial_sequence_df, [], num_production_rules, None))
    expected = pd.DataFrame(
        np.array([[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 1, 0, 0],
                  [0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0.5]]),
        columns=range(10))
    expected['partial_sequence_indices'] = ['1', '1_6', '1_6_7', '1_6_7_6']
    expected.set_index('partial_sequence_indices', inplace=True)
    pd.testing.assert_frame_equal(empirical_distribution_df,
                                  expected,
                                  check_dtype=False,
                                  check_index_type=False,
                                  check_column_type=False,
                                  check_names=False)
  @parameterized.parameters([
      ('1_6_7_6', 1, '6'),
      ('1_6_7_6', 2, '7_6'),
      ('1_6', 3, '1_6'),
  ])
  def test_extract_tail_partial_sequence(self,
                                         partial_sequence_string,
                                         tail_length,
                                         expected):
    tail_partial_sequence_string = (
        generate_empirical_distribution_df.extract_tail_partial_sequence(
            partial_sequence_string, tail_length))
    self.assertEqual(tail_partial_sequence_string, expected)
if __name__ == '__main__':
  tf.test.main()
 | 
	apache-2.0 | -8,822,368,325,674,619,000 | 37.71345 | 86 | 0.575378 | false | 
| 
	p-montero/py-ans | 
	class8/ex7.py | 
	1 | 
	1695 | 
	#!/usr/bin/env python
'''
Use processes and Netmiko to connect to each of the devices in the database.
Execute 'show version' on each device. Record the amount of time required to do this.
DISCLAIMER NOTE: Solution is limited to the exercise's scope
'''
from net_system.models import NetworkDevice
import django
from multiprocessing import Process
from termcolor import colored
from datetime import datetime
from netmiko import ConnectHandler
def sh_ver(a_device):
    # Connect to the device and execute the command via Netmiko
    creds = a_device.credentials
    rem_conn_ssh = ConnectHandler(device_type=a_device.device_type, ip=a_device.ip_address, username=creds.username,
                                  password=creds.password, port=a_device.port, secret='')
    # Print the command output
    output = rem_conn_ssh.send_command_expect("show version")
    print "\n <<--------------------------->> \n " + colored(output, 'green') + "\n"
def main():
    # Connect to each device using Netmiko and execute a command, one process per device
    django.setup()
    process = []
    # Record start time
    start_time = datetime.now()
    pylab_devices = NetworkDevice.objects.all()
    for a_device in pylab_devices:
        # Create a PROCESS for each device connection/cmd
        node_process = Process(target=sh_ver, args=(a_device,))
        # Start the process
        node_process.start()
        process.append(node_process)
    for any_process in process:
        print "Notice: " + colored(any_process, 'red')
        any_process.join()
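    # join() blocks until every child process has exited, so the elapsed
    # time below reflects the slowest device rather than the sum of all devices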
    # Calculate the total runtime
    runtime = datetime.now() - start_time
    print "This operation required " + colored(runtime, 'blue')
if __name__ == "__main__":
    main()
 | 
	apache-2.0 | 5,557,594,805,842,516,000 | 35.06383 | 116 | 0.676106 | false | 
| 
	disco-stu/pcs | 
	pcs/test/test_utils.py | 
	1 | 
	58548 | 
	import os
import sys
import shutil
import unittest
import xml.dom.minidom
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, parentdir)
from pcs_test_functions import pcs, ac
import utils
empty_cib = "empty.xml"
temp_cib = "temp.xml"
class UtilsTest(unittest.TestCase):
    def get_cib_empty(self):
        return xml.dom.minidom.parse("empty.xml")
    def get_cib_resources(self):
        cib_dom = self.get_cib_empty()
        new_resources = xml.dom.minidom.parseString("""
            <resources>
                  <primitive id="myResource"
                        class="ocf" provider="heartbeat" type="Dummy">
                  </primitive>
                  <clone id="myClone">
                      <primitive id="myClonedResource"
                          class="ocf" provider="heartbeat" type="Dummy">
                      </primitive>
                  </clone>
                  <master id="myMaster">
                      <primitive id="myMasteredResource"
                            class="ocf" provider="heartbeat" type="Dummy">
                      </primitive>
                  </master>
                  <group id="myGroup">
                      <primitive id="myGroupedResource"
                            class="ocf" provider="heartbeat" type="Dummy">
                      </primitive>
                  </group>
                  <clone id="myGroupClone">
                      <group id="myClonedGroup">
                          <primitive id="myClonedGroupedResource"
                                class="ocf" provider="heartbeat" type="Dummy">
                          </primitive>
                      </group>
                  </clone>
                  <master id="myGroupMaster">
                      <group id="myMasteredGroup">
                          <primitive id="myMasteredGroupedResource"
                                class="ocf" provider="heartbeat" type="Dummy">
                          </primitive>
                      </group>
                  </master>
            </resources>
        """).documentElement
        resources = cib_dom.getElementsByTagName("resources")[0]
        resources.parentNode.replaceChild(new_resources, resources)
        return cib_dom
    def testDomGetResources(self):
        def test_dom_get(method, dom, ok_ids, bad_ids):
            for element_id in ok_ids:
                self.assert_element_id(method(dom, element_id), element_id)
            for element_id in bad_ids:
                self.assertFalse(method(dom, element_id))
        cib_dom = self.get_cib_empty()
        self.assertFalse(utils.dom_get_resource(cib_dom, "myResource"))
        self.assertFalse(
            utils.dom_get_resource_clone(cib_dom, "myClonedResource")
        )
        self.assertFalse(
            utils.dom_get_resource_masterslave(cib_dom, "myMasteredResource")
        )
        self.assertFalse(utils.dom_get_group(cib_dom, "myGroup"))
        self.assertFalse(utils.dom_get_group_clone(cib_dom, "myClonedGroup"))
        self.assertFalse(
            utils.dom_get_group_masterslave(cib_dom, "myMasteredGroup")
        )
        self.assertFalse(utils.dom_get_clone(cib_dom, "myClone"))
        self.assertFalse(utils.dom_get_master(cib_dom, "myMaster"))
        self.assertFalse(utils.dom_get_clone_ms_resource(cib_dom, "myClone"))
        self.assertFalse(utils.dom_get_clone_ms_resource(cib_dom, "myMaster"))
        self.assertFalse(
            utils.dom_get_resource_clone_ms_parent(cib_dom, "myClonedResource")
        )
        self.assertFalse(
            utils.dom_get_resource_clone_ms_parent(cib_dom, "myMasteredResource")
        )
        cib_dom = self.get_cib_resources()
        all_ids = set([
            "none", "myResource",
            "myClone", "myClonedResource",
            "myMaster", "myMasteredResource",
            "myGroup", "myGroupedResource",
            "myGroupClone", "myClonedGroup", "myClonedGroupedResource",
            "myGroupMaster", "myMasteredGroup", "myMasteredGroupedResource",
        ])
        resource_ids = set([
            "myResource",
            "myClonedResource", "myGroupedResource", "myMasteredResource",
            "myClonedGroupedResource", "myMasteredGroupedResource"
        ])
        test_dom_get(
            utils.dom_get_resource, cib_dom,
            resource_ids, all_ids - resource_ids
        )
        cloned_ids = set(["myClonedResource", "myClonedGroupedResource"])
        test_dom_get(
            utils.dom_get_resource_clone, cib_dom,
            cloned_ids, all_ids - cloned_ids
        )
        mastered_ids = set(["myMasteredResource", "myMasteredGroupedResource"])
        test_dom_get(
            utils.dom_get_resource_masterslave, cib_dom,
            mastered_ids, all_ids - mastered_ids
        )
        group_ids = set(["myGroup", "myClonedGroup", "myMasteredGroup"])
        test_dom_get(
            utils.dom_get_group, cib_dom, group_ids, all_ids - group_ids
        )
        cloned_group_ids = set(["myClonedGroup"])
        test_dom_get(
            utils.dom_get_group_clone, cib_dom,
            cloned_group_ids, all_ids - cloned_group_ids
        )
        clone_ids = set(["myClone", "myGroupClone"])
        test_dom_get(
            utils.dom_get_clone, cib_dom,
            clone_ids, all_ids - clone_ids
        )
        mastered_group_ids = set(["myMasteredGroup"])
        test_dom_get(
            utils.dom_get_group_masterslave, cib_dom,
            mastered_group_ids, all_ids - mastered_group_ids
        )
        master_ids = set(["myMaster", "myGroupMaster"])
        test_dom_get(
            utils.dom_get_master, cib_dom,
            master_ids, all_ids - master_ids
        )
        self.assert_element_id(
            utils.dom_get_clone_ms_resource(cib_dom, "myClone"),
            "myClonedResource"
        )
        self.assert_element_id(
            utils.dom_get_clone_ms_resource(cib_dom, "myGroupClone"),
            "myClonedGroup"
        )
        self.assert_element_id(
            utils.dom_get_clone_ms_resource(cib_dom, "myMaster"),
            "myMasteredResource"
        )
        self.assert_element_id(
            utils.dom_get_clone_ms_resource(cib_dom, "myGroupMaster"),
            "myMasteredGroup"
        )
        self.assert_element_id(
            utils.dom_get_resource_clone_ms_parent(cib_dom, "myClonedResource"),
            "myClone"
        )
        self.assert_element_id(
            utils.dom_get_resource_clone_ms_parent(cib_dom, "myClonedGroup"),
            "myGroupClone"
        )
        self.assert_element_id(
            utils.dom_get_resource_clone_ms_parent(
                cib_dom, "myClonedGroupedResource"
            ),
            "myGroupClone"
        )
        self.assert_element_id(
            utils.dom_get_resource_clone_ms_parent(
                cib_dom, "myMasteredResource"
            ),
            "myMaster"
        )
        self.assert_element_id(
            utils.dom_get_resource_clone_ms_parent(
                cib_dom, "myMasteredGroup"
            ),
            "myGroupMaster"
        )
        self.assert_element_id(
            utils.dom_get_resource_clone_ms_parent(
                cib_dom, "myMasteredGroupedResource"
            ),
            "myGroupMaster"
        )
        self.assertEquals(
            None,
            utils.dom_get_resource_clone_ms_parent(cib_dom, "myResource")
        )
        self.assertEquals(
            None,
            utils.dom_get_resource_clone_ms_parent(cib_dom, "myGroup")
        )
        self.assertEquals(
            None,
            utils.dom_get_resource_clone_ms_parent(cib_dom, "myGroupedResource")
        )
    def testDomGetResourceRemoteNodeName(self):
        dom = xml.dom.minidom.parse("empty.xml")
        new_resources = xml.dom.minidom.parseString("""
            <resources>
                <primitive id="dummy1"
                        class="ocf" provider="heartbeat" type="Dummy">
                </primitive>
                <primitive class="ocf" id="vm-guest1" provider="heartbeat"
                        type="VirtualDomain">
                    <instance_attributes id="vm-guest1-instance_attributes">
                        <nvpair id="vm-guest1-instance_attributes-hypervisor"
                            name="hypervisor" value="qemu:///system"/>
                        <nvpair id="vm-guest1-instance_attributes-config"
                            name="config" value="/root/guest1.xml"/>
                    </instance_attributes>
                    <meta_attributes id="vm-guest1-meta_attributes">
                        <nvpair id="vm-guest1-meta_attributes-remote-node"
                            name="remote-node" value="guest1"/>
                    </meta_attributes>
                </primitive>
                <primitive id="dummy2"
                        class="ocf" provider="heartbeat" type="Dummy">
                    <instance_attributes id="vm-guest1-meta_attributes">
                        <nvpair id="dummy2-remote-node"
                            name="remote-node" value="guest2"/>
                    </instance_attributes>
                </primitive>
            </resources>
        """).documentElement
        resources = dom.getElementsByTagName("resources")[0]
        resources.parentNode.replaceChild(new_resources, resources)
        self.assertEquals(
            None,
            utils.dom_get_resource_remote_node_name(
                utils.dom_get_resource(dom, "dummy1")
            )
        )
        self.assertEquals(
            None,
            utils.dom_get_resource_remote_node_name(
                utils.dom_get_resource(dom, "dummy2")
            )
        )
        self.assertEquals(
            "guest1",
            utils.dom_get_resource_remote_node_name(
                utils.dom_get_resource(dom, "vm-guest1")
            )
        )
    def test_dom_get_meta_attr_value(self):
        dom = xml.dom.minidom.parse("empty.xml")
        new_resources = xml.dom.minidom.parseString("""
            <resources>
                <primitive id="dummy1"
                        class="ocf" provider="heartbeat" type="Dummy">
                </primitive>
                <primitive class="ocf" id="vm-guest1" provider="heartbeat"
                        type="VirtualDomain">
                    <instance_attributes id="vm-guest1-instance_attributes">
                        <nvpair id="vm-guest1-instance_attributes-hypervisor"
                            name="hypervisor" value="qemu:///system"/>
                        <nvpair id="vm-guest1-instance_attributes-config"
                            name="config" value="/root/guest1.xml"/>
                    </instance_attributes>
                    <meta_attributes id="vm-guest1-meta_attributes">
                        <nvpair id="vm-guest1-meta_attributes-remote-node"
                            name="remote-node" value="guest1"/>
                    </meta_attributes>
                </primitive>
                <primitive id="dummy2"
                        class="ocf" provider="heartbeat" type="Dummy">
                    <instance_attributes id="vm-guest1-meta_attributes">
                        <nvpair id="dummy2-remote-node"
                            name="remote-node" value="guest2"/>
                    </instance_attributes>
                </primitive>
            </resources>
        """).documentElement
        resources = dom.getElementsByTagName("resources")[0]
        resources.parentNode.replaceChild(new_resources, resources)
        self.assertEquals(
            None,
            utils.dom_get_meta_attr_value(
                utils.dom_get_resource(dom, "dummy1"), "foo"
            )
        )
        self.assertEquals(
            None,
            utils.dom_get_meta_attr_value(
                utils.dom_get_resource(dom, "dummy2"), "remote-node"
            )
        )
        self.assertEquals(
            "guest1",
            utils.dom_get_meta_attr_value(
                utils.dom_get_resource(dom, "vm-guest1"), "remote-node"
            )
        )
        self.assertEquals(
            None,
            utils.dom_get_meta_attr_value(
                utils.dom_get_resource(dom, "vm-guest1"), "foo"
            )
        )
    def testGetElementWithId(self):
        dom = xml.dom.minidom.parseString("""
            <aa>
                <bb id="bb1"/>
                <bb/>
                <bb id="bb2">
                    <cc id="cc1"/>
                </bb>
                <bb id="bb3">
                    <cc id="cc2"/>
                </bb>
            </aa>
        """).documentElement
        self.assert_element_id(
            utils.dom_get_element_with_id(dom, "bb", "bb1"), "bb1"
        )
        self.assert_element_id(
            utils.dom_get_element_with_id(dom, "bb", "bb2"), "bb2"
        )
        self.assert_element_id(
            utils.dom_get_element_with_id(dom, "cc", "cc1"), "cc1"
        )
        self.assert_element_id(
            utils.dom_get_element_with_id(
                utils.dom_get_element_with_id(dom, "bb", "bb2"),
                "cc",
                "cc1"
            ),
            "cc1"
        )
        self.assertEquals(None, utils.dom_get_element_with_id(dom, "dd", "bb1"))
        self.assertEquals(None, utils.dom_get_element_with_id(dom, "bb", "bb4"))
        self.assertEquals(None, utils.dom_get_element_with_id(dom, "bb", "cc1"))
        self.assertEquals(
            None,
            utils.dom_get_element_with_id(
                utils.dom_get_element_with_id(dom, "bb", "bb2"),
                "cc",
                "cc2"
            )
        )
    def test_dom_get_parent_by_tag_name(self):
        dom = xml.dom.minidom.parseString("""
            <aa id="aa1">
                <bb id="bb1"/>
                <bb id="bb2">
                    <cc id="cc1"/>
                </bb>
                <bb id="bb3">
                    <cc id="cc2"/>
                </bb>
                <dd id="dd1" />
            </aa>
        """).documentElement
        bb1 = utils.dom_get_element_with_id(dom, "bb", "bb1")
        cc1 = utils.dom_get_element_with_id(dom, "cc", "cc1")
        self.assert_element_id(
            utils.dom_get_parent_by_tag_name(bb1, "aa"),
            "aa1"
        )
        self.assert_element_id(
            utils.dom_get_parent_by_tag_name(cc1, "aa"),
            "aa1"
        )
        self.assert_element_id(
            utils.dom_get_parent_by_tag_name(cc1, "bb"),
            "bb2"
        )
        self.assertEquals(None, utils.dom_get_parent_by_tag_name(bb1, "cc"))
        self.assertEquals(None, utils.dom_get_parent_by_tag_name(cc1, "dd"))
        self.assertEquals(None, utils.dom_get_parent_by_tag_name(cc1, "ee"))
    def testValidateConstraintResource(self):
        dom = self.get_cib_resources()
        self.assertEquals(
            (True, "", "myClone"),
            utils.validate_constraint_resource(dom, "myClone")
        )
        self.assertEquals(
            (True, "", "myGroupClone"),
            utils.validate_constraint_resource(dom, "myGroupClone")
        )
        self.assertEquals(
            (True, "", "myMaster"),
            utils.validate_constraint_resource(dom, "myMaster")
        )
        self.assertEquals(
            (True, "", "myGroupMaster"),
            utils.validate_constraint_resource(dom, "myGroupMaster")
        )
        self.assertEquals(
            (True, "", "myResource"),
            utils.validate_constraint_resource(dom, "myResource")
        )
        self.assertEquals(
            (True, "", "myGroup"),
            utils.validate_constraint_resource(dom, "myGroup")
        )
        self.assertEquals(
            (True, "", "myGroupedResource"),
            utils.validate_constraint_resource(dom, "myGroupedResource")
        )
        self.assertEquals(
            (False, "Resource 'myNonexistent' does not exist", None),
            utils.validate_constraint_resource(dom, "myNonexistent")
        )
        message = (
            "%s is a clone resource, you should use the clone id: "
            "%s when adding constraints. Use --force to override."
        )
        self.assertEquals(
            (
                False,
                message % ("myClonedResource", "myClone"),
                "myClone"
            ),
            utils.validate_constraint_resource(dom, "myClonedResource")
        )
        self.assertEquals(
            (
                False,
                message % ("myClonedGroup", "myGroupClone"),
                "myGroupClone"
            ),
            utils.validate_constraint_resource(dom, "myClonedGroup")
        )
        self.assertEquals(
            (
                False,
                message % ("myClonedGroupedResource", "myGroupClone"),
                "myGroupClone"
            ),
            utils.validate_constraint_resource(dom, "myClonedGroupedResource")
        )
        message = (
            "%s is a master/slave resource, you should use the master id: "
            "%s when adding constraints. Use --force to override."
        )
        self.assertEquals(
            (
                False,
                message % ("myMasteredResource", "myMaster"),
                "myMaster"
            ),
            utils.validate_constraint_resource(dom, "myMasteredResource")
        )
        self.assertEquals(
            (
                False,
                message % ("myMasteredGroup", "myGroupMaster"),
                "myGroupMaster"
            ),
            utils.validate_constraint_resource(dom, "myMasteredGroup")
        )
        self.assertEquals(
            (
                False,
                message % ("myMasteredGroupedResource", "myGroupMaster"),
                "myGroupMaster"
            ),
            utils.validate_constraint_resource(dom, "myMasteredGroupedResource")
        )
        utils.pcs_options["--force"] = True
        self.assertEquals(
            (True, "", "myClone"),
            utils.validate_constraint_resource(dom, "myClonedResource")
        )
        self.assertEquals(
            (True, "", "myGroupClone"),
            utils.validate_constraint_resource(dom, "myClonedGroup")
        )
        self.assertEquals(
            (True, "", "myGroupClone"),
            utils.validate_constraint_resource(dom, "myClonedGroupedResource")
        )
        self.assertEquals(
            (True, "", "myMaster"),
            utils.validate_constraint_resource(dom, "myMasteredResource")
        )
        self.assertEquals(
            (True, "", "myGroupMaster"),
            utils.validate_constraint_resource(dom, "myMasteredGroup")
        )
        self.assertEquals(
            (True, "", "myGroupMaster"),
            utils.validate_constraint_resource(dom, "myMasteredGroupedResource")
        )
    def testValidateXmlId(self):
        self.assertEquals((True, ""), utils.validate_xml_id("dummy"))
        self.assertEquals((True, ""), utils.validate_xml_id("DUMMY"))
        self.assertEquals((True, ""), utils.validate_xml_id("dUmMy"))
        self.assertEquals((True, ""), utils.validate_xml_id("dummy0"))
        self.assertEquals((True, ""), utils.validate_xml_id("dum0my"))
        self.assertEquals((True, ""), utils.validate_xml_id("dummy-"))
        self.assertEquals((True, ""), utils.validate_xml_id("dum-my"))
        self.assertEquals((True, ""), utils.validate_xml_id("dummy."))
        self.assertEquals((True, ""), utils.validate_xml_id("dum.my"))
        self.assertEquals((True, ""), utils.validate_xml_id("_dummy"))
        self.assertEquals((True, ""), utils.validate_xml_id("dummy_"))
        self.assertEquals((True, ""), utils.validate_xml_id("dum_my"))
        self.assertEquals(
            (False, "test id cannot be empty"),
            utils.validate_xml_id("", "test id")
        )
        msg = "invalid test id '%s', '%s' is not a valid first character for a test id"
        self.assertEquals(
            (False, msg % ("0", "0")),
            utils.validate_xml_id("0", "test id")
        )
        self.assertEquals(
            (False, msg % ("-", "-")),
            utils.validate_xml_id("-", "test id")
        )
        self.assertEquals(
            (False, msg % (".", ".")),
            utils.validate_xml_id(".", "test id")
        )
        self.assertEquals(
            (False, msg % (":", ":")),
            utils.validate_xml_id(":", "test id")
        )
        self.assertEquals(
            (False, msg % ("0dummy", "0")),
            utils.validate_xml_id("0dummy", "test id")
        )
        self.assertEquals(
            (False, msg % ("-dummy", "-")),
            utils.validate_xml_id("-dummy", "test id")
        )
        self.assertEquals(
            (False, msg % (".dummy", ".")),
            utils.validate_xml_id(".dummy", "test id")
        )
        self.assertEquals(
            (False, msg % (":dummy", ":")),
            utils.validate_xml_id(":dummy", "test id")
        )
        msg = "invalid test id '%s', '%s' is not a valid character for a test id"
        self.assertEquals(
            (False, msg % ("dum:my", ":")),
            utils.validate_xml_id("dum:my", "test id")
        )
        self.assertEquals(
            (False, msg % ("dummy:", ":")),
            utils.validate_xml_id("dummy:", "test id")
        )
        self.assertEquals(
            (False, msg % ("dum?my", "?")),
            utils.validate_xml_id("dum?my", "test id")
        )
        self.assertEquals(
            (False, msg % ("dummy?", "?")),
            utils.validate_xml_id("dummy?", "test id")
        )
    def testIsIso8601Date(self):
        self.assertTrue(utils.is_iso8601_date("2014-07-03"))
        self.assertTrue(utils.is_iso8601_date("2014-07-03T11:35:14"))
        self.assertTrue(utils.is_iso8601_date("20140703"))
        self.assertTrue(utils.is_iso8601_date("2014-W27-4"))
        self.assertTrue(utils.is_iso8601_date("2014-184"))
        self.assertFalse(utils.is_iso8601_date(""))
        self.assertFalse(utils.is_iso8601_date("foo"))
        self.assertFalse(utils.is_iso8601_date("2014-07-32"))
        self.assertFalse(utils.is_iso8601_date("2014-13-03"))
        self.assertFalse(utils.is_iso8601_date("2014-W27-8"))
        self.assertFalse(utils.is_iso8601_date("2014-367"))
    def test_is_score(self):
        self.assertTrue(utils.is_score("INFINITY"))
        self.assertTrue(utils.is_score("+INFINITY"))
        self.assertTrue(utils.is_score("-INFINITY"))
        self.assertTrue(utils.is_score("0"))
        self.assertTrue(utils.is_score("+0"))
        self.assertTrue(utils.is_score("-0"))
        self.assertTrue(utils.is_score("123"))
        self.assertTrue(utils.is_score("-123"))
        self.assertTrue(utils.is_score("+123"))
        self.assertFalse(utils.is_score(""))
        self.assertFalse(utils.is_score("abc"))
        self.assertFalse(utils.is_score("+abc"))
        self.assertFalse(utils.is_score("-abc"))
        self.assertFalse(utils.is_score("10a"))
        self.assertFalse(utils.is_score("+10a"))
        self.assertFalse(utils.is_score("-10a"))
        self.assertFalse(utils.is_score("a10"))
        self.assertFalse(utils.is_score("+a10"))
        self.assertFalse(utils.is_score("a-10"))
        self.assertFalse(utils.is_score("infinity"))
        self.assertFalse(utils.is_score("+infinity"))
        self.assertFalse(utils.is_score("-infinity"))
        self.assertFalse(utils.is_score("+InFiNiTy"))
        self.assertFalse(utils.is_score("INFINITY10"))
        self.assertFalse(utils.is_score("INFINITY+10"))
        self.assertFalse(utils.is_score("-INFINITY10"))
        self.assertFalse(utils.is_score("+INFINITY+10"))
        self.assertFalse(utils.is_score("10INFINITY"))
        self.assertFalse(utils.is_score("+10+INFINITY"))
    def test_get_timeout_seconds(self):
        self.assertEquals(utils.get_timeout_seconds("10"), 10)
        self.assertEquals(utils.get_timeout_seconds("10s"), 10)
        self.assertEquals(utils.get_timeout_seconds("10min"), 600)
        self.assertEquals(utils.get_timeout_seconds("1a1s"), None)
        self.assertEquals(utils.get_timeout_seconds("10m"), None)
        self.assertEquals(utils.get_timeout_seconds("10mim"), None)
        self.assertEquals(utils.get_timeout_seconds("aaa"), None)
        self.assertEquals(utils.get_timeout_seconds(""), None)
        self.assertEquals(utils.get_timeout_seconds("1a1s", True), "1a1s")
        self.assertEquals(utils.get_timeout_seconds("10m", True), "10m")
        self.assertEquals(utils.get_timeout_seconds("10mim", True), "10mim")
        self.assertEquals(utils.get_timeout_seconds("aaa", True), "aaa")
        self.assertEquals(utils.get_timeout_seconds("", True), "")
    def test_get_default_op_timeout(self):
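        # utils reads the CIB from temp_cib while usefile is set, so the test
        # can exercise default-action-timeout handling without a live cluster.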
        shutil.copy(empty_cib, temp_cib)
        utils.usefile = True
        utils.filename = temp_cib
        self.assertEquals(utils.get_default_op_timeout(), 20)
        output, retVal = pcs(temp_cib, "property set default-action-timeout=25")
        self.assertEquals(retVal, 0)
        self.assertEquals(utils.get_default_op_timeout(), 25)
        output, retVal = pcs(temp_cib, "property unset default-action-timeout")
        self.assertEquals(retVal, 0)
        self.assertEquals(utils.get_default_op_timeout(), 20)
        utils.usefile = False
        utils.filename = ""
    def test_get_resource_op_timeout(self):
        shutil.copy(empty_cib, temp_cib)
        utils.usefile = True
        utils.filename = temp_cib
        output, retVal = pcs(temp_cib, "property set default-action-timeout=25")
        ac(output, "")
        self.assertEquals(retVal, 0)
        output, retVal = pcs(
            temp_cib,
            "resource create dummy Dummy op start timeout=33s --no-default-ops"
        )
        ac(output, "")
        self.assertEquals(retVal, 0)
        dom = xml.dom.minidom.parse(temp_cib)
        self.assertEquals(
            utils.get_resource_op_timeout(dom, "dummy", "start"),
            33
        )
        self.assertEquals(
            utils.get_resource_op_timeout(dom, "dummy", "stop"),
            20
        )
        self.assertEquals(
            utils.get_resource_op_timeout(dom, "dummy0", "start"),
            25
        )
        utils.usefile = False
        utils.filename = ""
    def get_cib_status_lrm(self):
        cib_dom = self.get_cib_empty()
        new_status = xml.dom.minidom.parseString("""
<status>
  <node_state id="1" uname="rh70-node1">
    <lrm id="1">
      <lrm_resources>
        <lrm_resource id="dummy" type="Dummy" class="ocf" provider="heartbeat">
          <lrm_rsc_op id="dummy_monitor_30000" operation="monitor" call-id="34"
            rc-code="1" on_node="Xrh70-node1X" exit-reason="test" />
          <lrm_rsc_op id="dummy_stop_0" operation="stop" call-id="32"
            rc-code="0" />
          <lrm_rsc_op id="dummy_start_0" operation="start" call-id="33"
            rc-code="0" />
        </lrm_resource>
      </lrm_resources>
    </lrm>
  </node_state>
  <node_state id="2" uname="rh70-node2">
    <lrm id="2">
      <lrm_resources>
        <lrm_resource id="dummy" type="Dummy" class="ocf" provider="heartbeat">
          <lrm_rsc_op id="dummy_monitor_0" operation="monitor" call-id="5"
            rc-code="1" />
        </lrm_resource>
        <lrm_resource id="dummy1" type="Dummy" class="ocf" provider="heartbeat">
          <lrm_rsc_op id="dummy1_monitor_0" operation="monitor" call-id="3"
            rc-code="0" />
        </lrm_resource>
      </lrm_resources>
    </lrm>
  </node_state>
</status>
        """).documentElement
        status = cib_dom.getElementsByTagName("status")[0]
        status.parentNode.replaceChild(new_status, status)
        return cib_dom
    def test_get_lrm_rsc_op(self):
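        # get_lrm_rsc_op gathers lrm_rsc_op elements for a resource across all
        # node_state entries, ordered by call-id; the optional operation list
        # filters by operation name, and last_call_id keeps only ops whose
        # call-id is greater than the given value.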
        dom = self.get_cib_status_lrm()
        op_list = utils.get_lrm_rsc_op(dom, "dummy")
        op_id_list = [op.getAttribute("id") for op in op_list]
        self.assertEquals(
            op_id_list,
            ["dummy_monitor_0", "dummy_stop_0", "dummy_start_0",
                "dummy_monitor_30000",]
        )
        op_list = utils.get_lrm_rsc_op(dom, "dummy", ["monitor"])
        op_id_list = [op.getAttribute("id") for op in op_list]
        self.assertEquals(
            op_id_list,
            ["dummy_monitor_0", "dummy_monitor_30000",]
        )
        op_list = utils.get_lrm_rsc_op(dom, "dummy", ["stop", "start"])
        op_id_list = [op.getAttribute("id") for op in op_list]
        self.assertEquals(
            op_id_list,
            ["dummy_stop_0", "dummy_start_0",]
        )
        op_list = utils.get_lrm_rsc_op(dom, "dummy", last_call_id=30)
        op_id_list = [op.getAttribute("id") for op in op_list]
        self.assertEquals(
            op_id_list,
            ["dummy_stop_0", "dummy_start_0", "dummy_monitor_30000",]
        )
        op_list = utils.get_lrm_rsc_op(dom, "dummy", ["monitor"], 30)
        op_id_list = [op.getAttribute("id") for op in op_list]
        self.assertEquals(
            op_id_list,
            ["dummy_monitor_30000",]
        )
        op_list = utils.get_lrm_rsc_op(dom, "dummy", last_call_id=340)
        op_id_list = [op.getAttribute("id") for op in op_list]
        self.assertEquals(op_id_list, [])
        op_list = utils.get_lrm_rsc_op(dom, "dummy", last_call_id=34)
        op_id_list = [op.getAttribute("id") for op in op_list]
        self.assertEquals(op_id_list, [])
        op_list = utils.get_lrm_rsc_op(dom, "dummy0", ["monitor"], 30)
        op_id_list = [op.getAttribute("id") for op in op_list]
        self.assertEquals(op_id_list, [])
        op_list = utils.get_lrm_rsc_op(dom, "dummy0", ["monitor"])
        op_id_list = [op.getAttribute("id") for op in op_list]
        self.assertEquals(op_id_list, [])
        op_list = utils.get_lrm_rsc_op(dom, "dummy0", last_call_id=30)
        op_id_list = [op.getAttribute("id") for op in op_list]
        self.assertEquals(op_id_list, [])
        op_list = utils.get_lrm_rsc_op(dom, "dummy0")
        op_id_list = [op.getAttribute("id") for op in op_list]
        self.assertEquals(op_id_list, [])
    def test_get_lrm_rsc_op_failures(self):
        dom = self.get_cib_status_lrm()
        failures = utils.get_lrm_rsc_op_failures(
            utils.get_lrm_rsc_op(dom, "dummy")
        )
        self.assertEquals(
            failures,
            ["rh70-node2: failed", "Xrh70-node1X: test"]
        )
        failures = utils.get_lrm_rsc_op_failures(
            utils.get_lrm_rsc_op(dom, "dummy", ["start"])
        )
        self.assertEquals(failures, [])
        failures = utils.get_lrm_rsc_op_failures(
            utils.get_lrm_rsc_op(dom, "dummy0")
        )
        self.assertEquals(failures, [])
    def test_resource_running_on(self):
        status = xml.dom.minidom.parseString("""
<crm_mon>
    <summary />
    <nodes />
    <resources>
        <resource id="myResource" role="Started">
            <node name="rh70-node1" />
        </resource>
        <clone id="myClone">
            <resource id="myClonedResource" role="Started">
                <node name="rh70-node1" />
            </resource>
            <resource id="myClonedResource" role="Started">
                <node name="rh70-node2" />
            </resource>
            <resource id="myClonedResource" role="Started">
                <node name="rh70-node3" />
            </resource>
        </clone>
        <clone id="myMaster">
            <resource id="myMasteredResource:1" role="Slave">
                <node name="rh70-node2" />
            </resource>
            <resource id="myMasteredResource" role="Slave">
                <node name="rh70-node3" />
            </resource>
            <resource id="myMasteredResource" role="Master">
                <node name="rh70-node1" />
            </resource>
        </clone>
        <group id="myGroup">
             <resource id="myGroupedResource" role="Started">
                 <node name="rh70-node2" />
             </resource>
        </group>
        <clone id="myGroupClone">
            <group id="myClonedGroup:0">
                 <resource id="myClonedGroupedResource" role="Started">
                     <node name="rh70-node1" />
                 </resource>
            </group>
            <group id="myClonedGroup:1">
                 <resource id="myClonedGroupedResource" role="Started">
                     <node name="rh70-node2" />
                 </resource>
            </group>
            <group id="myClonedGroup:2">
                 <resource id="myClonedGroupedResource" role="Started">
                     <node name="rh70-node3" />
                 </resource>
            </group>
            <group id="myClonedGroup:3">
                 <resource id="myClonedGroupedResource" role="Started">
                     <node name="rh70-node3" />
                 </resource>
            </group>
        </clone>
        <clone id="myGroupMaster">
            <group id="myMasteredGroup:0">
                 <resource id="myMasteredGroupedResource" role="Slave">
                     <node name="rh70-node1" />
                 </resource>
            </group>
            <group id="myMasteredGroup:1">
                 <resource id="myMasteredGroupedResource" role="Master">
                     <node name="rh70-node2" />
                 </resource>
            </group>
            <group id="myMasteredGroup:2">
                 <resource id="myMasteredGroupedResource" role="Slave">
                     <node name="rh70-node3" />
                 </resource>
            </group>
        </clone>
        <resource id="myStoppedResource" role="Stopped">
        </resource>
    </resources>
</crm_mon>
        """).documentElement
        self.assertEquals(
            utils.resource_running_on("myResource", status),
            {
                'message':
                    "Resource 'myResource' is running on node rh70-node1.",
                'nodes_master': [],
                'nodes_slave': [],
                'nodes_started': ["rh70-node1"],
            }
        )
        self.assertEquals(
            utils.resource_running_on("myClonedResource", status),
            {
                'message':
                    "Resource 'myClonedResource' is running on nodes "
                        "rh70-node1, rh70-node2, rh70-node3.",
                'nodes_master': [],
                'nodes_slave': [],
                'nodes_started': ["rh70-node1", "rh70-node2", "rh70-node3"],
            }
        )
        self.assertEquals(
            utils.resource_running_on("myClone", status),
            {
                'message':
                    "Resource 'myClone' is running on nodes "
                        "rh70-node1, rh70-node2, rh70-node3.",
                'nodes_master': [],
                'nodes_slave': [],
                'nodes_started': ["rh70-node1", "rh70-node2", "rh70-node3"],
            }
        )
        self.assertEquals(
            utils.resource_running_on("myMasteredResource", status),
            {
                'message':
                    "Resource 'myMasteredResource' is master on node "
                        "rh70-node1; slave on nodes rh70-node2, rh70-node3.",
                'nodes_master': ["rh70-node1"],
                'nodes_slave': ["rh70-node2", "rh70-node3"],
                'nodes_started': [],
            }
        )
        self.assertEquals(
            utils.resource_running_on("myMaster", status),
            {
                'message':
                    "Resource 'myMaster' is master on node "
                        "rh70-node1; slave on nodes rh70-node2, rh70-node3.",
                'nodes_master': ["rh70-node1"],
                'nodes_slave': ["rh70-node2", "rh70-node3"],
                'nodes_started': [],
            }
        )
        self.assertEquals(
            utils.resource_running_on("myGroupedResource", status),
            {
                'message':
                    "Resource 'myGroupedResource' is running on node "
                        "rh70-node2.",
                'nodes_master': [],
                'nodes_slave': [],
                'nodes_started': ["rh70-node2"],
            }
        )
        self.assertEquals(
            utils.resource_running_on("myGroup", status),
            {
                'message':
                    "Resource 'myGroup' is running on node "
                        "rh70-node2.",
                'nodes_master': [],
                'nodes_slave': [],
                'nodes_started': ["rh70-node2"],
            }
        )
        self.assertEquals(
            utils.resource_running_on("myClonedGroupedResource", status),
            {
                'message':
                    "Resource 'myClonedGroupedResource' is running on nodes "
                        "rh70-node1, rh70-node2, rh70-node3, rh70-node3.",
                'nodes_master': [],
                'nodes_slave': [],
                'nodes_started': ["rh70-node1", "rh70-node2", "rh70-node3",
                    "rh70-node3"],
            }
        )
        self.assertEquals(
            utils.resource_running_on("myClonedGroup", status),
            {
                'message':
                    "Resource 'myClonedGroup' is running on nodes "
                        "rh70-node1, rh70-node2, rh70-node3, rh70-node3.",
                'nodes_master': [],
                'nodes_slave': [],
                'nodes_started': ["rh70-node1", "rh70-node2", "rh70-node3",
                    "rh70-node3"],
            }
        )
        self.assertEquals(
            utils.resource_running_on("myGroupClone", status),
            {
                'message':
                    "Resource 'myGroupClone' is running on nodes "
                        "rh70-node1, rh70-node2, rh70-node3, rh70-node3.",
                'nodes_master': [],
                'nodes_slave': [],
                'nodes_started': ["rh70-node1", "rh70-node2", "rh70-node3",
                    "rh70-node3"],
            }
        )
        self.assertEquals(
            utils.resource_running_on("myMasteredGroupedResource", status),
            {
                'message':
                    "Resource 'myMasteredGroupedResource' is master on node "
                        "rh70-node2; slave on nodes rh70-node1, rh70-node3.",
                'nodes_master': ["rh70-node2"],
                'nodes_slave': ["rh70-node1", "rh70-node3"],
                'nodes_started': [],
            }
        )
        self.assertEquals(
            utils.resource_running_on("myMasteredGroup", status),
            {
                'message':
                    "Resource 'myMasteredGroup' is master on node "
                        "rh70-node2; slave on nodes rh70-node1, rh70-node3.",
                'nodes_master': ["rh70-node2"],
                'nodes_slave': ["rh70-node1", "rh70-node3"],
                'nodes_started': [],
            }
        )
        self.assertEquals(
            utils.resource_running_on("myGroupMaster", status),
            {
                'message':
                    "Resource 'myGroupMaster' is master on node "
                        "rh70-node2; slave on nodes rh70-node1, rh70-node3.",
                'nodes_master': ["rh70-node2"],
                'nodes_slave': ["rh70-node1", "rh70-node3"],
                'nodes_started': [],
            }
        )
        self.assertEquals(
            utils.resource_running_on("notMyResource", status),
            {
                'message':
                    "Resource 'notMyResource' is not running on any node",
                'nodes_master': [],
                'nodes_slave': [],
                'nodes_started': [],
            }
        )
        self.assertEquals(
            utils.resource_running_on("myStoppedResource", status),
            {
                'message':
                    "Resource 'myStoppedResource' is not running on any node",
                'nodes_master': [],
                'nodes_slave': [],
                'nodes_started': [],
            }
        )
    def test_count_expected_resource_instances(self):
        dom = xml.dom.minidom.parse("empty.xml")
        new_resources = xml.dom.minidom.parseString("""
<resources>
    <primitive id="prim1">
    </primitive>
    <group id="group1">
        <primitive id="prim2">
        </primitive>
    </group>
    <clone id="clone1">
        <primitive id="prim3">
        </primitive>
    </clone>
    <clone id="clone2">
        <primitive id="prim4">
        </primitive>
        <meta_attributes>
            <nvpair name="clone-max" value="9"/>
            <nvpair name="clone-node-max" value="3"/>
        </meta_attributes>
    </clone>
    <clone id="clone3">
        <primitive id="prim5">
        </primitive>
        <meta_attributes>
            <nvpair name="clone-max" value="2"/>
            <nvpair name="clone-node-max" value="3"/>
        </meta_attributes>
    </clone>
    <clone id="clone4">
        <primitive id="prim6">
        </primitive>
        <meta_attributes>
            <nvpair name="globally-unique" value="true"/>
            <nvpair name="clone-max" value="9"/>
        </meta_attributes>
    </clone>
    <clone id="clone5">
        <primitive id="prim7">
        </primitive>
        <meta_attributes>
            <nvpair name="globally-unique" value="true"/>
            <nvpair name="clone-max" value="9"/>
            <nvpair name="clone-node-max" value="2"/>
        </meta_attributes>
    </clone>
    <clone id="clone6">
        <primitive id="prim8">
        </primitive>
        <meta_attributes>
            <nvpair name="globally-unique" value="true"/>
            <nvpair name="clone-max" value="9"/>
            <nvpair name="clone-node-max" value="4"/>
        </meta_attributes>
    </clone>
    <master id="master1">
        <primitive id="prim9">
        </primitive>
    </master>
    <master id="master2">
        <primitive id="prim10">
        </primitive>
        <meta_attributes>
            <nvpair name="clone-max" value="9"/>
            <nvpair name="clone-node-max" value="3"/>
            <nvpair name="master-max" value="5"/>
            <nvpair name="master-node-max" value="4"/>
        </meta_attributes>
    </master>
    <master id="master3">
        <primitive id="prim11">
        </primitive>
        <meta_attributes>
            <nvpair name="globally-unique" value="true"/>
            <nvpair name="clone-max" value="9"/>
            <nvpair name="clone-node-max" value="3"/>
        </meta_attributes>
    </master>
    <master id="master4">
        <primitive id="prim12">
        </primitive>
        <meta_attributes>
            <nvpair name="globally-unique" value="true"/>
            <nvpair name="clone-max" value="9"/>
            <nvpair name="clone-node-max" value="3"/>
            <nvpair name="master-max" value="3"/>
            <nvpair name="master-node-max" value="2"/>
        </meta_attributes>
    </master>
    <master id="master5">
        <primitive id="prim13">
        </primitive>
        <meta_attributes>
            <nvpair name="globally-unique" value="true"/>
            <nvpair name="clone-max" value="9"/>
            <nvpair name="clone-node-max" value="3"/>
            <nvpair name="master-max" value="12"/>
            <nvpair name="master-node-max" value="4"/>
        </meta_attributes>
    </master>
</resources>
        """).documentElement
        resources = dom.getElementsByTagName("resources")[0]
        resources.parentNode.replaceChild(new_resources, resources)
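        # The expected counts below (for 3 nodes) are consistent with:
        # non-unique clones run at most one instance per node, so the count is
        # min(clone-max, nodes); globally-unique clones allow clone-node-max
        # instances per node, so it is min(clone-max, clone-node-max * nodes);
        # masters are additionally capped by master-max and master-node-max.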
        self.assertEquals(
            1,
            utils.count_expected_resource_instances(
                utils.dom_get_resource(dom, "prim1"), 3
            )
        )
        self.assertEquals(
            1,
            utils.count_expected_resource_instances(
                utils.dom_get_group(dom, "group1"), 3
            )
        )
        self.assertEquals(
            3,
            utils.count_expected_resource_instances(
                utils.dom_get_clone(dom, "clone1"), 3
            )
        )
        self.assertEquals(
            3,
            utils.count_expected_resource_instances(
                utils.dom_get_clone(dom, "clone2"), 3
            )
        )
        self.assertEquals(
            2,
            utils.count_expected_resource_instances(
                utils.dom_get_clone(dom, "clone3"), 3
            )
        )
        self.assertEquals(
            3,
            utils.count_expected_resource_instances(
                utils.dom_get_clone(dom, "clone4"), 3
            )
        )
        self.assertEquals(
            6,
            utils.count_expected_resource_instances(
                utils.dom_get_clone(dom, "clone5"), 3
            )
        )
        self.assertEquals(
            9,
            utils.count_expected_resource_instances(
                utils.dom_get_clone(dom, "clone6"), 3
            )
        )
        self.assertEquals(
            1,
            utils.count_expected_resource_instances(
                utils.dom_get_master(dom, "master1"), 3
            )
        )
        self.assertEquals(
            3,
            utils.count_expected_resource_instances(
                utils.dom_get_master(dom, "master2"), 3
            )
        )
        self.assertEquals(
            1,
            utils.count_expected_resource_instances(
                utils.dom_get_master(dom, "master3"), 3
            )
        )
        self.assertEquals(
            3,
            utils.count_expected_resource_instances(
                utils.dom_get_master(dom, "master4"), 3
            )
        )
        self.assertEquals(
            9,
            utils.count_expected_resource_instances(
                utils.dom_get_master(dom, "master5"), 3
            )
        )
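    # Taken together with the fixture above, the rule under test appears
    # to be: a primitive or group always counts as 1 instance; a clone
    # counts min(clone-max, nodes * clone-node-max), where clone-max
    # defaults to the node count and clone-node-max defaults to 1 and is
    # only honored when globally-unique="true"; a master is additionally
    # capped by min(master-max, nodes * master-node-max), with master-max
    # defaulting to 1 under the same globally-unique rule.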
    def test_parse_cman_quorum_info(self):
        parsed = utils.parse_cman_quorum_info("""\
Version: 6.2.0
Config Version: 23
Cluster Name: cluster66
Cluster Id: 22265
Cluster Member: Yes
Cluster Generation: 3612
Membership state: Cluster-Member
Nodes: 3
Expected votes: 3
Total votes: 3
Node votes: 1
Quorum: 2 
Active subsystems: 8
Flags: 
Ports Bound: 0 
Node name: rh66-node2
Node ID: 2
Multicast addresses: 239.192.86.80
Node addresses: 192.168.122.61
---Votes---
1 M 3 rh66-node1
2 M 2 rh66-node2
3 M 1 rh66-node3
""")
        self.assertEquals(True, parsed["quorate"])
        self.assertEquals(2, parsed["quorum"])
        self.assertEquals(
            [
                {"name": "rh66-node1", "votes": 3, "local": False},
                {"name": "rh66-node2", "votes": 2, "local": True},
                {"name": "rh66-node3", "votes": 1, "local": False},
            ],
            parsed["node_list"]
        )
        parsed = utils.parse_cman_quorum_info("""\
Version: 6.2.0
Config Version: 23
Cluster Name: cluster66
Cluster Id: 22265
Cluster Member: Yes
Cluster Generation: 3612
Membership state: Cluster-Member
Nodes: 3
Expected votes: 3
Total votes: 3
Node votes: 1
Quorum: 2 Activity blocked
Active subsystems: 8
Flags: 
Ports Bound: 0 
Node name: rh66-node1
Node ID: 1
Multicast addresses: 239.192.86.80
Node addresses: 192.168.122.61
---Votes---
1 M 3 rh66-node1
2 X 2 rh66-node2
3 X 1 rh66-node3
""")
        self.assertEquals(False, parsed["quorate"])
        self.assertEquals(2, parsed["quorum"])
        self.assertEquals(
            [
                {"name": "rh66-node1", "votes": 3, "local": True},
            ],
            parsed["node_list"]
        )
        parsed = utils.parse_cman_quorum_info("")
        self.assertEquals(None, parsed)
        parsed = utils.parse_cman_quorum_info("""\
Version: 6.2.0
Config Version: 23
Cluster Name: cluster66
Cluster Id: 22265
Cluster Member: Yes
Cluster Generation: 3612
Membership state: Cluster-Member
Nodes: 3
Expected votes: 3
Total votes: 3
Node votes: 1
Quorum: 
Active subsystems: 8
Flags: 
Ports Bound: 0 
Node name: rh66-node2
Node ID: 2
Multicast addresses: 239.192.86.80
Node addresses: 192.168.122.61
---Votes---
1 M 3 rh66-node1
2 M 2 rh66-node2
3 M 1 rh66-node3
""")
        self.assertEquals(None, parsed)
        parsed = utils.parse_cman_quorum_info("""\
Version: 6.2.0
Config Version: 23
Cluster Name: cluster66
Cluster Id: 22265
Cluster Member: Yes
Cluster Generation: 3612
Membership state: Cluster-Member
Nodes: 3
Expected votes: 3
Total votes: 3
Node votes: 1
Quorum: Foo
Active subsystems: 8
Flags: 
Ports Bound: 0 
Node name: rh66-node2
Node ID: 2
Multicast addresses: 239.192.86.80
Node addresses: 192.168.122.61
---Votes---
1 M 3 rh66-node1
2 M 2 rh66-node2
3 M 1 rh66-node3
""")
        self.assertEquals(None, parsed)
        parsed = utils.parse_cman_quorum_info("""\
Version: 6.2.0
Config Version: 23
Cluster Name: cluster66
Cluster Id: 22265
Cluster Member: Yes
Cluster Generation: 3612
Membership state: Cluster-Member
Nodes: 3
Expected votes: 3
Total votes: 3
Node votes: 1
Quorum: 4
Active subsystems: 8
Flags: 
Ports Bound: 0 
Node name: rh66-node2
Node ID: 2
Multicast addresses: 239.192.86.80
Node addresses: 192.168.122.61
---Votes---
1 M 3 rh66-node1
2 M Foo rh66-node2
3 M 1 rh66-node3
""")
        self.assertEquals(None, parsed)
    def test_parse_quorumtool_output(self):
        parsed = utils.parse_quorumtool_output("""\
Quorum information
------------------
Date:             Fri Jan 16 13:03:28 2015
Quorum provider:  corosync_votequorum
Nodes:            3
Node ID:          1
Ring ID:          19860
Quorate:          Yes
Votequorum information
----------------------
Expected votes:   3
Highest expected: 3
Total votes:      3
Quorum:           2
Flags:            Quorate
Membership information
----------------------
    Nodeid      Votes    Qdevice Name
         1          3         NR rh70-node1
         2          2         NR rh70-node2 (local)
         3          1         NR rh70-node3
""")
        self.assertEquals(True, parsed["quorate"])
        self.assertEquals(2, parsed["quorum"])
        self.assertEquals(
            [
                {"name": "rh70-node1", "votes": 3, "local": False},
                {"name": "rh70-node2", "votes": 2, "local": True},
                {"name": "rh70-node3", "votes": 1, "local": False},
            ],
            parsed["node_list"]
        )
        parsed = utils.parse_quorumtool_output("""\
Quorum information
------------------
Date:             Fri Jan 16 13:03:35 2015
Quorum provider:  corosync_votequorum
Nodes:            1
Node ID:          1
Ring ID:          19868
Quorate:          No
Votequorum information
----------------------
Expected votes:   3
Highest expected: 3
Total votes:      1
Quorum:           2 Activity blocked
Flags:            
Membership information
----------------------
    Nodeid      Votes    Qdevice Name
             1          1         NR rh70-node1 (local)
""")
        self.assertEquals(False, parsed["quorate"])
        self.assertEquals(2, parsed["quorum"])
        self.assertEquals(
            [
                {"name": "rh70-node1", "votes": 1, "local": True},
            ],
            parsed["node_list"]
        )
        parsed = utils.parse_quorumtool_output("")
        self.assertEquals(None, parsed)
        parsed = utils.parse_quorumtool_output("""\
Quorum information
------------------
Date:             Fri Jan 16 13:03:28 2015
Quorum provider:  corosync_votequorum
Nodes:            3
Node ID:          1
Ring ID:          19860
Quorate:          Yes
Votequorum information
----------------------
Expected votes:   3
Highest expected: 3
Total votes:      3
Quorum:           
Flags:            Quorate
Membership information
----------------------
    Nodeid      Votes    Qdevice Name
         1          1         NR rh70-node1 (local)
         2          1         NR rh70-node2
         3          1         NR rh70-node3
""")
        self.assertEquals(None, parsed)
        parsed = utils.parse_quorumtool_output("""\
Quorum information
------------------
Date:             Fri Jan 16 13:03:28 2015
Quorum provider:  corosync_votequorum
Nodes:            3
Node ID:          1
Ring ID:          19860
Quorate:          Yes
Votequorum information
----------------------
Expected votes:   3
Highest expected: 3
Total votes:      3
Quorum:           Foo
Flags:            Quorate
Membership information
----------------------
    Nodeid      Votes    Qdevice Name
         1          1         NR rh70-node1 (local)
         2          1         NR rh70-node2
         3          1         NR rh70-node3
""")
        self.assertEquals(None, parsed)
        parsed = utils.parse_quorumtool_output("""\
Quorum information
------------------
Date:             Fri Jan 16 13:03:28 2015
Quorum provider:  corosync_votequorum
Nodes:            3
Node ID:          1
Ring ID:          19860
Quorate:          Yes
Votequorum information
----------------------
Expected votes:   3
Highest expected: 3
Total votes:      3
Quorum:           2
Flags:            Quorate
Membership information
----------------------
    Nodeid      Votes    Qdevice Name
         1          1         NR rh70-node1 (local)
         2        foo         NR rh70-node2
         3          1         NR rh70-node3
""")
        self.assertEquals(None, parsed)
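    # Both parsers above share the same failure contract: empty output, a
    # missing or non-numeric quorum value, or a non-numeric vote count all
    # make the parser return None rather than a partially filled dict.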
    def test_is_node_stop_cause_quorum_loss(self):
        quorum_info = {
            "quorate": False,
        }
        self.assertEquals(
            False,
            utils.is_node_stop_cause_quorum_loss(quorum_info, True)
        )
        quorum_info = {
            "quorate": True,
            "quorum": 1,
            "node_list": [
                {"name": "rh70-node3", "votes": 1, "local": False},
            ],
        }
        self.assertEquals(
            False,
            utils.is_node_stop_cause_quorum_loss(quorum_info, True)
        )
        quorum_info = {
            "quorate": True,
            "quorum": 1,
            "node_list": [
                {"name": "rh70-node3", "votes": 1, "local": True},
            ],
        }
        self.assertEquals(
            True,
            utils.is_node_stop_cause_quorum_loss(quorum_info, True)
        )
        quorum_info = {
            "quorate": True,
            "quorum": 4,
            "node_list": [
                {"name": "rh70-node1", "votes": 3, "local": False},
                {"name": "rh70-node2", "votes": 2, "local": False},
                {"name": "rh70-node3", "votes": 1, "local": True},
            ],
        }
        self.assertEquals(
            False,
            utils.is_node_stop_cause_quorum_loss(quorum_info, True)
        )
        quorum_info = {
            "quorate": True,
            "quorum": 4,
            "node_list": [
                {"name": "rh70-node1", "votes": 3, "local": False},
                {"name": "rh70-node2", "votes": 2, "local": True},
                {"name": "rh70-node3", "votes": 1, "local": False},
            ],
        }
        self.assertEquals(
            False,
            utils.is_node_stop_cause_quorum_loss(quorum_info, True)
        )
        quorum_info = {
            "quorate": True,
            "quorum": 4,
            "node_list": [
                {"name": "rh70-node1", "votes": 3, "local": True},
                {"name": "rh70-node2", "votes": 2, "local": False},
                {"name": "rh70-node3", "votes": 1, "local": False},
            ],
        }
        self.assertEquals(
            True,
            utils.is_node_stop_cause_quorum_loss(quorum_info, True)
        )
        quorum_info = {
            "quorate": True,
            "quorum": 4,
            "node_list": [
                {"name": "rh70-node1", "votes": 3, "local": True},
                {"name": "rh70-node2", "votes": 2, "local": False},
                {"name": "rh70-node3", "votes": 1, "local": False},
            ],
        }
        self.assertEquals(
            False,
            utils.is_node_stop_cause_quorum_loss(
                quorum_info, False, ["rh70-node3"]
            )
        )
        quorum_info = {
            "quorate": True,
            "quorum": 4,
            "node_list": [
                {"name": "rh70-node1", "votes": 3, "local": True},
                {"name": "rh70-node2", "votes": 2, "local": False},
                {"name": "rh70-node3", "votes": 1, "local": False},
            ],
        }
        self.assertEquals(
            False,
            utils.is_node_stop_cause_quorum_loss(
                quorum_info, False, ["rh70-node2"]
            )
        )
        quorum_info = {
            "quorate": True,
            "quorum": 4,
            "node_list": [
                {"name": "rh70-node1", "votes": 3, "local": True},
                {"name": "rh70-node2", "votes": 2, "local": False},
                {"name": "rh70-node3", "votes": 1, "local": False},
            ],
        }
        self.assertEquals(
            True,
            utils.is_node_stop_cause_quorum_loss(
                quorum_info, False, ["rh70-node1"]
            )
        )
        quorum_info = {
            "quorate": True,
            "quorum": 4,
            "node_list": [
                {"name": "rh70-node1", "votes": 4, "local": True},
                {"name": "rh70-node2", "votes": 1, "local": False},
                {"name": "rh70-node3", "votes": 1, "local": False},
            ],
        }
        self.assertEquals(
            False,
            utils.is_node_stop_cause_quorum_loss(
                quorum_info, False, ["rh70-node2", "rh70-node3"]
            )
        )
        quorum_info = {
            "quorate": True,
            "quorum": 4,
            "node_list": [
                {"name": "rh70-node1", "votes": 3, "local": True},
                {"name": "rh70-node2", "votes": 2, "local": False},
                {"name": "rh70-node3", "votes": 1, "local": False},
            ],
        }
        self.assertEquals(
            True,
            utils.is_node_stop_cause_quorum_loss(
                quorum_info, False, ["rh70-node2", "rh70-node3"]
            )
        )
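    # The cases above pin down the rule: sum the votes of the nodes being
    # stopped (the local node when the second argument is True, otherwise
    # the nodes named in the list) and subtract from the total; the stop
    # causes quorum loss exactly when the remaining votes drop below the
    # "quorum" value -- e.g. stopping the 3-vote node of 3+2+1=6 leaves
    # 3 < 4, hence True.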
    def assert_element_id(self, node, node_id):
        self.assertTrue(
            isinstance(node, xml.dom.minidom.Element),
            "element with id '%s' not found" % node_id
        )
        self.assertEquals(node.getAttribute("id"), node_id)
if __name__ == "__main__":
    unittest.main()
 | 
	gpl-2.0 | 8,559,471,900,549,907,000 | 33.059337 | 87 | 0.508745 | false | 
| 
	NiloFreitas/Deep-Reinforcement-Learning | 
	tensorblock/recipe/recipe_input.py | 
	1 | 
	3410 | 
	
import tensorflow as tf
import tensorblock as tb
import numpy as np
class recipe_input:
####### Add Input
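# note on precedence in addInput/addVariable below: an explicit 'share'
# reuses an existing node; otherwise a supplied 'tensor' (a tf tensor,
# a numpy array, or a callable builder) is wrapped as-is; only when
# neither is given is a fresh placeholder/variable of the requested
# shape created under the recipe's folder scope.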
    def addInput( self , **args ):
        pars = { **self.defs_input , **args }
        pars['name'] = self.add_label(
                self.inputs , 'Input' , pars['name'] , add_order = True )
        pars = self.parse_input_pars( pars )
        if pars['share'] is not None:
            self.inputs.append( [ self.node( pars['share'] ) , pars ] )
        else:
            if pars['tensor'] is None:
                with tf.variable_scope( self.folder + pars['name'] , reuse = False ):
                    self.inputs.append( [ tb.vars.placeholder( shape = pars['shape'] ,
                                                               dtype = pars['dtype'] ) , pars ] )
            else: self.inputs.append( [ pars['tensor'] , pars ] )
        self.curr_input = pars['name']
        return self.inputs[-1][0]
####### Add Variable
    def addVariable( self , **args ):
        pars = { **self.defs_variable , **args }
        pars['name'] = self.add_label(
                self.variables , 'Variable' , pars['name'] , add_order = True )
        pars = self.parse_input_pars( pars )
        if pars['share'] is not None:
            self.variables.append( [ self.node( pars['share'] ) , pars ] )
        else:
            if pars['tensor'] is None:
                with tf.variable_scope( self.folder + pars['name'] , reuse = False ):
                    self.variables.append( [ pars['type']( pars['shape'] , pars ) , pars ] )
            else:
                if callable( pars['tensor'] ):
                    with tf.variable_scope( self.folder + pars['name'] , reuse = False ):
                        self.variables.append( [ pars['tensor']( pars['shape'] , pars ) , pars ] )
                else:
                    if isinstance( pars['tensor'] , np.ndarray ):
                        self.variables.append( [ tb.vars.numpy( pars['tensor'] , pars ) , pars ] )
                    else:
                        self.variables.append( [ pars['tensor'] , pars ] )
        return self.variables[-1][0]
####### Parse Pars
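# parse_input_pars infers spatial metadata from tensor rank: a rank-2
# shape [batch, n] is read as n sides with one channel, while rank-4
# [batch, h, w, c] and rank-5 [batch, d, h, w, c] shapes are read as
# 2D/3D data whose trailing axis is the channel count; 'copy' instead
# borrows shape and metadata from an existing node, and 'first_none'
# leaves the batch dimension unspecified.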
    def parse_input_pars( self , pars ):
        if pars['tensor'] is not None:
            pars['first_none'] = False
            if isinstance( pars['tensor'] , np.ndarray ):
                pars['shape'] = pars['tensor'].shape
            else:
                pars['shape'] = tb.aux.tf_shape( pars['tensor'] )
        if pars['copy'] is not None: # Copying
            pars['type'] = tb.vars.copy
            pars['shape'] = self.node( pars['copy'] )
            copy_pars = self.pars( pars['copy'] )
            pars['out_sides'] = copy_pars['out_sides']
            pars['out_channels'] = copy_pars['out_channels']
        else: # Nothing
            pars['shape'] = list( pars['shape'] )
            if pars['first_none'] and len( pars['shape'] ) > 1: pars['shape'][0] = None
            shape = pars['shape']
            if pars['out_sides'] is None:
                if len( shape ) == 2: pars['out_sides'] = shape[1:2]
                if len( shape ) == 4: pars['out_sides'] = shape[1:3]
                if len( shape ) == 5: pars['out_sides'] = shape[1:4]
            if pars['out_channels'] is None:
                if len( shape ) == 2: pars['out_channels'] = 1
                else: pars['out_channels'] = shape[-1]
        return pars
 | 
	mit | -6,245,500,325,249,844,000 | 36.888889 | 98 | 0.482405 | false | 
| 
	DmitryFillo/berserker_resolver | 
	setup.py | 
	1 | 
	1337 | 
	import os, sys
from setuptools import setup
def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()
def dep_dnspython():
    name = 'dnspython'
    if sys.version_info[0] == 3:
        name += '3'
    return name
setup(
    name='berserker_resolver',
    version='2.0.1',
    author='Dmitry Fillo',
    author_email='[email protected]',
    maintainer_email='[email protected]',
    description=('Fast mass dns resolver which can bypass loadbalancers'),
    keywords='dns resolver berserker loadbalancer',
    license='BSD',
    url='https://github.com/DmitryFillo/berserker_resolver',
    packages=['berserker_resolver'],
    install_requires=[dep_dnspython()],
    tests_require=['mock'],
    test_suite='tests.get_suite',
    zip_safe=False,
    platforms='any',
    long_description=read('README.rst'),
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Topic :: Internet :: Name Service (DNS)',
        'Topic :: Utilities',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
)
 | 
	bsd-2-clause | 917,970,695,018,627,100 | 30.833333 | 74 | 0.618549 | false | 
| 
	larsks/cobbler-larsks | 
	cobbler/api.py | 
	1 | 
	36927 | 
	"""
python API module for Cobbler
see source for cobbler.py, or pydoc, for example usage.
CLI apps and daemons should import api.py, and no other cobbler code.
Copyright 2006-2009, Red Hat, Inc
Michael DeHaan <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301  USA
"""
import yaml
import config
import utils
import action_sync
import action_check
import action_import
import action_reposync
import action_status
import action_validate
import action_buildiso
import action_replicate
import action_acl
import action_report
import action_power
import action_log
import action_hardlink
import action_dlcontent
from cexceptions import *
try:
    import subprocess as sub_process
except:
    import sub_process
import module_loader
import kickgen
import yumgen
import pxegen
from utils import _
import logging
import time
import random
import os
import xmlrpclib
import traceback
import exceptions
import clogger
import item_distro
import item_profile
import item_system
import item_repo
import item_image
import item_mgmtclass
import item_package
import item_file
ERROR = 100
INFO  = 10
DEBUG = 5
# notes on locking:
# BootAPI is a singleton object
# the XMLRPC variants allow 1 simultaneous request
# therefore we flock on /etc/cobbler/settings for now
# on a request by request basis.
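# implementation note: the "singleton" behavior below is the classic
# Borg pattern -- __init__ rebinds self.__dict__ to the class-level
# __shared_state dict, so every BootAPI instance shares one state even
# though the objects themselves are distinct.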
class BootAPI:
    __shared_state = {}
    __has_loaded = False
    # ===========================================================
    def __init__(self, is_cobblerd=False):
        """
        Constructor
        """
        # FIXME: this should be switchable through some simple system
        self.__dict__ = BootAPI.__shared_state
        self.perms_ok = False
        if not BootAPI.__has_loaded:
            if os.path.exists("/etc/cobbler/use.couch"):
                 self.use_couch = True
            else:
                 self.use_couch = False
            # NOTE: we do not log all API actions, because
            # a simple CLI invocation may call adds and such
            # to load the config, which would just fill up
            # the logs, so we'll do that logging at CLI
            # level (and remote.py web service level) instead.
            random.seed()
            self.is_cobblerd = is_cobblerd
            try:
                self.logger = clogger.Logger("/var/log/cobbler/cobbler.log")
            except CX:
                # return to CLI/other but perms are not valid
                # perms_ok is False
                return
            # FIXME: consolidate into 1 server instance
            self.selinux_enabled = utils.is_selinux_enabled()
            self.dist = utils.check_dist()
            self.os_version = utils.os_release()
            BootAPI.__has_loaded   = True
            module_loader.load_modules()
            self._config         = config.Config(self)
            self.deserialize()
            self.authn = self.get_module_from_file(
                "authentication",
                "module",
                "authn_configfile"
            )
            self.authz  = self.get_module_from_file(
                "authorization",
                "module",
                "authz_allowall"
            )
        
            # FIXME: pass more loggers around, and also see that those
            # using things via tasks construct their own kickgen/yumgen/
            # pxegen versus reusing this one, which has the wrong logger
            # (most likely) for background tasks.
            self.kickgen = kickgen.KickGen(self._config)
            self.yumgen  = yumgen.YumGen(self._config)
            self.pxegen  = pxegen.PXEGen(self._config, logger=self.logger)
            self.logger.debug("API handle initialized")
            self.perms_ok = True
    # ==========================================================
    def is_selinux_enabled(self):
        """
        Returns whether selinux is enabled on the cobbler server.
        We check this just once at cobbler API init time, because
        a restart is required to change this; this does /not/ check
        enforce/permissive, nor does it need to.
        """
        return self.selinux_enabled
    def is_selinux_supported(self):
        """
        Returns whether or not the OS is recent enough
        to run with SELinux enabled (currently EL 5 or later).
        """
        if self.dist == "redhat" and self.os_version < 5:
            # EL releases before 5 don't support public_content_t
            return False
        return True
    # ==========================================================
    def last_modified_time(self):
        """
        Returns the time of the last modification to cobbler, made by any
        API instance, regardless of the serializer type.
        """
        if not os.path.exists("/var/lib/cobbler/.mtime"):
            old = os.umask(0x777)
            fd = open("/var/lib/cobbler/.mtime","w")
            fd.write("0")
            fd.close()
            os.umask(old)
            return 0
        fd = open("/var/lib/cobbler/.mtime")
        data = fd.read().strip()
        return float(data)
    # ==========================================================
    def log(self,msg,args=None,debug=False):
        if debug:
            logger = self.logger.debug
        else:
            logger = self.logger.info 
        if args is None:
            logger("%s" % msg)
        else:
            logger("%s; %s" % (msg, str(args)))
    # ==========================================================
    def version(self, extended=False):
        """
        What version is cobbler?
        If extended == False, returns a float for backwards compatibility
         
        If extended == True, returns a dict:
            gitstamp      -- the last git commit hash
            gitdate       -- the last git commit date on the builder machine
            builddate     -- the time of the build
            version       -- something like "1.3.2"
            version_tuple -- something like [ 1, 3, 2 ]
        """
        fd = open("/etc/cobbler/version")
        ydata = fd.read()
        fd.close()
        data = yaml.load(ydata)
        if not extended:
            # for backwards compatibility and use with koan's comparisons
            elems = data["version_tuple"] 
            return int(elems[0]) + 0.1*int(elems[1]) + 0.001*int(elems[2])
        else:
            return data
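    # /etc/cobbler/version is YAML; a hypothetical example of the fields
    # read above (values are illustrative only):
    #
    #   gitstamp: bd35a43
    #   gitdate: "Mon Oct 5 12:00:00 2009"
    #   builddate: "Tue Oct 6 09:00:00 2009"
    #   version: 1.3.2
    #   version_tuple: [ 1, 3, 2 ]
    #
    # for which the non-extended form returns 1 + 0.1*3 + 0.001*2 == 1.302.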
    # ==========================================================
    def clear(self):
        """
        Forget about current list of profiles, distros, and systems
        # FIXME: is this used anymore?
        """
        return self._config.clear()
    def __cmp(self,a,b):
        return cmp(a.name,b.name)
    # ==========================================================
    def get_item(self, what, name):
        self.log("get_item",[what,name],debug=True)
        return self._config.get_items(what).get(name)
    # =============================================================
    def get_items(self, what):
        self.log("get_items",[what],debug=True)
        return self._config.get_items(what)
    
    def distros(self):
        """
        Return the current list of distributions
        """
        return self.get_items("distro")
    def profiles(self):
        """
        Return the current list of profiles
        """
        return self.get_items("profile")
    def systems(self):
        """
        Return the current list of systems
        """
        return self.get_items("system")
    def repos(self):
        """
        Return the current list of repos
        """
        return self.get_items("repo")
    def images(self):
        """
        Return the current list of images
        """
        return self.get_items("image")
    def settings(self):
        """
        Return the application configuration
        """
        return self._config.settings()
    
    def mgmtclasses(self):
        """
        Return the current list of mgmtclasses
        """
        return self.get_items("mgmtclass")
    
    def packages(self):
        """
        Return the current list of packages
        """
        return self.get_items("package")
    
    def files(self):
        """
        Return the current list of files
        """
        return self.get_items("file")
    # =======================================================================
    def update(self):
        """
        This method is no longer used by cobbler and is kept
        only to avoid breaking older scripts.
        """
        return True
    
    # ========================================================================
    def copy_item(self, what, ref, newname, logger=None):
        self.log("copy_item(%s)"%what,[ref.name, newname])
        return self.get_items(what).copy(ref,newname,logger=logger)
    def copy_distro(self, ref, newname):
        return self.copy_item("distro", ref, newname, logger=None)
    def copy_profile(self, ref, newname):
        return self.copy_item("profile", ref, newname, logger=None)
    def copy_system(self, ref, newname):
        return self.copy_item("system", ref, newname, logger=None)
    def copy_repo(self, ref, newname):
        return self.copy_item("repo", ref, newname, logger=None)
    
    def copy_image(self, ref, newname):
        return self.copy_item("image", ref, newname, logger=None)
    
    def copy_mgmtclass(self, ref, newname):
        return self.copy_item("mgmtclass", ref, newname, logger=None)
    
    def copy_package(self, ref, newname):
        return self.copy_item("package", ref, newname, logger=None)
    
    def copy_file(self, ref, newname):
        return self.copy_item("file", ref, newname, logger=None)
    # ==========================================================================
    def remove_item(self, what, ref, recursive=False, delete=True, with_triggers=True, logger=None):
        if isinstance(what, basestring):
            if isinstance(ref, basestring):
                ref = self.get_item(what, ref)
                if ref is None:
                    return # nothing to remove
        self.log("remove_item(%s)" % what, [ref.name])
        return self.get_items(what).remove(ref.name, recursive=recursive, with_delete=delete, with_triggers=with_triggers, logger=logger)
    def remove_distro(self, ref, recursive=False, delete=True, with_triggers=True, logger=None):
        return self.remove_item("distro", ref, recursive=recursive, delete=delete, with_triggers=with_triggers, logger=logger)
    
    def remove_profile(self,ref, recursive=False, delete=True, with_triggers=True, logger=None):
        return self.remove_item("profile", ref, recursive=recursive, delete=delete, with_triggers=with_triggers, logger=logger)
    def remove_system(self, ref, recursive=False, delete=True, with_triggers=True, logger=None):
        return self.remove_item("system", ref, recursive=recursive, delete=delete, with_triggers=with_triggers, logger=logger)
    def remove_repo(self, ref, recursive=False, delete=True, with_triggers=True, logger=None):
        return self.remove_item("repo", ref, recursive=recursive, delete=delete, with_triggers=with_triggers, logger=logger)
    def remove_image(self, ref, recursive=False, delete=True, with_triggers=True, logger=None):
        return self.remove_item("image", ref, recursive=recursive, delete=delete, with_triggers=with_triggers, logger=logger)
    
    def remove_mgmtclass(self, ref, recursive=False, delete=True, with_triggers=True, logger=None):
        return self.remove_item("mgmtclass", ref, recursive=recursive, delete=delete, with_triggers=with_triggers, logger=logger)
    
    def remove_package(self, ref, recursive=False, delete=True, with_triggers=True, logger=None):
        return self.remove_item("package", ref, recursive=recursive, delete=delete, with_triggers=with_triggers, logger=logger)
    
    def remove_file(self, ref, recursive=False, delete=True, with_triggers=True, logger=None):
        return self.remove_item("file", ref, recursive=recursive, delete=delete, with_triggers=with_triggers, logger=logger)
    # ==========================================================================
    def rename_item(self, what, ref, newname, logger=None):
        self.log("rename_item(%s)"%what,[ref.name,newname])
        return self.get_items(what).rename(ref,newname,logger=logger)
    def rename_distro(self, ref, newname, logger=None):
        return self.rename_item("distro", ref, newname, logger=logger)
    def rename_profile(self, ref, newname, logger=None):
        return self.rename_item("profile", ref, newname, logger=logger)
    def rename_system(self, ref, newname, logger=None):
        return self.rename_item("system", ref, newname, logger=logger)
    def rename_repo(self, ref, newname, logger=None):
        return self.rename_item("repo", ref, newname, logger=logger)
    
    def rename_image(self, ref, newname, logger=None):
        return self.rename_item("image", ref, newname, logger=logger)
    
    def rename_mgmtclass(self, ref, newname, logger=None):
        return self.rename_item("mgmtclass", ref, newname, logger=logger)
    
    def rename_package(self, ref, newname, logger=None):
        return self.rename_item("package", ref, newname, logger=logger)
    
    def rename_file(self, ref, newname, logger=None):
        return self.rename_item("file", ref, newname, logger=logger)
    # ==========================================================================
   
    # FIXME: add a new_item method
    def new_distro(self,is_subobject=False):
        self.log("new_distro",[is_subobject])
        return self._config.new_distro(is_subobject=is_subobject)
    def new_profile(self,is_subobject=False):
        self.log("new_profile",[is_subobject])
        return self._config.new_profile(is_subobject=is_subobject)
    
    def new_system(self,is_subobject=False):
        self.log("new_system",[is_subobject])
        return self._config.new_system(is_subobject=is_subobject)
    def new_repo(self,is_subobject=False):
        self.log("new_repo",[is_subobject])
        return self._config.new_repo(is_subobject=is_subobject)
    
    def new_image(self,is_subobject=False):
        self.log("new_image",[is_subobject])
        return self._config.new_image(is_subobject=is_subobject)
    
    def new_mgmtclass(self,is_subobject=False):
        self.log("new_mgmtclass",[is_subobject])
        return self._config.new_mgmtclass(is_subobject=is_subobject)
    
    def new_package(self,is_subobject=False):
        self.log("new_package",[is_subobject])
        return self._config.new_package(is_subobject=is_subobject)
    
    def new_file(self,is_subobject=False):
        self.log("new_file",[is_subobject])
        return self._config.new_file(is_subobject=is_subobject)
    # ==========================================================================
    def add_item(self, what, ref, check_for_duplicate_names=False, save=True,logger=None):
        self.log("add_item(%s)"%what,[ref.name])
        return self.get_items(what).add(ref,check_for_duplicate_names=check_for_duplicate_names,save=save,logger=logger)
    def add_distro(self, ref, check_for_duplicate_names=False, save=True, logger=None):
        return self.add_item("distro", ref, check_for_duplicate_names=check_for_duplicate_names, save=save,logger=logger)
    def add_profile(self, ref, check_for_duplicate_names=False,save=True, logger=None):
        return self.add_item("profile", ref, check_for_duplicate_names=check_for_duplicate_names, save=save,logger=logger)
    def add_system(self, ref, check_for_duplicate_names=False, check_for_duplicate_netinfo=False, save=True, logger=None):
        return self.add_item("system", ref, check_for_duplicate_names=check_for_duplicate_names, save=save,logger=logger)
    def add_repo(self, ref, check_for_duplicate_names=False,save=True,logger=None):
        return self.add_item("repo", ref, check_for_duplicate_names=check_for_duplicate_names, save=save,logger=logger)
    def add_image(self, ref, check_for_duplicate_names=False,save=True, logger=None):
        return self.add_item("image", ref, check_for_duplicate_names=check_for_duplicate_names, save=save,logger=logger)
    
    def add_mgmtclass(self, ref, check_for_duplicate_names=False,save=True, logger=None):
        return self.add_item("mgmtclass", ref, check_for_duplicate_names=check_for_duplicate_names, save=save,logger=logger)
    
    def add_package(self, ref, check_for_duplicate_names=False,save=True, logger=None):
        return self.add_item("package", ref, check_for_duplicate_names=check_for_duplicate_names, save=save,logger=logger)
    
    def add_file(self, ref, check_for_duplicate_names=False,save=True, logger=None):
        return self.add_item("file", ref, check_for_duplicate_names=check_for_duplicate_names, save=save,logger=logger)
    # ==========================================================================
    # FIXME: find_items should take all the arguments the other find
    # methods do.
    def find_items(self, what, criteria=None):
        self.log("find_items",[what])
        # defaults
        if criteria is None:
            criteria={}
        items=self._config.get_items(what)
        # empty criteria returns everything
        if criteria == {}:
            res=items
        else:
            res=items.find(return_list=True, no_errors=False, **criteria)
        return res
    def find_distro(self, name=None, return_list=False, no_errors=False, **kargs):
        return self._config.distros().find(name=name, return_list=return_list, no_errors=no_errors, **kargs)
        
    def find_profile(self, name=None, return_list=False, no_errors=False, **kargs):
        return self._config.profiles().find(name=name, return_list=return_list, no_errors=no_errors, **kargs)
    def find_system(self, name=None, return_list=False, no_errors=False, **kargs):
        return self._config.systems().find(name=name, return_list=return_list, no_errors=no_errors, **kargs)
    def find_repo(self, name=None, return_list=False, no_errors=False, **kargs):
        return self._config.repos().find(name=name, return_list=return_list, no_errors=no_errors, **kargs)
    def find_image(self, name=None, return_list=False, no_errors=False, **kargs):
        return self._config.images().find(name=name, return_list=return_list, no_errors=no_errors, **kargs)
    
    def find_mgmtclass(self, name=None, return_list=False, no_errors=False, **kargs):
        return self._config.mgmtclasses().find(name=name, return_list=return_list, no_errors=no_errors, **kargs)
    
    def find_package(self, name=None, return_list=False, no_errors=False, **kargs):
        return self._config.packages().find(name=name, return_list=return_list, no_errors=no_errors, **kargs)
    
    def find_file(self, name=None, return_list=False, no_errors=False, **kargs):
        return self._config.files().find(name=name, return_list=return_list, no_errors=no_errors, **kargs)
    # ==========================================================================
    def __since(self,mtime,collector,collapse=False):
        """
        Called by get_*_since functions.
        """
        results1 = collector()
        results2 = []
        for x in results1:
           if x.mtime == 0 or x.mtime >= mtime:
              if not collapse:
                  results2.append(x)
              else:
                  results2.append(x.to_datastruct())
        return results2
    def get_distros_since(self,mtime,collapse=False):
        """
        Returns distros modified since a certain time (in seconds since Epoch)
        collapse=True specifies returning a hash instead of objects.
        """
        return self.__since(mtime,self.distros,collapse=collapse)
    def get_profiles_since(self,mtime,collapse=False):
        return self.__since(mtime,self.profiles,collapse=collapse)
    def get_systems_since(self,mtime,collapse=False):
        return self.__since(mtime,self.systems,collapse=collapse)
    def get_repos_since(self,mtime,collapse=False):
        return self.__since(mtime,self.repos,collapse=collapse)
    def get_images_since(self,mtime,collapse=False):
        return self.__since(mtime,self.images,collapse=collapse)
    
    def get_mgmtclasses_since(self,mtime,collapse=False):
        return self.__since(mtime,self.mgmtclasses,collapse=collapse)
    
    def get_packages_since(self,mtime,collapse=False):
        return self.__since(mtime,self.packages,collapse=collapse)
    
    def get_files_since(self,mtime,collapse=False):
        return self.__since(mtime,self.files,collapse=collapse)
    # ==========================================================================
    def dump_vars(self, obj, format=False):
        return obj.dump_vars(format)
    # ==========================================================================
    def auto_add_repos(self):
        """
        Import any repos this server knows about and mirror them.
        Credit: Seth Vidal.
        """
        self.log("auto_add_repos")
        try:
            import yum
        except:
            raise CX(_("yum is not installed"))
        version = yum.__version__
        (a,b,c) = version.split(".")
        # split() yields strings; convert so e.g. "3.2.4" compares as 3204
        version = int(a)*1000 + int(b)*100 + int(c)
        if version < 3204:
            raise CX(_("need yum > 3.2.4 to proceed"))
        base = yum.YumBase()
        base.doRepoSetup()
        repos = base.repos.listEnabled()
        if len(repos) == 0:
            raise CX(_("no repos enabled/available -- giving up."))
        for repo in repos:
            url = repo.urls[0]
            cobbler_repo = self.new_repo()
            auto_name = repo.name.replace(" ","")
            # FIXME: probably doesn't work for yum-rhn-plugin ATM
            cobbler_repo.set_mirror(url)
            cobbler_repo.set_name(auto_name)
            print "auto adding: %s (%s)" % (auto_name, url)
            self._config.repos().add(cobbler_repo,save=True)
        # run cobbler reposync to apply changes
        return True 
    # ==========================================================================
    def get_repo_config_for_profile(self,obj):
        return self.yumgen.get_yum_config(obj,True)
    
    def get_repo_config_for_system(self,obj):
        return self.yumgen.get_yum_config(obj,False)
    # ==========================================================================
    def get_template_file_for_profile(self,obj,path):
        template_results = self.pxegen.write_templates(obj,False,path)
        if template_results.has_key(path):
            return template_results[path]
        else:
            return "# template path not found for specified profile"
    def get_template_file_for_system(self,obj,path):
        template_results = self.pxegen.write_templates(obj,False,path)
        if template_results.has_key(path):
            return template_results[path]
        else:
            return "# template path not found for specified system"
    # ==========================================================================
    def generate_kickstart(self,profile,system):
        self.log("generate_kickstart")
        if system:
            return self.kickgen.generate_kickstart_for_system(system)
        else:
            return self.kickgen.generate_kickstart_for_profile(profile) 
    # ==========================================================================
    def check(self, logger=None):
        """
        See if all preqs for network booting are valid.  This returns
        a list of strings containing instructions on things to correct.
        An empty list means there is nothing to correct, but that still
        doesn't mean there are configuration errors.  This is mainly useful
        for human admins, who may, for instance, forget to properly set up
        their TFTP servers for PXE, etc.
        """
        self.log("check")
        check = action_check.BootCheck(self._config, logger=logger)
        return check.run()
    # ==========================================================================
    def dlcontent(self,force=False,logger=None):
        """
        Downloads bootloader content that may not be available in packages
        for the given arch, ex: if installing on PPC, get syslinux. If installing
        on x86_64, get elilo, etc.
        """
        # FIXME: teach code that copies it to grab from the right place
        self.log("dlcontent")
        grabber = action_dlcontent.ContentDownloader(self._config, logger=logger)
        return grabber.run(force)
    # ==========================================================================
    def validateks(self, logger=None):
        """
        Use ksvalidator (from pykickstart, if available) to determine
        whether the cobbler kickstarts are going to be (likely) well
        accepted by Anaconda.  Presence of an error does not indicate
        the kickstart is bad, only that the possibility exists.  ksvalidator
        is not available on all platforms and can not detect "future"
        kickstart format correctness.
        """
        self.log("validateks")
        validator = action_validate.Validate(self._config, logger=logger)
        return validator.run()
    # ==========================================================================
    def sync(self,verbose=False, logger=None):
        """
        Take the values currently written to the configuration files in
        /etc, and /var, and build out the information tree found in
        /tftpboot.  Any operations done in the API that have not been
        saved with serialize() will NOT be synchronized with this command.
        """
        self.log("sync")
        sync = self.get_sync(verbose=verbose, logger=logger)
        return sync.run()
    # ==========================================================================
    def get_sync(self,verbose=False,logger=None):
        self.dhcp = self.get_module_from_file(
           "dhcp",
           "module",
           "manage_isc"
        ).get_manager(self._config,logger)
        self.dns = self.get_module_from_file(
           "dns",
           "module",
           "manage_bind"
        ).get_manager(self._config,logger)
        self.tftpd = self.get_module_from_file(
           "tftpd",
           "module",
           "in_tftpd",
        ).get_manager(self._config,logger)
        return action_sync.BootSync(self._config,dhcp=self.dhcp,dns=self.dns,tftpd=self.tftpd,verbose=verbose,logger=logger)
    # ==========================================================================
    def reposync(self, name=None, tries=1, nofail=False, logger=None):
        """
        Take the contents of /var/lib/cobbler/repos and update them --
        or create the initial copy if no contents exist yet.
        """
        self.log("reposync",[name])
        reposync = action_reposync.RepoSync(self._config, tries=tries, nofail=nofail, logger=logger)
        return reposync.run(name)
    # ==========================================================================
    def status(self,mode,logger=None):
        statusifier = action_status.BootStatusReport(self._config,mode,logger=logger)
        return statusifier.run()
    # ==========================================================================
    def import_tree(self,mirror_url,mirror_name,network_root=None,kickstart_file=None,rsync_flags=None,arch=None,breed=None,os_version=None,logger=None):
        """
        Automatically import a directory tree full of distribution files.
        mirror_url can be a string that represents a path, a user@host 
        syntax for SSH, or an rsync:// address.  If mirror_url is a 
        filesystem path and mirroring is not desired, set network_root 
        to something like "nfs://path/to/mirror_url/root" 
        """
        self.log("import_tree",[mirror_url, mirror_name, network_root, kickstart_file, rsync_flags])
        importer_modules = self.get_modules_in_category("manage/import")
        for importer_module in importer_modules:
            manager = importer_module.get_import_manager(self._config,logger)
            try:
                (found,pkgdir) = manager.check_for_signature(mirror_url,breed)
                if found:
                    self.log("running import manager: %s" % manager.what())
                    return manager.run(pkgdir,mirror_url,mirror_name,network_root,kickstart_file,rsync_flags,arch,breed,os_version)
            except:
                self.log("an error occurred while running the import manager")
                continue
        self.log("No import managers found a valid signature at the location specified")
        return False
    # ==========================================================================
    def acl_config(self,adduser=None,addgroup=None,removeuser=None,removegroup=None, logger=None):
        """
        Configures users/groups to run the cobbler CLI as non-root.
        Pass in only one option at a time.  Powers "cobbler aclconfig"
        """
        acl = action_acl.AclConfig(self._config, logger)
        return acl.run(
            adduser=adduser,
            addgroup=addgroup,
            removeuser=removeuser,
            removegroup=removegroup
        )
    # ==========================================================================
    def serialize(self):
        """
        Save the config file(s) to disk.
        Cobbler internal use only.
        """
        return self._config.serialize()
    def deserialize(self):
        """
        Load the current configuration from config file(s)
        Cobbler internal use only.
        """
        return self._config.deserialize()
    def deserialize_raw(self,collection_name):
        """
        Get the collection back just as raw data.
        Cobbler internal use only.
        """
        return self._config.deserialize_raw(collection_name)
    def deserialize_item_raw(self,collection_name,obj_name):
        """
        Get an object back as raw data.
        Can be very fast for shelve or catalog serializers
        Cobbler internal use only.
        """
        return self._config.deserialize_item_raw(collection_name,obj_name)
    # ==========================================================================
    def get_module_by_name(self,module_name):
        """
        Returns a loaded cobbler module named 'name', if one exists, else None.
        Cobbler internal use only.
        """
        return module_loader.get_module_by_name(module_name)
    def get_module_from_file(self,section,name,fallback=None):
        """
        Looks in /etc/cobbler/modules.conf for a section called 'section'
        and a key called 'name', and then returns the module that corresponds
        to the value of that key.
        Cobbler internal use only.
        """
        return module_loader.get_module_from_file(section,name,fallback)
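    # a hypothetical /etc/cobbler/modules.conf fragment matching the
    # lookups made in __init__ (section, then key, with a fallback):
    #
    #   [authentication]
    #   module = authn_configfile
    #
    #   [authorization]
    #   module = authz_allowall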
    def get_modules_in_category(self,category):
        """
        Returns all modules in a given category, for instance "serializer", or "cli".
        Cobbler internal use only.
        """
        return module_loader.get_modules_in_category(category)
    # ==========================================================================
    def authenticate(self,user,password):
        """
        (Remote) access control.
        Cobbler internal use only.
        """
        rc = self.authn.authenticate(self,user,password)
        self.log("authenticate",[user,rc])
        return rc 
    def authorize(self,user,resource,arg1=None,arg2=None):
        """
        (Remote) access control.
        Cobbler internal use only.
        """
        rc = self.authz.authorize(self,user,resource,arg1,arg2)
        self.log("authorize",[user,resource,arg1,arg2,rc],debug=True)
        return rc
    # ==========================================================================
    def build_iso(self,iso=None,profiles=None,systems=None,buildisodir=None,distro=None,standalone=None,source=None, exclude_dns=None, logger=None):
        builder = action_buildiso.BuildIso(self._config, logger=logger)
        return builder.run(
           iso=iso, profiles=profiles, systems=systems, buildisodir=buildisodir, distro=distro, standalone=standalone, source=source, exclude_dns=exclude_dns
        )
    # ==========================================================================
    def hardlink(self, logger=None):
        linker = action_hardlink.HardLinker(self._config, logger=logger)
        return linker.run()
    # ==========================================================================
    def replicate(self, cobbler_master = None, distro_patterns="", profile_patterns="", system_patterns="", repo_patterns="", image_patterns="",
                  mgmtclass_patterns=None, package_patterns=None, file_patterns=None, prune=False, omit_data=False, sync_all=False, logger=None):
        """
        Pull down data/configs from a remote cobbler server that is a master to this server.
        """
        replicator = action_replicate.Replicate(self._config, logger=logger)
        return replicator.run(
              cobbler_master       = cobbler_master,
              distro_patterns      = distro_patterns,
              profile_patterns     = profile_patterns,
              system_patterns      = system_patterns,
              repo_patterns        = repo_patterns,
              image_patterns       = image_patterns,
              mgmtclass_patterns   = mgmtclass_patterns,
              package_patterns     = package_patterns,
              file_patterns        = file_patterns,
              prune                = prune,
              omit_data            = omit_data,
              sync_all             = sync_all
        )
    # ==========================================================================
    def report(self, report_what = None, report_name = None, report_type = None, report_fields = None, report_noheaders = None):
        """
        Report functionality for cobbler
        """
        reporter = action_report.Report(self._config)
        return reporter.run(report_what = report_what, report_name = report_name,\
                            report_type = report_type, report_fields = report_fields,\
                            report_noheaders = report_noheaders)
    # ==========================================================================
    def get_kickstart_templates(self):
        return utils.get_kickstart_templates(self)
    # ==========================================================================
    def power_on(self, system, user=None, password=None, logger=None):
        """
        Powers up a system that has power management configured.
        """
        return action_power.PowerTool(self._config,system,self,user,password,logger=logger).power("on")
    def power_off(self, system, user=None, password=None, logger=None):
        """
        Powers down a system that has power management configured.
        """
        return action_power.PowerTool(self._config,system,self,user,password,logger=logger).power("off")
    def reboot(self,system, user=None, password=None, logger=None):
        """
        Cycles power on a system that has power management configured.
        """
        self.power_off(system, user, password, logger=logger)
        time.sleep(5)
        return self.power_on(system, user, password, logger=logger)
    def power_status(self, system, user=None, password=None, logger=None):
        """
        Returns the power status for a system that has power management configured.
        @return: True if the system is powered on, False if it's not, or None on error
        """
        return action_power.PowerTool(self._config, system, self, user, password, logger = logger).power("status")
    # ==========================================================================
    def clear_logs(self, system, logger=None):
        """
        Clears console and anamon logs for system
        """
        return action_log.LogTool(self._config,system,self, logger=logger).clear()
    def get_os_details(self):
        return (self.dist, self.os_version)
 | 
	gpl-2.0 | 7,651,953,824,866,149,000 | 37.993664 | 157 | 0.578168 | false | 
| 
	Adamssss/projectEuler | 
	Problem 001-150 Python/pb103.py | 
	1 | 
	1978 | 
	import math
import time
t1 = time.time()
# exactsub(oset) enumerates every split of oset into an ordered pair of
# disjoint, non-empty subsets whose union is exactly the original set
def exactsub(oset):
    l = len(oset)
    if l == 2:
        return [[[oset[0]],[oset[1]]]]
    result = []
    f = oset[0]
    rest = oset[1:]
    result.append([[f],rest])
    for i in exactsub(rest):
        a = i[0]
        b = i[1]
        result.append([a+[f],b])
        result.append([a,b+[f]])
    return result
def allsub(oset):
    temp = exactsub(oset)
    result = temp[:]
    for i in temp:
        if len(i[0]) > 1:
            result += exactsub(i[0])
        if len(i[1]) > 1:
            result += exactsub(i[1])
    return result
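# checksub enforces the two special-sum-set rules on a disjoint pair
# (B, C): their subset sums must differ, and whichever side has more
# elements must also have the strictly larger sum.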
def checksub(setp):
    B = setp[0]
    C = setp[1]
    sb = sum(B)
    sc = sum(C)
    if sb == sc:
        return False
    lb = len(B)
    lc = len(C)
    if lb > lc and sb <= sc:
        return False
    if lb < lc and sb >= sc:
        return False
    return True
def checkset(tset):
    for i in allsub(tset):
        if not checksub(i):
            return False
    return True
def toString(aset):
    temp = 0
    for i in aset:
        dig = math.floor(math.log10(i)+1)
        temp = temp*math.pow(10,dig)+i
    return int(temp)
sset = [[],[1],[1,2],[2,3,4],[3,5,6,7],[6,9,11,12,13],[11,18,19,20,22,25]]
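# heuristic from the problem statement: given the optimum set of size n-1
# with middle element b, a near-optimal size-n candidate is b followed by
# b plus each old element; makeset() builds that candidate shifted down by
# AL so that near(), which enumerates per-element offsets in [0, 2*AL),
# searches a small window around the heuristic values until checkset()
# accepts one.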
AL = 2
def near(n):
    if n == 1:
        result = []
        for i in range(0,AL*2):
            result.append([i])
        return result
    result = []
    for i in range(0,AL*2):
        for j in near(n-1):
            result.append([i]+j)
    return result
def addaprox(seta,setb):
    result = seta[:]
    for i in range(len(seta)):
        result[i] += setb[i]
    return result
def makeset(n):
    temp = sset[n-1]
    a = temp[n//2-1]
    base = [a]
    for i in range(n-1):
        base.append(temp[i]+a-AL)
    for j in near(n):
        temp = addaprox(base,j)
        if checkset(temp):
            return temp
print(toString(makeset(7)))
print("time:",time.time()-t1)  
    
 | 
	mit | 849,634,466,316,029,700 | 18.979798 | 74 | 0.505561 | false | 
| 
	NeurodataWithoutBorders/api-python | 
	examples/create_scripts/extensions/e-trajectorySeries2.py | 
	1 | 
	2936 | 
	# Definitions of extension to TimeSeries to store trajectory information
# All format specifications must be stored in dictionary "fs"
# "mnts2" is the "namespace" for this extension
# This extension explicitly specifies meaning for each column of dataset data
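# For reference, derived from the component list below: the "trajectories"
# dimension has 24 columns, ordered s1 through s4 with x, y, z, pitch, roll,
# yaw for each sensor, i.e. column index = 6*(sensor_number-1) + offset,
# where offset 0..5 corresponds to x, y, z, pitch, roll, yaw.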
{"fs": {"mnts2": {
"info": {
    "name": "Sabes lab data trajectory series",
    "version": "1.0",
    "date": "Oct 3, 2016",
    "author": "Jeff Teeters",
    "contact": "[email protected]",
    "description": ("Extension to store timeseries of hand trajectories for Sabes lab data")
},
"schema": {
    "<TrajectorySeries>/": {
        "merge": ["core:<SpatialSeries>/"],
        "attributes": {
            "ancestry": {
                "data_type": "text",
                "dimensions": ["3"],
                "value": ["TimeSeries", "SpatialSeries", "TrajectorySeries"],
                "const": True},
            "help": {
                "data_type": "text",
                "value": "Trajectory of hand movement positions",
                "const": True},
        },
        "data": {
            "description": ("Measurements of hand trajectory, recorded at each point of time."),
            "dimensions": ["num_times", "trajectories"],
            "data_type": "float32",
            "trajectories": {
                "type": "structure",
                # define components of trajectories dimension 
                "components": [
                    { "alias": "s1_x", "unit": "meter" },
                    { "alias": "s1_y", "unit": "meter" },
                    { "alias": "s1_z", "unit": "meter" },
                    { "alias": "s1_pitch", "unit": "radian" },
                    { "alias": "s1_roll", "unit": "radian" },
                    { "alias": "s1_yaw", "unit": "radian" },                   
                    { "alias": "s2_x", "unit": "meter" },
                    { "alias": "s2_y", "unit": "meter" },
                    { "alias": "s2_z", "unit": "meter" },
                    { "alias": "s2_pitch", "unit": "radian" },
                    { "alias": "s2_roll", "unit": "radian" },
                    { "alias": "s2_yaw", "unit": "radian" },                   
                    { "alias": "s3_x", "unit": "meter" },
                    { "alias": "s3_y", "unit": "meter" },
                    { "alias": "s3_z", "unit": "meter" },
                    { "alias": "s3_pitch", "unit": "radian" },
                    { "alias": "s3_roll", "unit": "radian" },
                    { "alias": "s3_yaw", "unit": "radian" },                   
                    { "alias": "s4_x", "unit": "meter" },
                    { "alias": "s4_y", "unit": "meter" },
                    { "alias": "s4_z", "unit": "meter" },
                    { "alias": "s4_pitch", "unit": "radian" },
                    { "alias": "s4_roll", "unit": "radian" },
                    { "alias": "s4_yaw", "unit": "radian" } ] },
        }
    }
}
}}} | 
	bsd-3-clause | -9,183,462,637,381,410,000 | 42.835821 | 96 | 0.419619 | false | 
| 
	tangyaohua/dl4mt | 
	session2/lm/deepy/trainers/optimize.py | 
	1 | 
	4684 | 
	#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging as loggers
import numpy as np
import theano
import theano.tensor as T
from theano.ifelse import ifelse
from ..utils import FLOATX, dim_to_var, EPSILON
from .util import wrap_core, multiple_l2_norm
from ..conf import TrainerConfig
logging = loggers.getLogger(__name__)
def optimize_updates(params, gradients, config=None, shapes=None):
    """
    General optimization function for Theano.
    Parameters:
        params - parameters
        gradients - gradients
        config - training config
    Returns:
        Theano updates
    :type config: deepy.TrainerConfig or dict
    """
    if config and isinstance(config, dict):
        config = TrainerConfig(config)
    # Clipping
    if config:
        clip_value = config.get("gradient_clipping", None)
        if clip_value:
            clip_constant = T.constant(clip_value, dtype=FLOATX)
            if config.avoid_compute_embed_norm:
                grad_norm = multiple_l2_norm([t[1] for t in zip(params, gradients) if not t[0].name.startswith("W_embed")])
            else:
                grad_norm = multiple_l2_norm(gradients)
            isnan = T.or_(T.isnan(grad_norm), T.isinf(grad_norm))
            multiplier = ifelse(grad_norm < clip_constant,
                                T.constant(1., dtype=FLOATX), clip_constant / (grad_norm + EPSILON))
            # Clip
            clipped_gradients = []
            for param, g in zip(params, gradients):
                g = multiplier * g
                if config.avoid_nan:
                    g = T.switch(isnan, np.float32(0.1) * param, g)
                if config.gradient_tolerance:
                    g = ifelse(grad_norm > config.gradient_tolerance, T.zeros_like(g) + EPSILON, g)
                clipped_gradients.append(g)
            gradients = clipped_gradients
    # Regularization
    if config and config.weight_l2:
        regularized_gradients = []
        for param, grad in zip(params, gradients):
            grad = grad + (2 * config.weight_l2 * param)
            regularized_gradients.append(grad)
        gradients = regularized_gradients
    # Avoid nan but not computing the norm
    # This is not recommended
    if config and config.avoid_nan and not config.gradient_clipping:
        logging.info("avoid NaN gradients")
        new_gradients = []
        for grad in gradients:
            new_grad = ifelse(T.isnan(grad).any(), T.zeros_like(grad) + EPSILON, grad)
            new_gradients.append(new_grad)
        gradients = new_gradients
    # Find method
    method = "SGD"
    if config:
        method = config.get("method", method).upper()
    # Get Function
    func = None
    if method in ["SGD", "ADAGRAD", "ADADELTA", "FINETUNING_ADAGRAD"]:
        from cores.ada_family import ada_family_core
        func = ada_family_core
    elif method == "ADAM":
        from cores.adam import adam_core
        func = adam_core
    elif method == "RMSPROP":
        from cores.rmsprop import rmsprop_core
        func = rmsprop_core
    elif method == "MOMENTUM":
        from cores.momentum import momentum_core
        func = momentum_core
    if not func:
        raise NotImplementedError("method '%s' is not supported" % method)
    logging.info("optimize method=%s parameters=%s" % (method, str(params)))
    free_parameters = []
    return_vals = wrap_core(func, config, params, gradients)
    if type(return_vals) == list and type(return_vals[0]) == list:
        updates, free_parameters = return_vals
    else:
        updates = return_vals
    # No free param recording
    if config and not config.record_free_params:
        free_parameters = []
    # Weight bound
    if config and config.weight_bound:
        logging.info("apply weight bound of %.2f" % config.weight_bound)
        new_updates = []
        for param, update_value in updates:
            bounded_value = (update_value * (T.abs_(update_value) <= config.weight_bound) +
                             config.weight_bound * (update_value > config.weight_bound) +
                             -config.weight_bound * (update_value < -config.weight_bound))
            new_updates.append((param, bounded_value))
        updates = new_updates
    return updates, free_parameters
def optimize_function(params, config=None):
    """
    Create an optimizing function that receives gradients.
    Parameters:
        params - parameters
        config - training configuration
    Returns:
        updating function receives gradients
    """
    gs = [dim_to_var(p.ndim) for p in params]
    updates, _ = optimize_updates(params, gs, config)
    return theano.function(gs, [], updates=updates)
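# Minimal usage sketch (illustrative names; assumes `params` are Theano shared
# variables and `loss` is a scalar Theano expression over `inputs`):
#
#     gradients = T.grad(loss, params)
#     updates, free_params = optimize_updates(params, gradients,
#                                             config={"method": "ADAM"})
#     train_fn = theano.function(inputs, loss, updates=updates)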
 | 
	bsd-3-clause | -5,147,573,832,780,105,000 | 33.696296 | 123 | 0.609308 | false | 
| 
	TeamHG-Memex/hgprofiler | 
	lib/cli/run_server.py | 
	1 | 
	1340 | 
	import app
import cli
class RunServerCli(cli.BaseCli):
    """ A tool for running a development server. """
    def _get_args(self, arg_parser):
        """ Customize arguments. """
        arg_parser.add_argument(
            '--debug',
            action='store_true',
            help='Enable debug mode: errors produce stack traces and' \
                 ' the server auto reloads on source code changes.'
        )
        arg_parser.add_argument(
            '--debug-db',
            action='store_true',
            help='Print database queries.'
        )
        arg_parser.add_argument(
            '--ip',
            default='127.0.0.1',
            help='Specify an IP address to bind to. (Defaults to loopback.)'
        )
        arg_parser.add_argument(
            '--latency',
            type=float,
            metavar='L',
            help='Delay each request by <L> seconds.'
        )
    def _run(self, args, config):
        """ Main entry point. """
        flask_app = app.bootstrap(
            debug=args.debug,
            debug_db=args.debug_db,
            latency=args.latency,
            log_level=args.verbosity
        )
        # Disable secure cookies for the development server.
        flask_app.config["SESSION_COOKIE_SECURE"] = False
        flask_app.run(host=args.ip, threaded=True)
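# Invocation sketch (assuming the surrounding application exposes this CLI;
# flag names come from the argument parser above):
#
#     run-server --debug --ip 0.0.0.0 --latency 0.5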
 | 
	apache-2.0 | 466,433,427,109,874,750 | 26.346939 | 76 | 0.526866 | false | 
| 
	yephper/django | 
	django/conf/locale/en_GB/formats.py | 
	1 | 
	2159 | 
	# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j M Y'                   # '25 Oct 2006'
TIME_FORMAT = 'P'                       # '2:30 p.m.'
DATETIME_FORMAT = 'j M Y, P'            # '25 Oct 2006, 2:30 p.m.'
YEAR_MONTH_FORMAT = 'F Y'               # 'October 2006'
MONTH_DAY_FORMAT = 'j F'                # '25 October'
SHORT_DATE_FORMAT = 'd/m/Y'             # '25/10/2006'
SHORT_DATETIME_FORMAT = 'd/m/Y P'       # '25/10/2006 2:30 p.m.'
FIRST_DAY_OF_WEEK = 1                   # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    '%d/%m/%Y', '%d/%m/%y',             # '25/10/2006', '25/10/06'
    # '%b %d %Y', '%b %d, %Y',          # 'Oct 25 2006', 'Oct 25, 2006'
    # '%d %b %Y', '%d %b, %Y',          # '25 Oct 2006', '25 Oct, 2006'
    # '%B %d %Y', '%B %d, %Y',          # 'October 25 2006', 'October 25, 2006'
    # '%d %B %Y', '%d %B, %Y',          # '25 October 2006', '25 October, 2006'
]
DATETIME_INPUT_FORMATS = [
    '%Y-%m-%d %H:%M:%S',                # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M:%S.%f',             # '2006-10-25 14:30:59.000200'
    '%Y-%m-%d %H:%M',                   # '2006-10-25 14:30'
    '%Y-%m-%d',                         # '2006-10-25'
    '%d/%m/%Y %H:%M:%S',                # '25/10/2006 14:30:59'
    '%d/%m/%Y %H:%M:%S.%f',             # '25/10/2006 14:30:59.000200'
    '%d/%m/%Y %H:%M',                   # '25/10/2006 14:30'
    '%d/%m/%Y',                         # '25/10/2006'
    '%d/%m/%y %H:%M:%S',                # '25/10/06 14:30:59'
    '%d/%m/%y %H:%M:%S.%f',             # '25/10/06 14:30:59.000200'
    '%d/%m/%y %H:%M',                   # '25/10/06 14:30'
    '%d/%m/%y',                         # '25/10/06'
]
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3
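# Illustrative check of the input formats above (plain Python, not Django):
#
#     from datetime import datetime
#     datetime.strptime('25/10/2006 14:30', '%d/%m/%Y %H:%M')
#     # -> datetime.datetime(2006, 10, 25, 14, 30)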
 | 
	bsd-3-clause | -7,802,788,513,267,937,000 | 49.404762 | 79 | 0.443261 | false | 
| 
	mcf-rocha/swagger-graph | 
	api-ingestor.py | 
	1 | 
	2985 | 
	import sys
import pysvn
import json
import configparser, os
from pymongo import MongoClient
config = configparser.RawConfigParser()
config.read('/Users/mcf/configuration.cfg')
svnroot = config.get('SVN', 'root')
svnpath = config.get('SVN', 'path')
svnusername = config.get('SVN', 'username')
svnpwd = config.get('SVN', 'pwd')
mongohost = config.get('MongoDB', 'host')
mongoport = config.getint('MongoDB', 'port')
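# Expected layout of configuration.cfg (illustrative values; the section and
# option names are taken from the config.get calls above):
#
#   [SVN]
#   root = https://svn.example.com
#   path = /swagger
#   username = user
#   pwd = secret
#
#   [MongoDB]
#   host = localhost
#   port = 27017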
svnclient = pysvn.Client()
svnclient.set_default_username(svnusername)
svnclient.set_default_password(svnpwd)
try:
    dirlist = svnclient.list(svnroot+svnpath,peg_revision=pysvn.Revision(pysvn.opt_revision_kind.head))
except BaseException as be:
    print("Could not connect to SVN. Execution aborted.",)
    print(be.__str__())
    sys.exit()
else:
    mongoclient = MongoClient(mongohost, mongoport)
    print("Cleaning the MongoDB collection...")
    mongodb = mongoclient.catlog
    mongodb.swagger.drop()
    for item, locked in dirlist:
        if item.kind == pysvn.node_kind.file:
            #print("Starting to process document {}".format(item.path))
            nomearquivoswagger = item.path.rpartition("/")[2]
            print("Starting to process document {}".format(nomearquivoswagger))
            swaggercontentbyte = svnclient.cat(item.path,peg_revision=pysvn.Revision(pysvn.opt_revision_kind.head))
            try:
                swaggercontentstring = swaggercontentbyte.decode("utf-8")
            except UnicodeDecodeError:
                print("      failed to decode file as utf-8; aborting")
                sys.exit(1)
            ##-------------------ADJUSTMENTS REQUIRED FOR SWAGGER SYNTAX THAT IS NOT ALLOWED IN MONGO KEY NAMES
            swaggercontentstring2 = swaggercontentstring.replace("$ref","ref")
            ##-------------------
            #print(swaggercontentstring2)
            try:
                swaggerjson = json.loads(swaggercontentstring2)
            except json.decoder.JSONDecodeError as jde:
                swaggercontentstring = swaggercontentbyte.decode("utf-8-sig")
                swaggercontentstring2 = swaggercontentstring.replace("$ref","ref")
                swaggerjson = json.loads(swaggercontentstring2)
            #else:
            #    print("      An encoding problem has happened when inserting Swagger information into the MongoDB collection... {}".format(getSwaggerTitle(swaggerjson)))
            #    sys.exit(1)
            swaggerjson["_id"] = nomearquivoswagger
            print("      Inserting Swagger information into the MongoDB collection...")
            try:
                result = mongodb.swagger.insert_one(swaggerjson)
            except BaseException as mongoex:
                print("            The Swagger information could not be inserted into the MongoDB collection.")
                print(mongoex.__str__())
                print()
            else:
                print("            Document _id: {}".format(nomearquivoswagger,result.inserted_id))
    mongoclient.close() | 
	mit | 6,348,466,525,802,908,000 | 40.444444 | 170 | 0.628227 | false | 
| 
	TwoBitAlchemist/NeoAlchemy | 
	neoalchemy/cypher/verbs.py | 
	1 | 
	4169 | 
	from __future__ import unicode_literals
import six
from ..shared.objects import Property
from .operations import CypherExpression, ComparisonExpression, QueryParams
class CypherQuery(list):
    def __init__(self, graph_obj, use_full_pattern=False):
        self.params = QueryParams()
        try:
            verb = self.verb
        except AttributeError:
            verb = self.__class__.__name__.upper()
        pattern = graph_obj.pattern(inline_props=use_full_pattern)
        super(CypherQuery, self).__init__(['%s %s' % (verb, pattern)])
    def delete(self, *args, **kw):
        detach = kw.get('detach')
        keyword = 'DETACH DELETE ' if detach else 'DELETE '
        self.append(keyword + ', '.join(arg.var for arg in args))
        return self
    def limit(self, limit):
        self.append('LIMIT %i' % int(limit))
        return self
    def order_by(self, *args, **kw):
        stmt = 'ORDER BY ' + ', '.join(arg.var for arg in args)
        if kw.get('desc'):
            stmt += ' DESC'
        self.append(stmt)
        return self
    def remove(self, *args):
        self.append('REMOVE ' + ', '.join(arg.var for arg in args))
        return self
    def return_(self, *args):
        if args:
            self.append('RETURN ' + ', '.join(arg.var for arg in args))
        else:
            self.append('RETURN *')
        return self
    def set(self, *props):
        prop_list = [self._add_expression(prop) for prop in props]
        if prop_list:
            self.append('    SET ' + ', '.join(prop_list))
        return self
    def skip(self, skip):
        self.append('SKIP %i' % int(skip))
        return self
    def where(self, *exprs, **kw):
        or_ = kw.pop('or_', False)
        stmt_list = [self._add_expression(expr) for expr in exprs]
        if stmt_list:
            statements = ' AND '.join(stmt_list)
            if any(keyword.rjust(9) == self[-1][:9]
                    for keyword in ('WHERE', 'AND', 'OR')):
                keyword = 'AND ' if not or_ else 'OR '
            else:
                keyword = 'WHERE '
            self.append(keyword.rjust(10) + statements)
        return self
    def with_(self, *args):
        self.append('WITH ' + ', '.join(arg.var for arg in args))
        return self
    def _add_expression(self, expr):
        if isinstance(expr, Property):
            prop = expr
            expr = ComparisonExpression(prop, prop.value, '=')
        elif not isinstance(expr, CypherExpression):
            raise ValueError('Must be CypherExpression or Property')
        for key, value in expr.compile().params.items():
            self.params[key] = value
            if self.params.last_key.startswith('param'):
                expr.replace(key, self.params.last_key)
        return str(expr)
    def __str__(self):
        return '\n'.join(map(str, self))
    def __and__(self, query):
        self.extend(query)
        self.params.update(query.params)
        return self
    def __or__(self, query):
        self.append('UNION ALL')
        self.extend(query)
        self.params.update(query.params)
        return self
    def __xor__(self, query):
        self.append('UNION')
        self.extend(query)
        self.params.update(query.params)
        return self
class Create(CypherQuery):
    def __init__(self, graph_obj):
        super(Create, self).__init__(graph_obj)
        self.set(*graph_obj.values())
class Match(CypherQuery):
    def __init__(self, graph_obj, optional=False):
        if optional:
            self.verb = 'OPTIONAL MATCH'
        super(Match, self).__init__(graph_obj)
        self.where(*(v for k, v in graph_obj.items()
                     if k in graph_obj.bound_keys))
class Merge(CypherQuery):
    def __init__(self, graph_obj):
        super(Merge, self).__init__(graph_obj, use_full_pattern=True)
        self.params.update({p.param: p.value for k, p in graph_obj.items()
                            if k in graph_obj.bound_keys})
    def on_create(self):
        self.append('ON CREATE')
        return self
    def on_match(self):
        self.append('ON MATCH')
        return self
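# Minimal usage sketch (hypothetical `person` graph object; assumes NeoAlchemy
# graph entities expose Property attributes and the pattern() method used above):
#
#     query = Match(person).where(person.name).return_()
#     print(str(query), query.params)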
 | 
	mit | -7,028,930,501,125,680,000 | 28.992806 | 75 | 0.550971 | false | 
| 
	mahabs/nitro | 
	nssrc/com/citrix/netscaler/nitro/resource/config/dns/dnspolicy_dnspolicylabel_binding.py | 
	1 | 
	6004 | 
	#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
#   Licensed under the Apache License, Version 2.0 (the "License")
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class dnspolicy_dnspolicylabel_binding(base_resource) :
	""" Binding class showing the dnspolicylabel that can be bound to dnspolicy.
	"""
	def __init__(self) :
		self._boundto = ""
		self._priority = 0
		self._activepolicy = 0
		self._gotopriorityexpression = ""
		self._labeltype = ""
		self._labelname = ""
		self._name = ""
		self.___count = 0
	@property
	def boundto(self) :
		"""Location where policy is bound.
		"""
		try :
			return self._boundto
		except Exception as e:
			raise e
	@boundto.setter
	def boundto(self, boundto) :
		"""Location where policy is bound.
		"""
		try :
			self._boundto = boundto
		except Exception as e:
			raise e
	@property
	def name(self) :
		"""Name of the DNS policy.
		"""
		try :
			return self._name
		except Exception as e:
			raise e
	@name.setter
	def name(self, name) :
		"""Name of the DNS policy.
		"""
		try :
			self._name = name
		except Exception as e:
			raise e
	@property
	def priority(self) :
		"""Specifies the priority of the policy.
		"""
		try :
			return self._priority
		except Exception as e:
			raise e
	@property
	def labelname(self) :
		"""Name of the label to invoke if the current policy rule evaluates to TRUE.
		"""
		try :
			return self._labelname
		except Exception as e:
			raise e
	@property
	def gotopriorityexpression(self) :
		"""Expression specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE.
		"""
		try :
			return self._gotopriorityexpression
		except Exception as e:
			raise e
	@property
	def labeltype(self) :
		"""Type of policy label invocation.<br/>Possible values = reqvserver, resvserver, policylabel.
		"""
		try :
			return self._labeltype
		except Exception as e:
			raise e
	@property
	def activepolicy(self) :
		"""Indicates whether policy is bound or not.
		"""
		try :
			return self._activepolicy
		except Exception as e:
			raise e
	def _get_nitro_response(self, service, response) :
		""" converts nitro response into object and returns the object array in case of get request.
		"""
		try :
			result = service.payload_formatter.string_to_resource(dnspolicy_dnspolicylabel_binding_response, response, self.__class__.__name__)
			if(result.errorcode != 0) :
				if (result.errorcode == 444) :
					service.clear_session(self)
				if result.severity :
					if (result.severity == "ERROR") :
						raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
				else :
					raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
			return result.dnspolicy_dnspolicylabel_binding
		except Exception as e :
			raise e
	def _get_object_name(self) :
		""" Returns the value of object identifier argument
		"""
		try :
			if (self.name) :
				return str(self.name)
			return None
		except Exception as e :
			raise e
	@classmethod
	def get(cls, service, name) :
		""" Use this API to fetch dnspolicy_dnspolicylabel_binding resources.
		"""
		try :
			obj = dnspolicy_dnspolicylabel_binding()
			obj.name = name
			response = obj.get_resources(service)
			return response
		except Exception as e:
			raise e
	@classmethod
	def get_filtered(cls, service, name, filter_) :
		""" Use this API to fetch filtered set of dnspolicy_dnspolicylabel_binding resources.
		Filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
		"""
		try :
			obj = dnspolicy_dnspolicylabel_binding()
			obj.name = name
			option_ = options()
			option_.filter = filter_
			response = obj.getfiltered(service, option_)
			return response
		except Exception as e:
			raise e
	@classmethod
	def count(cls, service, name) :
		""" Use this API to count dnspolicy_dnspolicylabel_binding resources configued on NetScaler.
		"""
		try :
			obj = dnspolicy_dnspolicylabel_binding()
			obj.name = name
			option_ = options()
			option_.count = True
			response = obj.get_resources(service, option_)
			if response :
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e
	@classmethod
	def count_filtered(cls, service, name, filter_) :
		""" Use this API to count the filtered set of dnspolicy_dnspolicylabel_binding resources.
		Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = dnspolicy_dnspolicylabel_binding()
			obj.name = name
			option_ = options()
			option_.count = True
			option_.filter = filter_
			response = obj.getfiltered(service, option_)
			if response :
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e
	class Labeltype:
		reqvserver = "reqvserver"
		resvserver = "resvserver"
		policylabel = "policylabel"
class dnspolicy_dnspolicylabel_binding_response(base_response) :
	def __init__(self, length=1) :
		self.dnspolicy_dnspolicylabel_binding = []
		self.errorcode = 0
		self.message = ""
		self.severity = ""
		self.sessionid = ""
		self.dnspolicy_dnspolicylabel_binding = [dnspolicy_dnspolicylabel_binding() for _ in range(length)]
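# Minimal usage sketch (assuming `client` is an authenticated nitro_service
# session; "mypolicy" is an illustrative DNS policy name):
#
#     bindings = dnspolicy_dnspolicylabel_binding.get(client, "mypolicy")
#     total = dnspolicy_dnspolicylabel_binding.count(client, "mypolicy")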
 | 
	apache-2.0 | -9,121,447,943,516,050,000 | 26.167421 | 134 | 0.698368 | false | 
| 
	pudo/regenesis | 
	legacy/regenesis/test_table.py | 
	1 | 
	2695 | 
	# -*- coding: utf-8 -*-
import unittest
import table
MIKROZENSUS_11111_0001 = """GENESIS-Tabelle: Temporär
Gebietsfläche: Bundesländer, Stichtag;;;;;;;;;;;;;
Feststellung des Gebietsstands;;;;;;;;;;;;;
Gebietsfläche (qkm);;;;;;;;;;;;;
;Stichtag;Stichtag;Stichtag;Stichtag;Stichtag;Stichtag;Stichtag;Stichtag;Stichtag;Stichtag;Stichtag;Stichtag;Stichtag
;31.12.1995;31.12.1996;31.12.1997;31.12.1998;31.12.1999;31.12.2000;31.12.2001;31.12.2002;31.12.2003;31.12.2004;31.12.2005;31.12.2006;31.12.2007
Baden-Württemberg;35752,50;35751,76;35751,85;35751,63;35751,36;35751,36;35751,64;35751,64;35751,65;35751,64;35751,65;35751,50;35751,40
Bayern;70550,87;70550,87;70548,00;70547,96;70547,81;70547,85;70549,93;70549,32;70549,19;70549,44;70551,57;70551,57;70551,56
Berlin;890,82;890,85;890,77;890,22;891,41;891,69;891,76;891,75;891,75;891,82;891,85;891,02;891,02
Thüringen;16171,12;16170,88;16171,57;16171,70;16171,85;16171,98;16171,94;16172,21;16172,14;16172,08;16172,10;16172,14;16172,10
Insgesamt;357022,31;357021,43;357020,79;357022,17;357020,22;357021,54;357022,90;357026,55;357030,32;357045,64;357092,90;357114,25;357104,07
_____
Gebietsfläche:
"Berlin (1995-2000): bezogen auf den Gebietsstand 01.01.2001;"
Rheinland-Pfalz: Landessumme ab 2004 einschl. des gemein-
"        schaftlichen deutsch-luxemburgischen Hoheitsgebiets;"
Sachsen (1995): bezogen auf den Gebietsstand 01.01.1996.
_____
(C)opyright Statistisches Bundesamt, Wiesbaden 2010
Stand: 22.04.2010 / 21:32:50"""
GEBURTENZIFFERN_12612_0102 = """GENESIS-Tabelle: Temporär
Geburtenziffern: Deutschland, Jahre, Altersjahre;;;;;;;;
Statistik der Geburten;;;;;;;;
Deutschland;;;;;;;;
Lebendgeborene je 1000 Frauen (Anzahl);;;;;;;;
;Jahr;Jahr;Jahr;Jahr;Jahr;Jahr;Jahr;Jahr
;2001;2002;2003;2004;2005;2006;2007;2008
15 Jahre;1,0;1,0;0,9;0,8;0,8;0,8;0,8;0,8
16 Jahre;3,1;3,2;3,0;2,8;2,6;2,6;2,5;2,6
17 Jahre;7,5;7,7;7,1;6,4;6,2;5,8;5,5;5,7
47 Jahre;0,2;0,1;0,2;0,2;0,2;0,2;0,2;0,2
48 Jahre;0,0;0,1;0,1;0,1;0,1;0,1;0,1;0,1
49 Jahre;0,0;0,0;0,0;0,0;0,1;0,1;0,1;0,1
_____
Durchschnittliches Alter:
Differenz zwischen Geburtsjahr des Kindes und Geburtsjahr
der Mutter.
_____
(C)opyright Statistisches Bundesamt, Wiesbaden 2010
Stand: 10.04.2010 / 21:25:57"""
class tableParserTest(unittest.TestCase):
    
    def _make_parser(self, data, table_id):
        structure, variables = table.load_structure(table_id)
        return table.tableParser(structure, variables, data)
    
    def setUp(self):
        pass
    
    def test_parse_geburtenziffern(self):
        parser = self._make_parser(GEBURTENZIFFERN_12612_0102, "12612-0102")
        parser.parse()
        assert False  # placeholder: expected parse results are not asserted yet
    
if __name__ == '__main__':
    unittest.main() | 
	mit | -8,605,079,979,203,655,000 | 38.529412 | 143 | 0.709341 | false | 
| 
	hexpl0it/plugin.video.genesi-ita | 
	resources/lib/libraries/js2py/prototypes/jsarray.py | 
	1 | 
	14657 | 
	def to_arr(this):
    """Returns Python array from Js array"""
    return [this.get(str(e)) for e in xrange(len(this))]
ARR_STACK = set()  # guards join() against infinite recursion on cyclic arrays
class ArrayPrototype:
    def toString():
        # this function is wrong but I will leave it here for debugging purposes.
        func = this.get('join')
        if not func.is_callable():
            @this.Js
            def func():
                return '[object %s]'%this.Class
        return func.call(this, ())
    def toLocaleString():
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        # separator is simply a comma ','
        if not arr_len:
            return ''
        res = []
        for i in xrange(arr_len):
            element = array[str(i)]
            if element.is_undefined() or element.is_null():
                res.append('')
            else:
                cand = element.to_object()
                str_func = element.get('toLocaleString')
                if not str_func.is_callable():
                    raise this.MakeError('TypeError', 'toLocaleString method of item at index %d is not callable'%i)
                res.append(element.callprop('toLocaleString').value)
        return ','.join(res)
    def concat():
        array = this.to_object()
        A = this.Js([])
        items = [array]
        items.extend(to_arr(arguments))
        n = 0
        for E in items:
            if E.Class=='Array':
                k = 0
                e_len = len(E)
                while k<e_len:
                    if E.has_property(str(k)):
                        A.put(str(n), E.get(str(k)))
                    n+=1
                    k+=1
            else:
                A.put(str(n), E)
                n+=1
        return A
    def join(separator):
        ARR_STACK.add(this)
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        separator = ',' if separator.is_undefined() else separator.to_string().value
        elems = []
        for e in xrange(arr_len):
            elem = array.get(str(e))
            if elem in ARR_STACK:
                s = ''
            else:
                s = elem.to_string().value
            elems.append(s if not (elem.is_undefined() or elem.is_null()) else '')
        res =  separator.join(elems)
        ARR_STACK.remove(this)
        return res
    def pop(): #todo check
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        if not arr_len:
            array.put('length', this.Js(arr_len))
            return None
        ind = str(arr_len-1)
        element = array.get(ind)
        array.delete(ind)
        array.put('length', this.Js(arr_len-1))
        return element
    def push(item): # todo check
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        to_put = arguments.to_list()
        i = arr_len
        for i, e in enumerate(to_put, arr_len):
            array.put(str(i), e)
        if to_put:
            i+=1
            array.put('length', this.Js(i))
        return i
    def reverse():
        array = this.to_object() # my own algorithm
        vals = to_arr(array)
        has_props = [array.has_property(str(e)) for e in xrange(len(array))]
        vals.reverse()
        has_props.reverse()
        for i, val in enumerate(vals):
            if has_props[i]:
                array.put(str(i), val)
            else:
                array.delete(str(i))
        return array
    def shift():  #todo check
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        if not arr_len:
            array.put('length', this.Js(0))
            return None
        first = array.get('0')
        for k in xrange(1, arr_len):
            from_s, to_s = str(k), str(k-1)
            if array.has_property(from_s):
                array.put(to_s, array.get(from_s))
            else:
                array.delete(to_s)
        array.delete(str(arr_len-1))
        array.put('length', this.Js(arr_len-1))
        return first
    def slice(start, end): # todo check
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        relative_start = start.to_int()
        k = max((arr_len + relative_start), 0) if relative_start<0 else  min(relative_start, arr_len)
        relative_end = arr_len if end.is_undefined() else end.to_int()
        final =  max((arr_len + relative_end), 0) if relative_end<0 else min(relative_end, arr_len)
        res = []
        n = 0
        while k<final:
            pk = str(k)
            if array.has_property(pk):
                res.append(array.get(pk))
            k += 1
            n += 1
        return res
    def sort(cmpfn):
        if this.Class not in {'Array', 'Arguments'}:
            return this.to_object() # do nothing
        arr = [this.get(str(i)) for i in xrange(len(this))]
        if not arr:
            return this
        if not cmpfn.is_callable():
            cmpfn = None
        cmp = lambda a,b: sort_compare(a, b, cmpfn)
        arr.sort(cmp=cmp)
        for i in xrange(len(arr)):
            this.put(unicode(i), arr[i])
        return this
    def splice(start, deleteCount):
        # 1-8
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        relative_start = start.to_int()
        actual_start = max((arr_len + relative_start),0) if relative_start<0 else min(relative_start, arr_len)
        actual_delete_count =  min(max(deleteCount.to_int(),0 ), arr_len - actual_start)
        k = 0
        A = this.Js([])
        # 9
        while k<actual_delete_count:
            if array.has_property(str(actual_start+k)):
                A.put(str(k), array.get(str(actual_start+k)))
            k += 1
        # 10-11
        items = to_arr(arguments)[2:]
        items_len = len(items)
        # 12
        if items_len<actual_delete_count:
            k = actual_start
            while k < (arr_len-actual_delete_count):
                fr = str(k+actual_delete_count)
                to = str(k+items_len)
                if array.has_property(fr):
                    array.put(to, array.get(fr))
                else:
                    array.delete(to)
                k += 1
            k = arr_len
            while k > (arr_len - actual_delete_count + items_len):
                array.delete(str(k-1))
                k -= 1
        # 13
        elif items_len>actual_delete_count:
            k = arr_len - actual_delete_count
            while k>actual_start:
                fr = str(k + actual_delete_count - 1)
                to = str(k + items_len - 1)
                if array.has_property(fr):
                    array.put(to, array.get(fr))
                else:
                    array.delete(to)
                k -= 1
        # 14-17
        k = actual_start
        while items:
            E = items.pop(0)
            array.put(str(k), E)
            k += 1
        array.put('length', this.Js(arr_len - actual_delete_count + items_len))
        return A
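    # Illustrative JS-level behaviour implemented above (standard
    # Array.prototype.splice semantics):
    #     [1, 2, 3, 4].splice(1, 2, 'a')  -> returns [2, 3],
    #     and the array becomes [1, 'a', 4]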
    def unshift():
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        argCount = len(arguments)
        k = arr_len
        while k > 0:
            fr = str(k - 1)
            to = str(k + argCount - 1)
            if array.has_property(fr):
                array.put(to, array.get(fr))
            else:
                array.delete(to)
            k -= 1
        j = 0
        items = to_arr(arguments)
        while items:
            E = items.pop(0)
            array.put(str(j), E)
            j += 1
        array.put('length', this.Js(arr_len + argCount))
        return arr_len + argCount
    def indexOf(searchElement):
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        if arr_len == 0:
            return -1
        if len(arguments)>1:
            n = arguments[1].to_int()
        else:
            n = 0
        if n >= arr_len:
            return -1
        if n >= 0:
            k = n
        else:
            k = arr_len - abs(n)
            if k < 0:
                k = 0
        while k < arr_len:
            if array.has_property(str(k)):
                elementK = array.get(str(k))
                if searchElement.strict_equality_comparison(elementK):
                    return k
            k += 1
        return -1
    def lastIndexOf(searchElement):
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        if arr_len == 0:
            return -1
        if len(arguments)>1:
            n = arguments[1].to_int()
        else:
            n = arr_len - 1
        if n >= 0:
            k = min(n, arr_len-1)
        else:
            k = arr_len - abs(n)
        while k >= 0:
            if array.has_property(str(k)):
                elementK = array.get(str(k))
                if searchElement.strict_equality_comparison(elementK):
                    return k
            k -= 1
        return -1
    def every(callbackfn):
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        if not callbackfn.is_callable():
            raise this.MakeError('TypeError', 'callbackfn must be a function')
        T = arguments[1]
        k = 0
        while k<arr_len:
            if array.has_property(str(k)):
                kValue = array.get(str(k))
                if not callbackfn.call(T, (kValue, this.Js(k), array)).to_boolean().value:
                    return False
            k += 1
        return True
    def some(callbackfn):
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        if not callbackfn.is_callable():
            raise this.MakeError('TypeError', 'callbackfn must be a function')
        T = arguments[1]
        k = 0
        while k<arr_len:
            if array.has_property(str(k)):
                kValue = array.get(str(k))
                if callbackfn.call(T, (kValue, this.Js(k), array)).to_boolean().value:
                    return True
            k += 1
        return False
    def forEach(callbackfn):
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        if not callbackfn.is_callable():
            raise this.MakeError('TypeError', 'callbackfn must be a function')
        T = arguments[1]
        k = 0
        while k<arr_len:
            if array.has_property(str(k)):
                kValue = array.get(str(k))
                callbackfn.call(T, (kValue, this.Js(k), array))
            k+=1
    def map(callbackfn):
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        if not callbackfn.is_callable():
            raise this.MakeError('TypeError', 'callbackfn must be a function')
        T = arguments[1]
        A = this.Js([])
        k = 0
        while k<arr_len:
            Pk = str(k)
            if array.has_property(Pk):
                kValue = array.get(Pk)
                mappedValue = callbackfn.call(T, (kValue, this.Js(k), array))
                A.define_own_property(Pk, {'value': mappedValue, 'writable': True,
                    'enumerable': True, 'configurable': True})
            k += 1
        return A
    def filter(callbackfn):
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        if not callbackfn.is_callable():
            raise this.MakeError('TypeError', 'callbackfn must be a function')
        T = arguments[1]
        res = []
        k = 0
        while k<arr_len:
            if array.has_property(str(k)):
                kValue = array.get(str(k))
                if callbackfn.call(T, (kValue, this.Js(k), array)).to_boolean().value:
                    res.append(kValue)
            k += 1
        return res # converted to js array automatically
    def reduce(callbackfn):
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        if not callbackfn.is_callable():
            raise this.MakeError('TypeError', 'callbackfn must be a function')
        if not arr_len and len(arguments)<2:
            raise this.MakeError('TypeError', 'Reduce of empty array with no initial value')
        k = 0
        if len(arguments)>1: # initial value present
            accumulator = arguments[1]
        else:
            kPresent = False
            while not kPresent and k<arr_len:
                kPresent = array.has_property(str(k))
                if kPresent:
                    accumulator = array.get(str(k))
                k += 1
            if not kPresent:
                raise this.MakeError('TypeError', 'Reduce of empty array with no initial value')
        while k<arr_len:
            if array.has_property(str(k)):
                kValue = array.get(str(k))
                accumulator = callbackfn.call(this.undefined, (accumulator, kValue, this.Js(k), array))
            k += 1
        return accumulator
    def reduceRight(callbackfn):
        array = this.to_object()
        arr_len = array.get('length').to_uint32()
        if not callbackfn.is_callable():
            raise this.MakeError('TypeError', 'callbackfn must be a function')
        if not arr_len and len(arguments)<2:
            raise this.MakeError('TypeError', 'Reduce of empty array with no initial value')
        k = arr_len - 1
        if len(arguments)>1: # initial value present
            accumulator = arguments[1]
        else:
            kPresent = False
            while not kPresent and k>=0:
                kPresent = array.has_property(str(k))
                if kPresent:
                    accumulator = array.get(str(k))
                k -= 1
            if not kPresent:
                raise this.MakeError('TypeError', 'Reduce of empty array with no initial value')
        while k>=0:
            if array.has_property(str(k)):
                kValue = array.get(str(k))
                accumulator = callbackfn.call(this.undefined, (accumulator, kValue, this.Js(k), array))
            k -= 1
        return accumulator
def sort_compare(a, b, comp):
    if a is None:
        if b is None:
            return 0
        return 1
    if b is None:
        if a is None:
            return 0
        return -1
    if a.is_undefined():
        if b.is_undefined():
            return 0
        return 1
    if b.is_undefined():
        if a.is_undefined():
            return 0
        return -1
    if comp is not None:
        res = comp.call(a.undefined, (a, b))
        return res.to_int()
    x, y = a.to_string(), b.to_string()
    if x<y:
        return -1
    elif x>y:
        return 1
    return 0
 | 
	gpl-3.0 | 2,274,216,971,756,242,200 | 32.011261 | 116 | 0.492324 | false | 
| 
	Battleroid/yanker | 
	yanker/yanker.py | 
	1 | 
	2508 | 
	"""
Yanker
Usage:
    yanker [--threads=<tnum>]
"""
__version__ = '1.0.1'
import Queue
import threading
import youtube_dl as ydl
import pyperclip as clip
import time
from docopt import docopt
class ErrLogger(object):
    def debug(self, msg):
        pass
    def warning(self, msg):
        pass
    def error(self, msg):
        print msg
class Worker(threading.Thread):
    def __init__(self, tasks):
        threading.Thread.__init__(self)
        self.tasks = tasks
        self.daemon = True
        self.start()
    def run(self):
        while True:
            vid = self.tasks.get()
            vid.download()
            self.tasks.task_done()
class Video:
    def progress(self, s):
        if s['status'] == 'finished':
            print 'Finished {}'.format(s['filename'])
    def __init__(self, url, opts={}):
        self.url = url
        self.ydl_opts = {
            'progress_hooks': [self.progress],
            'logger': ErrLogger()
        }
        self.ydl_opts.update(opts)
    def download(self):
        print 'Downloading: {}'.format(self.url)
        with ydl.YoutubeDL(self.ydl_opts) as y:
            try:
                y.download([self.url])
            except ydl.DownloadError:
                print 'Unsupported URL, skipping'
class Watcher:
    def __init__(self, urls=[], threads=2):
        self.queue = Queue.Queue(0)
        self.threads = threads
        self.stopped = False
        self.grabbed_urls = set([])
        for _ in range(threads): Worker(self.queue)
    def run(self):
        recent = ''
        while not self.stopped:
            current = clip.paste()
            if recent != current:
                recent = current
                if current.startswith(('http://', 'https://',)) and current not in self.grabbed_urls:
                    print 'Added: {}'.format(current)
                    self.grabbed_urls.add(current)
                    self.queue.put(Video(current))
                elif current in self.grabbed_urls:
                    print 'Already grabbed {}'.format(current)
            time.sleep(0.25)
def run():
    args = docopt(__doc__, version='Yanker {}'.format(__version__))
    threads = args['--threads']
    if not threads:
        threads = 2
    else:
        threads = int(threads)
    print 'Starting Yanker with {} threads...'.format(threads)
    watch = Watcher(threads=threads)
    try:
        watch.run()
    except KeyboardInterrupt:
        print 'Stopping...'
        watch.stopped = True
 | 
	mit | -6,076,938,484,919,029,000 | 24.08 | 101 | 0.539474 | false | 
| 
	Traumflug/Teacup_Firmware | 
	configtool/calcscrew.py | 
	1 | 
	9429 | 
	import wx
from configtool.data import BSIZESMALL, reFloat, offsetChLabel, offsetTcLabel
class CalcScrew(wx.Dialog):
    def __init__(self, parent, font, cbUse):
        wx.Dialog.__init__(
            self,
            parent,
            wx.ID_ANY,
            "Steps calculator for screw driven axes",
            size=(400, 204),
        )
        self.SetFont(font)
        self.Bind(wx.EVT_CLOSE, self.onExit)
        self.use = cbUse
        labelWidth = 150
        hsz = wx.BoxSizer(wx.HORIZONTAL)
        hsz.Add((10, 10))
        sz = wx.BoxSizer(wx.VERTICAL)
        sz.Add((10, 10))
        lsz = wx.BoxSizer(wx.HORIZONTAL)
        st = wx.StaticText(
            self, wx.ID_ANY, "Step Angle:", size=(labelWidth, -1), style=wx.ALIGN_RIGHT
        )
        st.SetFont(font)
        lsz.Add(st, 1, wx.TOP, offsetChLabel)
        lsz.Add((5, 5))
        stepAngles = [
            "1.8 (200 per revolution)",
            "0.9 (400 per revolution)",
            "7.5 (48 per revolution)",
        ]
        self.stepAngleValues = [200, 400, 48]
        tc = wx.Choice(self, wx.ID_ANY, choices=stepAngles)
        tc.SetFont(font)
        tc.SetSelection(0)
        tc.Bind(wx.EVT_CHOICE, self.onChoice)
        lsz.Add(tc)
        tc.SetToolTip("Step angle. Depends on your type of stepper motor.")
        self.tcStep = tc
        sz.Add(lsz)
        sz.Add((10, 10))
        lsz = wx.BoxSizer(wx.HORIZONTAL)
        st = wx.StaticText(
            self,
            wx.ID_ANY,
            "Microstepping:",
            size=(labelWidth, -1),
            style=wx.ALIGN_RIGHT,
        )
        st.SetFont(font)
        lsz.Add(st, 1, wx.TOP, offsetChLabel)
        lsz.Add((5, 5))
        microStepping = [
            "1 - full step",
            "1/2 - half step",
            "1/4 - quarter step",
            "1/8",
            "1/16",
            "1/32",
            "1/64",
            "1/128",
        ]
        self.microSteppingValues = [1, 2, 4, 8, 16, 32, 64, 128]
        tc = wx.Choice(self, wx.ID_ANY, choices=microStepping)
        tc.SetFont(font)
        tc.Bind(wx.EVT_CHOICE, self.onChoice)
        tc.SetSelection(4)
        lsz.Add(tc)
        tc.SetToolTip(
            "Microstepping. Most boards allow to change this by "
            "setting jumpers. The value here must match the "
            "setting on the board in conjunction with the type "
            "of stepper driver chip."
        )
        self.tcMicroStepping = tc
        sz.Add(lsz)
        sz.Add((10, 10))
        lsz = wx.BoxSizer(wx.HORIZONTAL)
        st = wx.StaticText(
            self,
            wx.ID_ANY,
            "Screw Pitch (mm/rev):",
            size=(labelWidth, -1),
            style=wx.ALIGN_RIGHT,
        )
        st.SetFont(font)
        lsz.Add(st, 1, wx.TOP, offsetTcLabel)
        lsz.Add((5, 5))
        tc = wx.TextCtrl(self, wx.ID_ANY, "2", style=wx.TE_RIGHT)
        tc.SetFont(font)
        tc.Bind(wx.EVT_TEXT, self.onTextCtrlFloat)
        lsz.Add(tc)
        tc.SetToolTip("Screw pitch. Defined by the pitch of the screw.")
        self.tcScrewPitch = tc
        lsz.Add((5, 5))
        screwPresets = [
            "-",
            "M8 - metric (1.25 mm/rev)",
            "M6 - metric (1 mm/rev)",
            "M5 - metric (0.8 mm/rev)",
            "12 (12 mm/rev)",
            "16 (16 mm/rev)",
            "25 (25 mm/rev)",
            '5/15"-18 imperial coarse (1.41111 mm/rev)',
            '3/16"-20 imperial (1.270 mm/rev)',
            '1/4"-16 ACME (1.5875 mm/rev)',
        ]
        self.screwPresetValues = [
            -1,
            1.25,
            1.00,
            0.8,
            12.0,
            16.0,
            25.0,
            1.41111,
            1.270,
            1.5875,
        ]
        tc = wx.Choice(self, wx.ID_ANY, choices=screwPresets)
        tc.SetFont(font)
        tc.SetSelection(0)
        tc.Bind(wx.EVT_CHOICE, self.onPresetChoice)
        lsz.Add(tc)
        tc.SetToolTip("Screw pitch presets.")
        self.tcPresets = tc
        sz.Add(lsz)
        sz.Add((10, 10))
        lsz = wx.BoxSizer(wx.HORIZONTAL)
        st = wx.StaticText(
            self, wx.ID_ANY, "Gear Ratio:", size=(labelWidth, -1), style=wx.ALIGN_RIGHT
        )
        st.SetFont(font)
        lsz.Add(st, 1, wx.TOP, offsetTcLabel)
        lsz.Add((5, 5))
        tc = wx.TextCtrl(self, wx.ID_ANY, "1", size=(40, -1), style=wx.TE_RIGHT)
        tc.SetFont(font)
        tc.Bind(wx.EVT_TEXT, self.onTextCtrlFloat)
        lsz.Add(tc)
        tc.SetToolTip("Gear ratio. 1:1 if there is no gear.")
        self.tcRatioTop = tc
        lsz.Add((5, 5))
        st = wx.StaticText(self, wx.ID_ANY, ":")
        st.SetFont(font)
        lsz.Add(st)
        lsz.Add((5, 5))
        tc = wx.TextCtrl(self, wx.ID_ANY, "1", size=(40, -1), style=wx.TE_RIGHT)
        tc.SetFont(font)
        tc.Bind(wx.EVT_TEXT, self.onTextCtrlFloat)
        lsz.Add(tc)
        tc.SetToolTip("Gear ratio. 1:1 if there is no gear.")
        self.tcRatioBottom = tc
        sz.Add(lsz)
        sz.Add((30, 30))
        lsz = wx.BoxSizer(wx.HORIZONTAL)
        st = wx.StaticText(
            self, wx.ID_ANY, "Result:", size=(labelWidth, -1), style=wx.ALIGN_RIGHT
        )
        st.SetFont(font)
        lsz.Add(st)
        lsz.Add((5, 5))
        tc = wx.StaticText(self, wx.ID_ANY, "", size=(300, -1), style=wx.ALIGN_LEFT)
        tc.SetFont(font)
        lsz.Add(tc)
        self.tcResult = tc
        sz.Add(lsz)
        lsz = wx.BoxSizer(wx.HORIZONTAL)
        st = wx.StaticText(
            self, wx.ID_ANY, "Resolution:", size=(labelWidth, -1), style=wx.ALIGN_RIGHT
        )
        st.SetFont(font)
        lsz.Add(st)
        lsz.Add((5, 5))
        tc = wx.StaticText(self, wx.ID_ANY, "", size=(300, -1), style=wx.ALIGN_LEFT)
        tc.SetFont(font)
        lsz.Add(tc)
        self.tcResolution = tc
        sz.Add(lsz)
        sz.Add((20, 20))
        bsz = wx.BoxSizer(wx.HORIZONTAL)
        b = wx.Button(self, wx.ID_ANY, "Use for X", size=BSIZESMALL)
        b.SetFont(font)
        self.Bind(wx.EVT_BUTTON, self.onUseForX, b)
        bsz.Add(b)
        self.bUseForX = b
        bsz.Add((5, 5))
        b = wx.Button(self, wx.ID_ANY, "Use for Y", size=BSIZESMALL)
        b.SetFont(font)
        self.Bind(wx.EVT_BUTTON, self.onUseForY, b)
        bsz.Add(b)
        self.bUseForY = b
        bsz.Add((5, 5))
        b = wx.Button(self, wx.ID_ANY, "Use for Z", size=BSIZESMALL)
        b.SetFont(font)
        self.Bind(wx.EVT_BUTTON, self.onUseForZ, b)
        bsz.Add(b)
        self.bUseForZ = b
        bsz.Add((5, 5))
        b = wx.Button(self, wx.ID_ANY, "Use for E", size=BSIZESMALL)
        b.SetFont(font)
        self.Bind(wx.EVT_BUTTON, self.onUseForE, b)
        bsz.Add(b)
        self.bUseForE = b
        sz.Add(bsz, flag=wx.ALIGN_CENTER_HORIZONTAL)
        sz.Add((10, 10))
        hsz.Add(sz)
        hsz.Add((10, 10))
        self.enableUseButtons(False)
        self.SetSizer(hsz)
        self.Fit()
        self.calculate()
    def calculate(self):
        self.tcResult.SetLabel("")
        self.tcResolution.SetLabel("")
        self.enableUseButtons(False)
        s = self.tcStep.GetSelection()
        sv = self.stepAngleValues[s]
        try:
            sp = float(self.tcScrewPitch.GetValue())
        except ValueError:
            return
        try:
            ratioA = float(self.tcRatioTop.GetValue())
        except ValueError:
            return
        try:
            ratioB = float(self.tcRatioBottom.GetValue())
        except ValueError:
            return
        s = self.tcMicroStepping.GetSelection()
        msv = self.microSteppingValues[s]
        ratio = ratioA / ratioB
        steps = sv * msv
        resultmm = steps / sp / ratio
        self.result = int(resultmm * 1000.0)
        self.tcResult.SetLabel("%d steps/m   (%.3f steps/mm)" % (self.result, resultmm))
        self.tcResolution.SetLabel("%.3f micrometers" % (1.0 / resultmm * 1000.0))
        self.enableUseButtons(True)
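    # Worked example with the dialog defaults: 200 full steps/rev * 16
    # microsteps / 2 mm/rev / (1:1 gear ratio) = 1600 steps/mm, i.e.
    # 1,600,000 steps/m and a resolution of 1/1600 mm = 0.625 micrometers.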
    def enableUseButtons(self, flag):
        self.bUseForX.Enable(flag)
        self.bUseForY.Enable(flag)
        self.bUseForZ.Enable(flag)
        self.bUseForE.Enable(flag)
    def onUseForX(self, evt):
        self.use("STEPS_PER_M_X", self.result)
    def onUseForY(self, evt):
        self.use("STEPS_PER_M_Y", self.result)
    def onUseForZ(self, evt):
        self.use("STEPS_PER_M_Z", self.result)
    def onUseForE(self, evt):
        self.use("STEPS_PER_M_E", self.result)
    def onPresetChoice(self, evt):
        s = self.tcPresets.GetSelection()
        sv = self.screwPresetValues[s]
        if sv < 0:
            return
        s = "%f" % sv
        s = s.rstrip("0")
        if s[-1] == ".":
            s += "0"
        self.tcScrewPitch.SetValue(s)
    def onChoice(self, evt):
        self.calculate()
    def onTextCtrlFloat(self, evt):
        tc = evt.GetEventObject()
        w = tc.GetValue().strip()
        if w == "":
            valid = False
        else:
            m = reFloat.match(w)
            if m:
                valid = True
            else:
                valid = False
        if valid:
            tc.SetBackgroundColour(wx.SystemSettings.GetColour(wx.SYS_COLOUR_WINDOW))
        else:
            tc.SetBackgroundColour("pink")
        tc.Refresh()
        self.calculate()
        evt.Skip()
    def onExit(self, evt):
        self.EndModal(wx.ID_OK)
 | 
	gpl-2.0 | -6,171,746,121,561,787,000 | 26.814159 | 88 | 0.509492 | false | 
| 
	BadWizard/Inflation | 
	SPF/source/get_clean_GDP.py | 
	1 | 
	3646 | 
	
def get_clean_HICP(data_dir="../data/",
                   output_dir="../clean_data/",
                   year=2016,
                   quarter=1):
    '''
        This function takes a raw csv file from SPF
        and creates a new csv file with forecasts
        only for HICP
    '''
    import pandas as pd
    import numpy as np
    import os
# this is fixed list, will use it to create a new target
    months = ['Dec', 'Mar', 'Jun', 'Sep']
# this is the raw file name
    raw_file = str(year) + 'Q' + str(quarter) + '.csv'
# this is the name of the file that will be saved
    output_file = 'HICP' + raw_file
# full path of the input file
    fname = data_dir + raw_file
# check if the input file exists
    if not os.path.isfile(fname):
        print('File ' + fname + ' is not available')
    else:
        raw_df = pd.read_csv(fname, header=1)
# find the row where the growth expectations start
        dum = raw_df[raw_df['TARGET_PERIOD'] ==
                    'GROWTH EXPECTATIONS; YEAR-ON-YEAR CHANGE IN REAL GDP'].index[0]
        mask_columns = ~raw_df.columns.str.contains('Unnamed')
        df = raw_df.iloc[0:dum-1, mask_columns]
        df = df.rename(columns={
            'TARGET_PERIOD': 'target', 'FCT_SOURCE': 'id', 'POINT': 'point',
            'TN1_0': '[-2.0,-1.1]', 'FN1_0TN0_6': '[-1.0,-0.6]',
            'FN0_5TN0_1': '[-0.5,-0.1]', 'F0_0T0_4': '[0.0,0.4]',
            'F0_5T0_9': '[0.5,0.9]', 'F1_0T1_4': '[1.0,1.4]',
            'F1_5T1_9': '[1.5,1.9]', 'F2_0T2_4': '[2.0,2.4]',
            'F2_5T2_9': '[2.5,2.9]', 'F3_0T3_4': '[3.0,3.4]',
            'F3_5T3_9': '[3.5,3.9]', 'F4_0': '[4.0,5.0]'})
    # remove rows where point is missing
        maskNaN = df.point.isnull()
        df = df[~maskNaN]
        df.fillna(0, inplace=True)
        for colname in df.columns[3:]:
            df[colname] = df[colname].astype('float')
    # create a new target column
        mask_t0 = str(year)
        mask_t1 = str(year+1)
        mask_t2 = str(year+2)
        if quarter < 3:
            mask_t4or5 = str(year+4)
        else:
            mask_t4or5 = str(year+5)
        if quarter == 1:
            mask_Rt1 = str(year) + months[quarter-1]
            mask_Rt2 = str(year+1) + months[quarter-1]
        else:
            mask_Rt1 = str(year+1) + months[quarter-1]
            mask_Rt2 = str(year+2) + months[quarter-1]
        df.loc[df.loc[:, 'target'] == mask_t0, 'targetNew'] = 't'
        df.loc[df.loc[:, 'target'] == mask_t1, 'targetNew'] = 't+1'
        df.loc[df.loc[:, 'target'] == mask_t2, 'targetNew'] = 't+2'
        df.loc[df.loc[:, 'target'] == mask_t4or5, 'targetNew'] = 't+4'
        df.loc[df.loc[:, 'target'] == mask_Rt1, 'targetNew'] = 'roll 1'
        df.loc[df.loc[:, 'target'] == mask_Rt2, 'targetNew'] = 'roll 2'
        df['source'] = str(year) + '-Q' + str(quarter)
        df = df[['source', 'target', 'targetNew', 'id', 'point', '[-2.0,-1.1]',
                 '[-1.0,-0.6]', '[-0.5,-0.1]', '[0.0,0.4]', '[0.5,0.9]',
                 '[1.0,1.4]', '[1.5,1.9]', '[2.0,2.4]', '[2.5,2.9]', '[3.0,3.4]',
                 '[3.5,3.9]', '[4.0,5.0]']]
    # save to a csv file
        df.to_csv(output_dir + output_file, index=False)
def main():
    data_dir = "../data/"
    output_dir = "../clean_data/"
    for y in [2015, 2016]:
        for q in [1, 2, 3, 4]:
            get_clean_HICP(data_dir=data_dir,
                           output_dir=output_dir,
                           year=y,
                           quarter=q)
main()
 | 
	mit | 8,939,961,401,442,536,000 | 36.204082 | 91 | 0.472024 | false | 
| 
	factorlibre/carrier-delivery | 
	delivery_carrier_mrw/model/stock.py | 
	1 | 
	8462 | 
	# -*- encoding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2015 FactorLibre (http://www.factorlibre.com)
#                  Hugo Santos <[email protected]>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import urllib
from datetime import datetime
from openerp import models, fields, api, exceptions
from openerp.tools.translate import _
from ..webservice.mrw_api import MrwEnvio
class StockPicking(models.Model):
    _inherit = 'stock.picking'
    @api.model
    def _get_mrw_service_type(self):
        return [
            ('0000', 'Urgente 10'),
            ('0005', 'Urgente Hoy'),
            ('0100', 'Urgente 12'),
            ('0110', 'Urgente 14'),
            ('0120', 'Urgente 22'),
            ('0200', 'Urgente 19'),
            ('0205', 'Urgente 19 Expedicion'),
            ('0210', 'Urgente 19 Mas 40 kilos'),
            ('0220', 'Urgente 19 Portugal'),
            ('0230', 'Bag 19'),
            ('0235', 'Bag 14'),
            ('0300', 'Economico'),
            ('0310', 'Economico Mas 40 Kilos'),
            ('0350', 'Economico Interinsular'),
            ('0400', 'Express Documentos'),
            ('0450', 'Express 2 Kilos'),
            ('0480', 'Caja Express 3 Kilos'),
            ('0490', 'Documentos 14'),
            ('0800', 'Ecommerce')
        ]
    mrw_service_type = fields.Selection(
        '_get_mrw_service_type', string='Mrw Service')
    mrw_frequence = fields.Selection(
        (('1', 'Frecuencia 1'), ('2', 'Frecuencia 2')), string='Mrw Frequence')
    @api.multi
    def _mrw_transm_envio_request(self, mrw_api):
        self.ensure_one()
        client = mrw_api.client
        transm_envio = client.factory.create('TransmEnvioRequest')
        warehouse_address = self.picking_type_id.warehouse_id.partner_id
        pickup_address = transm_envio.DatosRecogida.Direccion
        pickup_address.Via = warehouse_address.street
        if warehouse_address.street2:
            pickup_address.Resto = warehouse_address.street2
        # TODO: check how to handle the remaining postal code formats.
        # They follow a country-specific format:
        # - Spain: use all 5 digits (e.g. 05200 for Ávila)
        # - Portugal: use only the first 4 of the 7 digits
        #   (e.g. 1200 for Lisboa)
        # - Andorra: must be 5 digits, so a leading 0 is prepended
        #   (e.g. 00500 for Andorra la Vella)
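        # zfill pads short codes on the left, e.g. '500'.zfill(5) gives
        # '00500', while a full 5-digit code such as '05200' is unchanged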
        pickup_address.CodigoPostal = warehouse_address.zip.zfill(5)
        pickup_address.Poblacion = warehouse_address.city
        pickup_address.Provincia = warehouse_address.state_id.name or ''
        pickup_address.CodigoPais = warehouse_address.country_id.code or ''
        transm_envio.DatosRecogida.Nif = warehouse_address.vat or ''
        transm_envio.DatosRecogida.Nombre = warehouse_address.name
        transm_envio.DatosRecogida.Telefono = warehouse_address.phone or ''
        shipping_address = transm_envio.DatosEntrega.Direccion
        shipping_address.Via = self.partner_id.street
        shipping_address.Resto = self.partner_id.street2 or ''
        shipping_address.CodigoPostal = self.partner_id.zip
        shipping_address.Poblacion = self.partner_id.city
        shipping_address.Provincia = self.partner_id.state_id.name or ''
        shipping_address.CodigoPais = self.partner_id.country_id.name or ''
        transm_envio.DatosEntrega.Nif = self.partner_id.vat or ''
        transm_envio.DatosEntrega.Nombre = self.partner_id.name or ''
        transm_envio.DatosEntrega.Telefono = self.partner_id.phone or ''
        # Service data
        service_data = transm_envio.DatosServicio
        service_data.Fecha = datetime.strftime(
            fields.Datetime.from_string(self.date_done), '%d/%m/%Y')
        service_data.Referencia = self.name
        service_data.EnFranquicia = 'N'
        service_data.CodigoServicio = self.mrw_service_type
        service_data.NumeroBultos = self.number_of_packages or 1
        service_data.Peso = self.weight or 1
        if self.mrw_frequence:
            service_data.Frecuencia = self.mrw_frequence
        # TODO: cash-on-delivery (reembolso) service
        # Reembolso: optional cash-on-delivery flag. Possible values:
        # - N: (default) no cash on delivery.
        # - O: cash on delivery, commission charged at origin.
        # - D: cash on delivery, commission charged at destination.
        # ImporteReembolso:
        # - nominal amount to collect (for cash-on-delivery shipments)
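        # A minimal sketch of wiring this up (hypothetical; cod_amount is
        # not an existing field on this model):
        #   service_data.Reembolso = 'O'
        #   service_data.ImporteReembolso = '%.2f' % cod_amount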
        if self.partner_id.email:
            notification_request = client.factory.create('NotificacionRequest')
            notification_request.CanalNotificacion = "1"
            notification_request.TipoNotificacion = "4"
            notification_request.MailSMS = self.partner_id.email
            service_data.Notificaciones.NotificacionRequest.append(
                notification_request)
        return transm_envio
    @api.multi
    def _mrw_etiqueta_envio_request(self, mrw_api, shipping_number):
        self.ensure_one()
        client = mrw_api.client
        label_factory = client.factory.create('EtiquetaEnvioRequest')
        label_factory.NumeroEnvio = shipping_number
        label_factory.ReportTopMargin = "1100"
        label_factory.ReportLeftMargin = "650"
        return label_factory
    @api.multi
    def _generate_mrw_label(self, package_ids=None):
        self.ensure_one()
        if not self.carrier_id.mrw_config_id:
            raise exceptions.Warning(_('No MRW Config defined in carrier'))
        if not self.picking_type_id.warehouse_id.partner_id:
            raise exceptions.Warning(
                _('Please define an address in the %s warehouse') % (
                    self.picking_type_id.warehouse_id.name))
        mrw_api = MrwEnvio(self.carrier_id.mrw_config_id)
        client = mrw_api.client
        transm_envio = self._mrw_transm_envio_request(mrw_api)
        response = client.service.TransmEnvio(transm_envio)
        if response.Estado != '1' and not response.NumeroEnvio:
            raise exceptions.Warning(response.Mensaje)
        label_factory = self._mrw_etiqueta_envio_request(mrw_api,
                                                         response.NumeroEnvio)
        label_response = client.service.EtiquetaEnvio(label_factory)
        if label_response.Estado != '1':
            raise exceptions.Warning(label_response.Mensaje)
        label = {
            'file': label_response.EtiquetaFile.decode('base64'),
            'file_type': 'pdf',
            'name': response.NumeroEnvio + '.pdf',
        }
        return [label]
    @api.multi
    def _get_mrw_label_from_url(self, shipping_number):
        self.ensure_one()
        mrw_config = self.carrier_id.mrw_config_id
        url = "http://sagec.mrw.es"
        if mrw_config.is_test:
            url = "http://sagec-test.mrw.es"
        params = {
            'Franq': mrw_config.franchise_code,
            'Ab': mrw_config.subscriber_code,
            'Dep': mrw_config.department_code or '',
            'Usr': mrw_config.username,
            'Pwd': mrw_config.password,
            'NumEnv': shipping_number
        }
        url_params = urllib.urlencode(params)
        # Generate the label through the Panel page
        panel_url = u"{0}/Panel.aspx?{1}".format(url, url_params)
        return panel_url
    @api.multi
    def generate_shipping_labels(self, package_ids=None):
        """ Add label generation for MRW """
        self.ensure_one()
        if self.carrier_id.type == 'mrw':
            return self._generate_mrw_label(package_ids=package_ids)
        return super(StockPicking, self).generate_shipping_labels(
            package_ids=package_ids)
 | 
	agpl-3.0 | -7,478,398,197,497,246,000 | 40.024272 | 79 | 0.609869 | false | 
| 
	xlevus/python-diana | 
	docs/source/conf.py | 
	1 | 
	9861 | 
	# -*- coding: utf-8 -*-
#
# Diana documentation build configuration file, created by
# sphinx-quickstart on Wed Jun 22 12:48:46 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.doctest",
    "sphinx.ext.intersphinx",
    "sphinx.ext.todo",
    "sphinx.ext.coverage",
    "sphinx.ext.viewcode",
    "sphinx.ext.githubpages",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = u"Diana"
copyright = u"2016, Chris Targett"
author = u"Chris Targett"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u"0.1.0"
# The full version, including alpha/beta/rc tags.
release = u"0.1.0"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'Diana v0.1.0'
# A shorter title for the navigation bar.  Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "Dianadoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, "Diana.tex", u"Diana Documentation", u"Chris Targett", "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "diana", u"Diana Documentation", [author], 1)]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (
        master_doc,
        "Diana",
        u"Diana Documentation",
        author,
        "Diana",
        "One line description of project.",
        "Miscellaneous",
    ),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {"https://docs.python.org/": None}
 | 
	mit | -4,637,041,529,167,908,000 | 27.749271 | 82 | 0.689585 | false | 
| 
	alphagov/digitalmarketplace-admin-frontend | 
	tests/app/main/views/test_outcomes.py | 
	1 | 
	9064 | 
	import csv
import mock
import pytest
from dmtestutils.api_model_stubs import FrameworkStub
from ...helpers import LoggedInApplicationTest
class TestDirectAwardView(LoggedInApplicationTest):
    def setup_method(self, method):
        super().setup_method(method)
        self.data_api_client_patch = mock.patch('app.main.views.outcomes.data_api_client', autospec=True)
        self.data_api_client = self.data_api_client_patch.start()
    def teardown_method(self, method):
        self.data_api_client_patch.stop()
        super().teardown_method(method)
    @pytest.mark.parametrize("role,expected_code", [
        ("admin", 403),
        ("admin-manager", 403),
        ("admin-ccs-category", 200),
        ("admin-ccs-sourcing", 200),
        ("admin-framework-manager", 200),
    ])
    def test_outcomes_csv_download_permissions(self, role, expected_code):
        self.user_role = role
        response = self.client.get('/admin/direct-award/outcomes')
        actual_code = response.status_code
        assert actual_code == expected_code, "Unexpected response {} for role {}".format(actual_code, role)
    def test_outcomes_csv_download_content(self):
        self.user_role = 'admin-ccs-sourcing'
        find_direct_award_projects_result = {
            "links": {
                "self": "http://localhost:5000/direct-award/projects?latest-first=1&user-id=19175"
            },
            "meta": {
                "total": 20
            },
            "projects": [
                {
                    "active": True,
                    "createdAt": "2018-06-22T10:41:31.281853Z",
                    "downloadedAt": None,
                    "id": 731851428862851,
                    "lockedAt": None,
                    "name": "gfgffd",
                    "outcome": {
                        "result": "cancelled"
                    },
                    "users": [
                        {
                            "active": True,
                            "emailAddress": "[email protected]",
                            "id": 123,
                            "name": "A Buyer",
                            "role": "buyer"
                        }
                    ]
                },
                {
                    "active": True,
                    "createdAt": "2018-06-19T13:36:37.557144Z",
                    "downloadedAt": "2018-06-19T13:37:30.849304Z",
                    "id": 272774709812396,
                    "lockedAt": "2018-06-19T13:37:03.176398Z",
                    "name": "22",
                    "outcome": {
                        "award": {
                            "awardValue": "1234.00",
                            "awardingOrganisationName": "123321",
                            "endDate": "2020-12-12",
                            "startDate": "2002-12-12"
                        },
                        "completed": True,
                        "completedAt": "2018-06-19T13:37:59.713497Z",
                        "id": 680306864633356,
                        "result": "awarded",
                        "resultOfDirectAward": {
                            "archivedService": {
                                "id": 266018,
                                "service": {
                                    "id": "316684326093280"
                                }
                            },
                            "project": {
                                "id": 272774709812396
                            },
                            "search": {
                                "id": 3706
                            }
                        }
                    },
                    "users": [
                        {
                            "active": True,
                            "emailAddress": "[email protected]",
                            "id": 123,
                            "name": "A Buyer",
                            "role": "buyer"
                        }
                    ]
                }
            ]
        }
        get_archived_service_result = {
            'services': {
                'supplierId': 266018,
                'supplierName': 'Somerford Associates Limited',
                'serviceName': 'testServiceName'
            }
        }
        self.data_api_client.get_archived_service.return_value = get_archived_service_result
        self.data_api_client.find_direct_award_projects.return_value = find_direct_award_projects_result
        response = self.client.get('/admin/direct-award/outcomes')
        assert response.status_code == 200
        assert response.content_type == 'text/csv; charset=utf-8'
        response_data = str(response.data, 'utf-8').splitlines()  # convert byte-string to string
        data = csv.reader(response_data)
        assert data  # checks if CSV is valid
        rows = []
        for row in data:
            rows.append(row)
        # checks that only awarded outcomes are shown
        assert len(rows) == 2
        # checks headers
        assert rows[0] == [
            'ID', 'Name', 'Submitted at', 'Result',
            'Award service ID', 'Award service name',
            'Award supplier id', 'Award supplier name',
            'Award value', 'Awarding organisation name',
            'Award start date', 'Award end date',
            'User id', 'User name', 'User email'
        ]
        # checks results
        assert rows[1] == [
            '272774709812396', '22', '2018-06-19T13:37:59.713497Z', 'awarded',
            '316684326093280', 'testServiceName', '266018', 'Somerford Associates Limited',
            '1234.00', '123321', '2002-12-12', '2020-12-12',
            '123', 'A Buyer', '[email protected]'
        ]
class TestDOSView(LoggedInApplicationTest):
    url = "/admin/digital-outcomes-and-specialists/outcomes"
    def setup_method(self, method):
        super().setup_method(method)
        self.data_api_client_patch = mock.patch('app.main.views.outcomes.data_api_client', autospec=True)
        self.data_api_client = self.data_api_client_patch.start()
        self.data_api_client.find_frameworks.return_value = {"frameworks": [
            FrameworkStub(
                slug="digital-outcomes-and-specialists-4", status="live"
            ).response()
        ]}
    def teardown_method(self, method):
        self.data_api_client_patch.stop()
        super().teardown_method(method)
    @pytest.fixture(autouse=True)
    def s3(self):
        with mock.patch("app.main.views.outcomes.s3") as s3:
            bucket = s3.S3()
            bucket.get_signed_url.side_effect = \
                lambda path: f"https://s3.example.com/{path}?signature=deadbeef"
            yield s3
    @pytest.mark.parametrize("role,expected_code", [
        ("admin", 403),
        ("admin-manager", 403),
        ("admin-ccs-category", 302),
        ("admin-ccs-sourcing", 302),
        ("admin-framework-manager", 302),
    ])
    def test_download_permissions(self, role, expected_code):
        self.user_role = role
        response = self.client.get(self.url)
        actual_code = response.status_code
        assert actual_code == expected_code, "Unexpected response {} for role {}".format(actual_code, role)
    def test_redirects_to_assets_domain(self):
        self.user_role = "admin-ccs-category"
        response = self.client.get(self.url)
        assert response.status_code == 302
        assert response.location \
            == "https://assets.test.digitalmarketplace.service.gov.uk" \
            "/digital-outcomes-and-specialists-4/reports/opportunity-data.csv" \
            "?signature=deadbeef"
    @pytest.mark.parametrize("latest_dos_framework", (
        "digital-outcomes-and-specialists-4",
        "digital-outcomes-and-specialists-5",
    ))
    def test_csv_is_for_latest_live_dos_framework(self, latest_dos_framework, s3):
        self.user_role = "admin-ccs-category"
        self.data_api_client.find_frameworks.return_value = {"frameworks": [
            FrameworkStub(
                framework_live_at="2016-03-03 12:00:00",
                slug="digital-outcomes-and-specialists",
                status="expired"
            ).response(),
            FrameworkStub(
                framework_live_at="2018-10-01 10:58:09.43134",
                slug="digital-outcomes-and-specialists-3",
                status="live"
            ).response(),
            FrameworkStub(
                framework_live_at="2019-12-18 15:13:24.53636",
                slug=latest_dos_framework,
                status="live"
            ).response(),
            FrameworkStub(
                framework_live_at="2020-12-18 15:13:24.53636",
                slug="g-cloud-12",
                status="live"
            ).response(),
        ]}
        response = self.client.get(self.url)
        assert s3.S3().get_signed_url.call_args == mock.call(
            f"{latest_dos_framework}/reports/opportunity-data.csv"
        )
        assert latest_dos_framework in response.location
 | 
	mit | -5,713,912,630,763,795,000 | 36.924686 | 107 | 0.492056 | false | 
| 
	dc3-plaso/dfvfs | 
	tests/path/os_path_spec.py | 
	1 | 
	1764 | 
	#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the operating system path specification implementation."""
import platform
import unittest
from dfvfs.path import os_path_spec
from tests.path import test_lib
class OSPathSpecTest(test_lib.PathSpecTestCase):
  """Tests for the operating system path specification implementation."""
  def testInitialize(self):
    """Tests the path specification initialization."""
    if platform.system() == u'Windows':
      test_location = u'C:\\test'
    else:
      test_location = u'/test'
    path_spec = os_path_spec.OSPathSpec(location=test_location)
    self.assertIsNotNone(path_spec)
    with self.assertRaises(ValueError):
      _ = os_path_spec.OSPathSpec(
          location=test_location, parent=self._path_spec)
    with self.assertRaises(ValueError):
      _ = os_path_spec.OSPathSpec(location=test_location, bogus=u'BOGUS')
  def testComparable(self):
    """Tests the path specification comparable property."""
    if platform.system() == u'Windows':
      test_location = u'C:\\test'
    else:
      test_location = u'/test'
    path_spec = os_path_spec.OSPathSpec(location=test_location)
    self.assertIsNotNone(path_spec)
    expected_comparable = u'\n'.join([
        u'type: OS, location: {0:s}'.format(test_location),
        u''])
    self.assertEqual(path_spec.comparable, expected_comparable)
  def testIsSystemLevel(self):
    """Tests the IsSystemLevel function."""
    if platform.system() == u'Windows':
      test_location = u'C:\\test'
    else:
      test_location = u'/test'
    path_spec = os_path_spec.OSPathSpec(location=test_location)
    self.assertIsNotNone(path_spec)
    self.assertTrue(path_spec.IsSystemLevel())
if __name__ == '__main__':
  unittest.main()
 | 
	apache-2.0 | -6,657,228,756,776,693,000 | 26.138462 | 73 | 0.674036 | false | 
| 
	georgeouzou/survgr | 
	transform/migrations/0001_initial.py | 
	1 | 
	1205 | 
	# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-22 14:55
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Hattblock',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=25)),
                ('center_lon', models.FloatField()),
                ('center_lat', models.FloatField()),
                ('geometry', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='OKXECoefficient',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.CharField(max_length=3)),
                ('value', models.FloatField()),
                ('block', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='transform.Hattblock')),
            ],
        ),
    ]
 | 
	mit | 4,114,823,400,723,477,000 | 33.428571 | 116 | 0.561826 | false | 
| 
	fazalmajid/temboz | 
	tembozapp/normalize.py | 
	1 | 
	32692 | 
	# -*- coding: iso-8859-1 -*-
from __future__ import print_function
import sys, time, re, codecs, string, traceback, socket, hashlib
import unicodedata, requests, feedparser
from . import param, transform, util, porter2
import bleach
try:
  import html.entities as htmlentitydefs
except ImportError:
  import htmlentitydefs
# XXX TODO
#
# XXX normalize feed['title'] to quote & "
#
# XXX Many of these heuristics have probably been addressed by newer versions
# XXX of feedparser.py
#date_fmt = '%a, %d %b %Y %H:%M:%S %Z'
date_fmt = '%Y-%m-%d %H:%M:%S'
try:
  try:
    import ctranslitcodec as translitcodec
  except ImportError:
    import translitcodec
  def strip_diacritics(s):
    return translitcodec.short_encode(s)[0]
except ImportError:
  # strip diacritics. Unicode normalization form D (NFD) maps letters with
  # diacritics into the base letter followed by a combining diacritic, all
  # we need to do is get rid of the combining diacritics
  # this probably does not work with exotic characters like
  # U+FDF2 (Arabic ligature Allah)
  def stripc(c):
    return unicodedata.normalize('NFD', c)[0]
  def strip_diacritics(s):
    return ''.join(map(stripc, s))
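# e.g. strip_diacritics(u'\xe9sop') -> u'esop' in either branch: NFD
# decomposes the accented e into 'e' plus a combining acute, and only
# the base letter is kept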
# XXX need a good way to support languages other than English and French
stop_words = ['i', 't', 'am', 'no', 'do', 's', 'my', 'don', 'm', 'on',
              'get', 'in', 'you', 'me', 'd', 've']
# list originally from: http://bll.epnet.com/help/ehost/Stop_Words.htm
stop_words += ['a', 'the', 'of', 'and', 'that', 'for', 'by', 'as', 'be',
'or', 'this', 'then', 'we', 'which', 'with', 'at', 'from', 'under',
'such', 'there', 'other', 'if', 'is', 'it', 'can', 'now', 'an', 'to',
'but', 'upon', 'where', 'these', 'when', 'whether', 'also', 'than',
'after', 'within', 'before', 'because', 'without', 'however',
'therefore', 'between', 'those', 'since', 'into', 'out', 'some', 'about',
'accordingly', 'again', 'against', 'all', 'almost', 'already',
'although', 'always', 'among', 'any', 'anyone', 'apparently', 'are',
'arise', 'aside', 'away', 'became', 'become', 'becomes', 'been', 'being',
'both', 'briefly', 'came', 'cannot', 'certain', 'certainly', 'could',
'etc', 'does', 'done', 'during', 'each', 'either', 'else', 'ever',
'every', 'further', 'gave', 'gets', 'give', 'given', 'got', 'had',
'hardly', 'has', 'have', 'having', 'here', 'how', 'itself', 'just',
'keep', 'kept', 'largely', 'like', 'made', 'mainly', 'make', 'many',
'might', 'more', 'most', 'mostly', 'much', 'must', 'nearly',
'necessarily', 'neither', 'next', 'none', 'nor', 'normally', 'not',
'noted', 'often', 'only', 'our', 'put', 'owing', 'particularly',
'perhaps', 'please', 'potentially', 'predominantly', 'present',
'previously', 'primarily', 'probably', 'prompt', 'promptly', 'quickly',
'quite', 'rather', 'readily', 'really', 'recently', 'regarding',
'regardless', 'relatively', 'respectively', 'resulted', 'resulting',
'results', 'said', 'same', 'seem', 'seen', 'several', 'shall', 'should',
'show', 'showed', 'shown', 'shows', 'significantly', 'similar',
'similarly', 'slightly', 'so', 'sometime', 'somewhat', 'soon',
'specifically', 'strongly', 'substantially', 'successfully',
'sufficiently', 'their', 'theirs', 'them', 'they', 'though', 'through',
'throughout', 'too', 'toward', 'unless', 'until', 'use', 'used', 'using',
'usually', 'various', 'very', 'was', 'were', 'what', 'while', 'who',
'whose', 'why', 'widely', 'will', 'would', 'yet']
# French stop words
# list originally from: http://www.up.univ-mrs.fr/veronis/data/antidico.txt
# XXX it's not good practice to mix languages like this, we should use
# XXX feed language metadata and track what language a content is written in
# XXX but that would require significant data model changes
dec = (lambda s: unicode(s, 'iso8859-15')) if sys.version < '3' else str
stop_words += [strip_diacritics(dec(s)) for s in [
  "a", "A", "à", "afin", "ah", "ai", "aie", "aient", "aies", "ailleurs",
  "ainsi", "ait", "alentour", "alias", "allais", "allaient",
  "allait", "allons", "allez", "alors", "Ap.", "Apr.", "après",
  "après-demain", "arrière", "as", "assez", "attendu", "au", "aucun",
  "aucune", "au-dedans", "au-dehors", "au-delà", "au-dessous",
  "au-dessus", "au-devant", "audit", "aujourd'", "aujourd'hui",
  "auparavant", "auprès", "auquel", "aura", "aurai", "auraient",
  "aurais", "aurait", "auras", "aurez", "auriez", "aurions",
  "aurons", "auront", "aussi", "aussitôt", "autant", "autour", "autre",
  "autrefois", "autres", "autrui", "aux", "auxdites", "auxdits",
  "auxquelles", "auxquels", "avaient", "avais", "avait", "avant",
  "avant-hier", "avec", "avez", "aviez", "avions", "avoir", "avons",
  "ayant", "ayez", "ayons", "B", "bah", "banco", "bé", "beaucoup", "ben",
  "bien", "bientôt", "bis", "bon", "ç'", "c.-à-d.", "Ca", "ça", "çà",
  "cahin-caha", "car", "ce", "-ce", "céans", "ceci", "cela", "celle",
  "celle-ci", "celle-là", "celles", "celles-ci", "celles-là", "celui",
  "celui-ci", "celui-là", "cent", "cents", "cependant", "certain",
  "certaine", "certaines", "certains", "certes", "ces", "c'est-à-dire",
  "cet", "cette", "ceux", "ceux-ci", "ceux-là", "cf.", "cg", "cgr",
  "chacun", "chacune", "chaque", "cher", "chez", "ci", "-ci", "ci-après",
  "ci-dessous", "ci-dessus", "cinq", "cinquante", "cinquante-cinq",
  "cinquante-deux", "cinquante-et-un", "cinquante-huit",
  "cinquante-neuf", "cinquante-quatre", "cinquante-sept",
  "cinquante-six", "cinquante-trois", "cl", "cm", "cm²", "combien",
  "comme", "comment", "contrario", "contre", "crescendo", "D", "d'",
  "d'abord", "d'accord", "d'affilée", "d'ailleurs", "dans", "d'après",
  "d'arrache-pied", "davantage", "de", "debout", "dedans", "dehors",
  "déjà", "delà", "demain", "d'emblée", "depuis", "derechef",
  "derrière", "des", "dès", "desdites", "desdits", "désormais",
  "desquelles", "desquels", "dessous", "dessus", "deux", "devant",
  "devers", "dg", "die", "différentes", "différents", "dire", "dis",
  "disent", "dit", "dito", "divers", "diverses", "dix", "dix-huit",
  "dix-neuf", "dix-sept", "dl", "dm", "donc", "dont", "dorénavant",
  "douze", "du", "dû", "dudit", "duquel", "durant", "E", "eh", "elle",
  "-elle", "elles", "-elles", "en", "'en", "-en", "encore", "enfin",
  "ensemble", "ensuite", "entre", "entre-temps", "envers", "environ",
  "es", "ès", "est", "et", "et/ou", "étaient", "étais", "était", "étant",
  "etc", "été", "êtes", "étiez", "étions", "être", "eu", "eue", "eues",
  "euh", "eûmes", "eurent", "eus", "eusse", "eussent", "eusses",
  "eussiez", "eussions", "eut", "eût", "eûtes", "eux", "exprès",
  "extenso", "extremis", "F", "facto", "fallait", "faire", "fais",
  "faisais", "faisait", "faisaient", "faisons", "fait", "faites",
  "faudrait", "faut", "fi", "flac", "fors", "fort", "forte", "fortiori",
  "frais", "fûmes", "fur", "furent", "fus", "fusse", "fussent", "fusses",
  "fussiez", "fussions", "fut", "fût", "fûtes", "G", "GHz", "gr",
  "grosso", "guère", "H", "ha", "han", "haut", "hé", "hein", "hem",
  "heu", "hg", "hier", "hl", "hm", "hm³", "holà", "hop", "hormis", "hors",
  "hui", "huit", "hum", "I", "ibidem", "ici", "ici-bas", "idem", "il",
  "-il", "illico", "ils", "-ils", "ipso", "item", "J", "j'", "jadis",
  "jamais", "je", "-je", "jusqu'", "jusqu'à", "jusqu'au", "jusqu'aux",
  "jusque", "juste", "K", "kg", "km", "km²", "L", "l'", "la", "-la", "là",
  "-là", "là-bas", "là-dedans", "là-dehors", "là-derrière",
  "là-dessous", "là-dessus", "là-devant", "là-haut", "laquelle",
  "l'autre", "le", "-le", "lequel", "les", "-les", "lès", "lesquelles",
  "lesquels", "leur", "-leur", "leurs", "lez", "loin", "l'on",
  "longtemps", "lors", "lorsqu'", "lorsque", "lui", "-lui", "l'un",
  "l'une", "M", "m'", "m²", "m³", "ma", "maint", "mainte", "maintenant",
  "maintes", "maints", "mais", "mal", "malgré", "me", "même", "mêmes",
  "mes", "mg", "mgr", "MHz", "mieux", "mil", "mille", "milliards",
  "millions", "minima", "ml", "mm", "mm²", "modo", "moi", "-moi", "moins",
  "mon", "moult", "moyennant", "mt", "N", "n'", "naguère", "ne",
  "néanmoins", "neuf", "ni", "nº", "non", "nonante", "nonobstant", "nos",
  "notre", "nous", "-nous", "nul", "nulle", "O", "ô", "octante", "oh",
  "on", "-on", "ont", "onze", "or", "ou", "où", "ouais", "oui", "outre",
  "P", "par", "parbleu", "parce", "par-ci", "par-delà", "par-derrière",
  "par-dessous", "par-dessus", "par-devant", "parfois", "par-là",
  "parmi", "partout", "pas", "passé", "passim", "pendant", "personne",
  "petto", "peu", "peut", "peuvent", "peux", "peut-être", "pis", "plus",
  "plusieurs", "plutôt", "point", "posteriori", "pour", "pourquoi",
  "pourtant", "préalable", "près", "presqu'", "presque", "primo",
  "priori", "prou", "pu", "puis", "puisqu'", "puisque", "Q", "qu'", "qua",
  "quand", "quarante", "quarante-cinq", "quarante-deux",
  "quarante-et-un", "quarante-huit", "quarante-neuf",
  "quarante-quatre", "quarante-sept", "quarante-six",
  "quarante-trois", "quasi", "quatorze", "quatre", "quatre-vingt",
  "quatre-vingt-cinq", "quatre-vingt-deux", "quatre-vingt-dix",
  "quatre-vingt-dix-huit", "quatre-vingt-dix-neuf",
  "quatre-vingt-dix-sept", "quatre-vingt-douze", "quatre-vingt-huit",
  "quatre-vingt-neuf", "quatre-vingt-onze", "quatre-vingt-quatorze",
  "quatre-vingt-quatre", "quatre-vingt-quinze", "quatre-vingts",
  "quatre-vingt-seize", "quatre-vingt-sept", "quatre-vingt-six",
  "quatre-vingt-treize", "quatre-vingt-trois", "quatre-vingt-un",
  "quatre-vingt-une", "que", "quel", "quelle", "quelles", "quelqu'",
  "quelque", "quelquefois", "quelques", "quelques-unes",
  "quelques-uns", "quelqu'un", "quelqu'une", "quels", "qui",
  "quiconque", "quinze", "quoi", "quoiqu'", "quoique", "R", "revoici",
  "revoilà", "rien", "S", "s'", "sa", "sans", "sauf", "se", "secundo",
  "seize", "selon", "sensu", "sept", "septante", "sera", "serai",
  "seraient", "serais", "serait", "seras", "serez", "seriez", "serions",
  "serons", "seront", "ses", "si", "sic", "sine", "sinon", "sitôt",
  "situ", "six", "soi", "soient", "sois", "soit", "soixante",
  "soixante-cinq", "soixante-deux", "soixante-dix",
  "soixante-dix-huit", "soixante-dix-neuf", "soixante-dix-sept",
  "soixante-douze", "soixante-et-onze", "soixante-et-un",
  "soixante-et-une", "soixante-huit", "soixante-neuf",
  "soixante-quatorze", "soixante-quatre", "soixante-quinze",
  "soixante-seize", "soixante-sept", "soixante-six", "soixante-treize",
  "soixante-trois", "sommes", "son", "sont", "soudain", "sous",
  "souvent", "soyez", "soyons", "stricto", "suis", "sur",
  "sur-le-champ", "surtout", "sus", "T", "-t", "t'", "ta", "tacatac",
  "tant", "tantôt", "tard", "te", "tel", "telle", "telles", "tels", "ter",
  "tes", "toi", "-toi", "ton", "tôt", "toujours", "tous", "tout", "toute",
  "toutefois", "toutes", "treize", "trente", "trente-cinq",
  "trente-deux", "trente-et-un", "trente-huit", "trente-neuf",
  "trente-quatre", "trente-sept", "trente-six", "trente-trois", "très",
  "trois", "trop", "tu", "-tu", "U", "un", "une", "unes", "uns", "USD",
  "V", "va", "vais", "vas", "vers", "veut", "veux", "via", "vice-versa",
  "vingt", "vingt-cinq", "vingt-deux", "vingt-huit", "vingt-neuf",
  "vingt-quatre", "vingt-sept", "vingt-six", "vingt-trois",
  "vis-à-vis", "vite", "vitro", "vivo", "voici", "voilà", "voire",
  "volontiers", "vos", "votre", "vous", "-vous", "W", "X", "y", "-y",
  "Z", "zéro"]]
stop_words = set(stop_words)
# translate to lower case, normalize whitespace
# for ease of filtering
# this needs to be a mapping as Unicode strings do not support traditional
# str.translate with a 256-length string
lc_map = {}
punct_map = {}
for c in string.whitespace:
  lc_map[ord(c)] = 32
del lc_map[32]
for c in string.punctuation + '\'\xab\xbb':
  punct_map[ord(c)] = 32
punct_map[0x2019] = "'"
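# e.g. u"rock'n'roll".translate(punct_map) -> u'rock n roll', since every
# ASCII punctuation character is mapped to a space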
# decode HTML entities with known Unicode equivalents
ent_re = re.compile(r'\&([^;]*);')
def ent_sub(m):
  ent = m.groups()[0]
  if ent in htmlentitydefs.name2codepoint:
    return chr(htmlentitydefs.name2codepoint[ent])
  if ent.startswith('#'):
    if ent.lower().startswith('#x'):
      codepoint = int('0x' + ent[2:], 16)
    else:
      try:
        codepoint = int(ent[1:])
      except ValueError:
        return ent
    if codepoint > 0 and codepoint < sys.maxunicode:
      return chr(codepoint)
  # fallback - leave as-is
  return '&%s;' % ent
  
def decode_entities(s):
  return ent_re.sub(ent_sub, s)
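# e.g. decode_entities('caf&eacute; costs 5&#8364;') -> u'café costs 5€'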
# XXX need to normalize for HTML entities as well
def lower(s):
  """Turn a string lower-case, including stripping accents"""
  return strip_diacritics(decode_entities(s)).translate(lc_map).lower()
# XXX this implementation is hopefully correct, but inefficient
# XXX we should be able to replace it with a finite state automaton in C
# XXX for better performance
# tested with u=u'\xe9sop\xe9sopfoo\xe9sop' and unicodedata.normalize('NFD', u)
def replace_first(s, pat, mark_begin, mark_end):
  """Case-insensitive replacement of the 1st occurrence of pat in s by repl"""
  lc = lower(s)
  pat = lower(pat)
  start = lc.find(pat)
  if start == -1:
    return s
  else:
    # (c)translitcodec does more than simply lowercasing, so we will need
    # to use bisection to find where in the untransliterated string the
    # pattern can be found
    if lower(s[start:]).find(pat) == -1:
      # As a fast-path, use the position in the transliterated string as
      # an initial guess of where to start, but in this case it did not work
      start = 0
    end = len(s)
    while lower(s[start:]).find(pat) > 0:
      if start == end:
        # XXX still can't find it, this shouldn't happen
        return s
      mid = (start + end + 1) // 2
      if lower(s[mid:]).find(pat) >= 0:
        start = mid
      else:
        end = mid
    # now we have the start, find the end
    end = start + len(pat)
    if lower(s[start:end]) != pat:
      end = start
      # the pattern may not be equal, e.g. searching for 'GB' in '£' that
      # expands to 'gbp'
      while not lower(s[start:end]).startswith(pat):
        end += 1
    return s[:start] + mark_begin + s[start:end] + mark_end + s[end:]
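# e.g. replace_first(u'Caf\xe9 society', 'cafe', '<b>', '</b>')
#      -> u'<b>Caf\xe9</b> society'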
strip_tags_re = re.compile('<[^>]*>')
def get_words(s):
  return set([
    word for word
    in lower(str(strip_tags_re.sub('', str(s)))
             ).translate(punct_map).split()
    if word not in stop_words])
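# e.g. get_words('<p>The Cats &amp; Dogs</p>') -> {'cats', 'dogs'}:
# tags and entities are stripped and 'the' is dropped as a stop word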
def stem(words):
  return {porter2.stem(word) for word in words}
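# e.g. stem({'running', 'jumps'}) -> {'run', 'jump'} via the Porter2 stemmer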
  
########################################################################
# HTML tag balancing logic
#
# from the HTML4 loose DTD http://www.w3.org/TR/html4/loose.dtd
fontstyle = ('b', 'big', 'i', 's', 'small', 'strike', 'tt', 'u')
phrase = ('abbr', 'acronym', 'cite', 'code', 'dfn', 'em', 'kbd', 'samp',
          'strong', 'var')
heading = ('h1', 'h2', 'h3', 'h4', 'h5', 'h6')
html4_elts = ('a', 'address', 'applet', 'area', 'base', 'basefont', 'bdo',
              'blockquote', 'body', 'br', 'button', 'caption', 'center',
              'col', 'colgroup', 'dd', 'del', 'dir', 'div', 'dl', 'dt',
              'fieldset', 'font', 'form', 'frame', 'frameset', 'head', 'hr',
              'html', 'iframe', 'img', 'input', 'ins', 'isindex', 'label',
              'legend', 'li', 'link', 'map', 'menu', 'meta', 'noframes',
              'noscript', 'object', 'ol', 'optgroup', 'option', 'p', 'param',
              'pre', 'q', 'script', 'select', 'span', 'style', 'sub', 'sup',
              'table', 'tbody', 'td', 'textarea', 'tfoot', 'th', 'thead',
              'title', 'tr', 'ul') + fontstyle + phrase + heading
inline_elts = ('a', 'abbr', 'acronym', 'address', 'bdo', 'caption', 'cite',
               'code', 'dfn', 'dt', 'em', 'font', 'i',
               'iframe', 'kbd', 'label', 'legend', 'p', 'pre', 'q', 's',
               'samp', 'small', 'span', 'strike', 'strong', 'sub', 'sup',
               'tt', 'u', 'var') + fontstyle + phrase + heading
# strictly speaking the closing '</p> tag is optional, but let's close it
# since it is so common
closing = ('a', 'address', 'applet', 'bdo', 'blockquote', 'button', 'caption',
           'center', 'del', 'dir', 'div', 'dl', 'fieldset', 'font', 'form',
           'frameset', 'iframe', 'ins', 'label', 'legend', 'map', 'menu',
           'noframes', 'noscript', 'object', 'ol', 'optgroup', 'pre', 'q',
           'script', 'select', 'span', 'style', 'sub', 'sup', 'table',
           'textarea', 'title', 'ul') + fontstyle + phrase + heading + ('p',)
# <!ENTITY % block
block = ('address', 'blockquote', 'center', 'dir', 'div', 'dl', 'fieldset',
         'form', 'hr', 'isindex', 'menu', 'noframes', 'noscript', 'ol', 'p',
         'pre', 'table', 'ul') + heading
# for XSS attacks, as feedparser is not completely immune
banned = ('script', 'applet', 'style')
# speed up things a bit
block = set(block)
closing = set(closing)
banned = set(banned)
acceptable_elements = set([
  'a', 'abbr', 'acronym', 'address', 'area', 'article', 'aside', 'audio', 'b',
  'big', 'blockquote', 'br', 'button', 'canvas', 'caption', 'center', 'cite',
  'code', 'col', 'colgroup', 'command', 'datagrid', 'datalist', 'dd', 'del',
  'details', 'dfn', 'dialog', 'dir', 'div', 'dl', 'dt', 'em', 'event-source',
  'fieldset', 'figcaption', 'figure', 'footer', 'font', 'form', 'header',
  'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'input', 'ins',
  'keygen', 'kbd', 'label', 'legend', 'li', 'm', 'map', 'menu', 'meter',
  'multicol', 'nav', 'nextid', 'ol', 'output', 'optgroup', 'option', 'p',
  'pre', 'progress', 'q', 's', 'samp', 'section', 'select', 'small', 'sound',
  'source', 'spacer', 'span', 'strike', 'strong', 'sub', 'sup', 'table',
  'tbody', 'td', 'textarea', 'time', 'tfoot', 'th', 'thead', 'tr', 'tt', 'u',
  'ul', 'var', 'video', 'noscript'
])
acceptable_attributes = [
  'abbr', 'accept', 'accept-charset', 'accesskey', 'action', 'align', 'alt',
  'autocomplete', 'autofocus', 'axis', 'background', 'balance', 'bgcolor',
  'bgproperties', 'border', 'bordercolor', 'bordercolordark',
  'bordercolorlight', 'bottompadding', 'cellpadding', 'cellspacing', 'ch',
  'challenge', 'char', 'charoff', 'choff', 'charset', 'checked', 'cite',
  'class', 'clear', 'color', 'cols', 'colspan', 'compact', 'contenteditable',
  'controls', 'coords', 'data', 'datafld', 'datapagesize', 'datasrc',
  'datetime', 'default', 'delay', 'dir', 'disabled', 'draggable', 'dynsrc',
  'enctype', 'end', 'face', 'for', 'form', 'frame', 'galleryimg', 'gutter',
  'headers', 'height', 'hidefocus', 'hidden', 'high', 'href', 'hreflang',
  'hspace', 'icon', 'id', 'inputmode', 'ismap', 'keytype', 'label',
  'leftspacing', 'lang', 'list', 'longdesc', 'loop', 'loopcount', 'loopend',
  'loopstart', 'low', 'lowsrc', 'max', 'maxlength', 'media', 'method', 'min',
  'multiple', 'name', 'nohref', 'noshade', 'nowrap', 'open', 'optimum',
  'pattern', 'ping', 'point-size', 'poster', 'pqg', 'preload', 'prompt',
  'radiogroup', 'readonly', 'rel', 'repeat-max', 'repeat-min', 'replace',
  'required', 'rev', 'rightspacing', 'rows', 'rowspan', 'rules', 'scope',
  'selected', 'shape', 'size', 'span', 'src', 'start', 'step', 'summary',
  'suppress', 'tabindex', 'target', 'template', 'title', 'toppadding', 'type',
  'unselectable', 'usemap', 'urn', 'valign', 'value', 'variable', 'volume',
  'vspace', 'vrml', 'width', 'wrap'
]
acceptable_css_properties = [
  'azimuth', 'background-color', 'border-bottom-color', 'border-collapse',
  'border-color', 'border-left-color', 'border-right-color',
  'border-top-color', 'clear', 'color', 'cursor', 'direction', 'display',
  'elevation', 'float', 'font', 'font-family', 'font-size', 'font-style',
  'font-variant', 'font-weight', 'height', 'letter-spacing', 'line-height',
  'overflow', 'pause', 'pause-after', 'pause-before', 'pitch', 'pitch-range',
  'richness', 'speak', 'speak-header', 'speak-numeral', 'speak-punctuation',
  'speech-rate', 'stress', 'text-align', 'text-decoration', 'text-indent',
  'unicode-bidi', 'vertical-align', 'voice-family', 'volume', 'white-space',
  'width'
]
def sanitize_text(text):
  """Sanitize text fields like title or feed description for XSS"""
  return bleach.clean(
    text,
    tags=[],
    attributes=[],
    styles=[],
    strip=True
  )
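# e.g. sanitize_text('Breaking <b>news</b>') -> 'Breaking news': no tags
# are allowed here, so any markup is stripped and only the text is kept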
  
tag_re = re.compile(r'(<>|<[^!].*?>|<!\[CDATA\[|\]\]>|<!--.*?-->|<[!]>)',
                    re.DOTALL | re.MULTILINE)
def balance(html, limit_words=None, ellipsis=' ...'):
  # we cannot trust feedparser to sanitize
  if not limit_words:
    #return html5lib.serialize(html5lib.parse(html))
    return bleach.clean(
      html,
      tags=acceptable_elements,
      attributes=acceptable_attributes,
      styles=acceptable_css_properties,
      strip=True
    )
  # the legacy balancing logic is redundant with Bleach's,
  # but this is seldom used
  word_count = 0
  tokens = tag_re.split(html)
  out = []
  stack = []
  for token in tokens:
    if not token.startswith('<'):
      if limit_words and word_count > limit_words:
        break
      words = token.split()
      word_count += len(words)
      if limit_words and word_count > limit_words:
        crop = limit_words - word_count
        out.append(' '.join(words[:crop]) + ellipsis)
      else:
        out.append(token)
      continue
    if token.startswith('<!'): continue
    if token == ']]>': continue
    if not token.endswith('>'): continue # invalid
    element = token[1:-1].split()[0].lower()
    if not element: continue # invalid
    if element in banned:
      element = 'pre'
      token = '<pre>'
    if element.startswith('/'):
      element = element[1:]
      if element in banned:
        element = 'pre'
        token = '</pre>'
      if element in stack:
        top = None
        while stack and top != element:
          top = stack.pop()
          out.append('</%s>' % top)
        continue
      else:
        continue
    if element in block and stack and stack[-1] not in block:
      # close previous block if any
      for i in range(len(stack) - 1, -1, -1):
        if stack[i] in block: break
      stack, previous_block = stack[:i], stack[i:]
      previous_block.reverse()
      for tag in previous_block:
        out.append('</%s>' % tag)
      
    if element in closing and not token.endswith('/>'):
      stack.append(element)
    out.append(token)
  # flush the stack
  out.extend(['</%s>' % element for element in reversed(stack)])
  html = ''.join(out)
  return bleach.clean(
    html,
    tags=acceptable_elements,
    attributes=acceptable_attributes,
    styles=acceptable_css_properties,
    strip=True
  )
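# e.g. balance('<p>one two three', limit_words=2) -> '<p>one two ...</p>':
# the text is cropped to two words and the dangling <p> is closed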
########################################################################
def normalize_all(f):
  normalize_feed(f)
  for item in f.entries:
    normalize(item, f)
def normalize_feed(f):
  if 'description' not in f['channel']:
    f['channel']['description'] = f['channel'].get('title', '')
  f['channel']['description'] = sanitize_text(f['channel']['description'])
  if 'modified' in f and type(f['modified']) == str:
    try:
      f['modified'] = time.strptime(f['modified'],
                                    '%a, %d %b %Y %H:%M:%S GMT')
    except ValueError:
      f['modified'] = time.strptime(f['modified'],
                                    '%a, %d %b %Y %H:%M:%S +0000')
# Often, broken RSS writers will not handle daylight savings time correctly
# and use a timezone that is off by one hour. For instance, in the US/Pacific
# time zone:
# February 3, 2004, 5:30PM is 2004-02-03T17:30:00-08:00 (standard time)
# August 3, 2004, 5:30PM US/Pacific is 2004-08-03T17:30:00-07:00 (DST)
# but broken implementations will incorrectly write:
# 2004-08-03T17:30:00-08:00 in the second case
# There is no really good way to guard against this, but if the created or
# modified date is in the future, we are clearly in this situation and
# subtract one hour to correct for this bug
def fix_date(date_tuple):
  if not date_tuple:
    return date_tuple
  if date_tuple > time.gmtime():
    # feedparser's parsed date tuple has no DST indication, we need to force it
    # because there is no UTC equivalent of mktime()
    date_tuple = date_tuple[:-1] + (-1,)
    date_tuple = time.localtime(time.mktime(date_tuple) - 3600)
    # if it is still in the future, the implementation is hopelessly broken,
    # truncate it to the present
    if date_tuple > time.gmtime():
      return time.gmtime()
    else:
      return date_tuple
  else:
    return date_tuple
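# A minimal sketch of the correction (hypothetical values): a timestamp
# claiming to be 30 minutes in the future is pulled back a full hour:
#   future = time.gmtime(time.time() + 1800)
#   fixed = fix_date(future)         # roughly one hour earlier
#   assert fixed <= time.gmtime()    # never in the future after correction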
# why doesn't feedparser do these basic normalizations?
def basic(f, feed_xml):
  if 'url' not in f:
    f['url'] = feed_xml
  # CVS versions of feedparser are not throwing exceptions as they should
  # see:
  # http://sourceforge.net/tracker/index.php?func=detail&aid=1379172&group_id=112328&atid=661937
  if not f.feed or ('link' not in f.feed or 'title' not in f.feed):
    # some feeds have multiple links, one for self and one for PuSH
    if f.feed and 'link' not in f.feed and 'links' in f.feed:
      try:
        for l in f.feed['links']:
          if l['rel'] == 'self':
            f.feed['link'] = l['href']
      except KeyError:
        pass
  if 'title' in f.feed:
    f.feed['title'] = sanitize_text(f.feed['title'])
  
def dereference(url, seen=None, level=0):
  """Recursively dereference a URL"""
  # this set is used to detect redirection loops
  if seen is None:
    seen = set([url])
  else:
    seen.add(url)
  # stop recursion if it is too deep
  if level > 16:
    return url
  try:
    r = requests.get(url, allow_redirects=False, timeout=param.http_timeout)
    if not r.is_redirect:
      return url
    else:
      # break a redirection loop if it occurs
      redir = r.headers.get('Location')
      if not redir:
        return url
      if not any(redir.startswith(p)
                 for p in ('http://', 'https://', 'ftp://')):
        return url
      if redir in seen:
        return url
      # some servers redirect to Unicode URLs, which are not legal
      try:
        str(redir)
      except UnicodeDecodeError:
        return url
      # there might be several levels of redirection
      return dereference(redir, seen, level + 1)
  except (requests.exceptions.RequestException, ValueError, socket.error):
    return url
  except:
    util.print_stack()
    return url
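# Example: an a -> b -> a redirection loop terminates at the second hop,
# because 'a' is already in 'seen' when it comes up again, and the original
# URL is returned unchanged.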
  
url_re = re.compile('(?:href|src)="([^"]*)"', re.IGNORECASE)
def normalize(item, f, run_filters=True):
  # get rid of RDF lossage...
  for key in ['title', 'link', 'created', 'modified', 'author',
              'content', 'content_encoded', 'description']:
    if type(item.get(key)) == list:
      if len(item[key]) == 1:
        item[key] = item[key][0]
      else:
        candidate = [i for i in item[key] if i.get('type') == 'text/html']
        if len(candidate) > 1 and key == 'content':
          candidate = sorted(candidate,
                             key=lambda i: len(i.get('value', '')),
                             reverse=True)[:1]
        if len(candidate) == 1:
          item[key] = candidate[0]
        else:
          # XXX not really sure how to handle these cases
          print('E' * 16, 'ambiguous RDF', key, item[key], file=param.log)
          item[key] = item[key][0]
    if isinstance(item.get(key), dict) and 'value' in item[key]:
      item[key] = item[key]['value']
  ########################################################################
  # title
  if 'title' not in item or not item['title'].strip():
    item['title'] = 'Untitled'
  item['title'] = sanitize_text(item['title'])
  item['title_lc'] = lower(item['title'])
  item['title_words_exact'] = get_words(item['title_lc'])
  item['title_words'] = stem(item['title_words_exact'])
  ########################################################################
  # link
  #
  # The RSS 2.0 specification allows items not to have a link if the entry
  # is complete in itself; that said, this is almost always spurious, so we
  # filter it below
  if 'link' not in item:
    item['link'] = f['channel']['link']
    # We have to be careful not to assign a default URL as the GUID
    # otherwise only one item will ever be recorded
    if 'id' not in item:
      item['id'] = 'HASH_CONTENT'
      item['RUNT'] = True
  ########################################################################
  # GUID
  if 'id' not in item:
    item['id'] = item['link']
  ########################################################################
  # creator
  if 'author' not in item or item['author'] == 'Unknown':
    item['author'] = 'Unknown'
    if 'author' in f['channel']:
      item['author'] = f['channel']['author']
  item['author'] = sanitize_text(item['author'])
  ########################################################################
  # created and modified dates
  if 'modified' not in item:
    item['modified'] = f['channel'].get('modified')
  # created - use modified if not available
  if 'created' not in item:
    if 'modified_parsed' in item:
      created = item['modified_parsed']
    else:
      created = None
  else:
    created = item['created_parsed']
  if not created:
    # XXX use HTTP last-modified date here
    created = time.gmtime()
    # feeds that do not have timestamps cannot be garbage-collected
    # XXX need to find a better heuristic, as high-volume sites such as
    # XXX The Guardian, CNET.com or Salon.com lack item-level timestamps
    f['oldest'] = '1970-01-01 00:00:00'
  created = fix_date(created)
  item['created'] = time.strftime(date_fmt, created)
  # keep track of the oldest item still in the feed file
  if 'oldest' not in f:
    f['oldest'] = '9999-99-99 99:99:99'
  if item['created'] < f['oldest']:
    f['oldest'] = item['created']
  # finish modified date
  if 'modified_parsed' in item and item['modified_parsed']:
    modified = fix_date(item['modified_parsed'])
    # add a fudge factor time window within which modifications are not
    # counted as such, 10 minutes here
    if not modified or abs(time.mktime(modified) - time.mktime(created)) < 600:
      item['modified'] = None
    else:
      item['modified'] = time.strftime(date_fmt, modified)
  else:
    item['modified'] = None
  ########################################################################
  # content
  if 'content' in item:
    content = item['content']
  elif 'content_encoded' in item:
    content = item['content_encoded']
  elif 'description' in item:
    content = item['description']
  else:
    content = '<a href="' + item['link'] + '">' + item['title'] + '</a>'
  if not content:
    content = '<a href="' + item['link'] + '">' + item['title'] + '</a>'
  # strip embedded NULs as a defensive measure
  content = content.replace('\0', '')
  # apply ad filters and other degunking to content
  old_content = None
  while old_content != content:
    old_content = content
    try:
      for filter in transform.filter_list:
        content = filter.apply(content, f, item)
    except:
      util.print_stack(black_list=['item'])
  # balance tags like <b>...</b> and sanitize
  content = balance(content)
  content_lc = lower(content)
  # the content might have invalid 8-bit characters.
  # Heuristic suggested by Georg Bauer
  if type(content) != str:
    try:
      content = content.decode('utf-8')
    except UnicodeError:
      content = content.decode('iso-8859-1')
  #
  item['content'] = content
  # we recalculate this as content may have changed due to tag rebalancing, etc
  item['content_lc'] = lower(content)
  item['content_words_exact'] = get_words(item['content_lc'])
  item['content_words'] = stem(item['content_words_exact'])
  item['union_lc'] = item['title_lc'] + '\n' + item['content_lc']
  item['union_words'] = item['title_words'].union(item['content_words'])
  item['urls'] = url_re.findall(content)
  ########################################################################
  # categories/tags
  # we used 'category' before, but 'category' and 'categories' are
  # intercepted by feedparser.FeedParserDict.__getitem__ and treated as a
  # special case
  if 'tags' in item and type(item['tags']) == list:
    item['item_tags'] = set([lower(sanitize_text(t['term']))
                             for t in item['tags']])
  else:
    item['item_tags'] = set()  # keep the type consistent with the set branch above
  ########################################################################
  # map unicode
  # for key in ['title', 'link', 'created', 'modified', 'author', 'content']:
  #   if type(item.get(key)) == str:
  #     item[key] = item[key].encode('ascii', 'xmlcharrefreplace')
  # hash the content as the GUID if required
  if item['id'] == 'HASH_CONTENT':
    item['id'] = hashlib.md5(
      (item['title'] + item['content']).encode('utf-8')).hexdigest()
  return item
  
def escape_xml(s):
  """Escape entities for an XML target"""
  try:
    s = s.decode('utf-8')
  except (AttributeError, UnicodeDecodeError):
    pass

  return s.replace('&', '&amp;').replace("'", '&apos;').replace('"', '&quot;').replace('<', '&lt;').replace('>', '&gt;').encode('ascii', 'xmlcharrefreplace').decode('ascii')
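# A quick sanity check of the replacements above (a sketch, not a test):
#   escape_xml('<a href="x">&</a>')
#   -> '&lt;a href=&quot;x&quot;&gt;&amp;&lt;/a&gt;'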
 | 
	mit | -4,501,585,619,127,446,500 | 43.844993 | 173 | 0.580693 | false | 
| 
	a-krebs/finances | 
	finances/finances/urls.py | 
	1 | 
	1583 | 
	# Copyright (C) 2012  Aaron Krebs [email protected]
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
# development serving of static files
# TODO: before deployment, serve static files properly
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns = patterns('',
    # Uncomment the admin/doc line below to enable admin documentation:
    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),
    
    url(r'^accounts/', include('accounts.urls', namespace='accounts')),
    url(r'^budgets/', include('budgets.urls', namespace='budgets')),
    url(r'^register/', include('django_registration.urls', namespace='registration')),
)
# TODO: remove before deployment
urlpatterns += staticfiles_urlpatterns()
 | 
	gpl-3.0 | 1,204,365,858,756,522,800 | 40.657895 | 86 | 0.749842 | false | 
| 
	XD-embedded/xd-build-core | 
	tests/recipe_file_test.py | 
	1 | 
	4304 | 
	from xd.build.core.recipe_file import *
from case import *
import unittest
import io
class tests(unittest.case.TestCase):
    def test_split_no_version(self):
        name, version = RecipeFile.split_name_and_version('foo')
        self.assertEqual(name, 'foo')
        self.assertEqual(str(version), '')
    def test_split_version_1(self):
        name, version = RecipeFile.split_name_and_version('foo_4.2')
        self.assertEqual(name, 'foo')
        self.assertEqual(str(version), '4.2')
    def test_split_version_2(self):
        name, version = RecipeFile.split_name_and_version('foo_4.2.xd')
        self.assertEqual(name, 'foo')
        self.assertEqual(str(version), '4.2')
    def test_split_bad_version_1(self):
        with self.assertRaises(InvalidRecipeName):
            RecipeFile.split_name_and_version('')
    def test_split_bad_version_2(self):
        with self.assertRaises(InvalidRecipeName):
            RecipeFile.split_name_and_version('foo_bar_4.2')
    def test_with_path_1(self):
        recipe_file = RecipeFile('/path/to/something/foo.xd')
        self.assertEqual(recipe_file.name, 'foo')
        self.assertEqual(str(recipe_file.version), '')
    def test_with_path_2(self):
        recipe_file = RecipeFile('/some/path/bar_31.7.xd')
        self.assertEqual(recipe_file.name, 'bar')
        self.assertEqual(str(recipe_file.version), '31.7')
    def test_with_odd_path_1(self):
        with self.assertRaises(InvalidRecipeFilename):
            RecipeFile('/some/path/.xd')
    def test_bad_filename_1(self):
        with self.assertRaises(InvalidRecipeFilename):
            RecipeFile('/tmp/foo.bar')
    def test_bad_filename_2(self):
        with self.assertRaises(InvalidRecipeFilename):
            RecipeFile('/tmp/foo')
    def test_bad_filename_3(self):
        with self.assertRaises(InvalidRecipeFilename):
            RecipeFile('/some/path/.xd')
    def test_bad_filename_4(self):
        with self.assertRaises(InvalidRecipeFilename):
            RecipeFile('/some/path/foo_bar_1.xd')
    def test_with_odd_name(self):
        recipe_file = RecipeFile('/some/path/bar.93-1_4.xd')
        self.assertEqual(recipe_file.name, 'bar.93-1')
        self.assertEqual(str(recipe_file.version), '4')
    def test_with_odd_version_1(self):
        recipe_file = RecipeFile('/some/path/bar_4.2.1rc3.1-1.xd')
        self.assertEqual(recipe_file.name, 'bar')
        self.assertEqual(str(recipe_file.version), '4.2.1rc3.1-1')
    def test_with_odd_version_2(self):
        recipe_file = RecipeFile('/some/path/bar_89.23~build-189.xd')
        self.assertEqual(recipe_file.name, 'bar')
        self.assertEqual(str(recipe_file.version), '89.23~build-189')
    def test_str_1(self):
        recipe_file = RecipeFile('/tmp/foo.xd')
        self.assertEqual(str(recipe_file), 'foo')
    def test_str_2(self):
        recipe_file = RecipeFile('/tmp/foo_1.89.xd')
        self.assertEqual(str(recipe_file), 'foo_1.89')
    def test_repr(self):
        recipe_file = RecipeFile('/tmp/foo_1.89.xd')
        self.assertEqual(repr(recipe_file), "RecipeFile('/tmp/foo_1.89.xd')")
    def test_eq_1(self):
        recipe_file_a = RecipeFile('/tmp/foo_1.89.xd')
        recipe_file_b = RecipeFile('/tmp/foo_1.89.xd')
        self.assertEqual(recipe_file_a, recipe_file_b)
    def test_eq_2(self):
        recipe_file_a = RecipeFile('/tmp/foo_1.89.xd')
        recipe_file_b = RecipeFile('/tmp/foo_1.90.xd')
        self.assertNotEqual(recipe_file_a, recipe_file_b)
    def test_eq_3(self):
        recipe_file_a = RecipeFile('/tmp/foo_1.89.xd')
        recipe_file_b = '/tmp/foo_1.89.xd'
        self.assertNotEqual(recipe_file_a, recipe_file_b)
class parse_tests(TestCase):
    def test_parse_1(self):
        with open('foobar.xd', 'w') as f:
            f.write('FOO="foo"\n')
        recipe_file = RecipeFile('foobar.xd')
        d = recipe_file.parse()
        self.assertEqual(len(d), 1)
        self.assertEqual(d['FOO'].get(), 'foo')
    def test_dump_1(self):
        with open('foobar.xd', 'w') as f:
            f.write('FOO="foo"\n')
        recipe_file = RecipeFile('foobar.xd')
        recipe_file.parse()
        stream = io.StringIO()
        recipe_file.dump(stream=stream)
        self.assertRegex(stream.getvalue(), "FOO='foo'\n")
 | 
	mit | 5,633,485,530,337,033,000 | 34.278689 | 77 | 0.619424 | false | 
| 
	oztalha/weatherforecast | 
	weather.py | 
	1 | 
	14123 | 
	"""
author Talha Oz
this is the main code implemented for cs780 class project
"""
# -*- coding: utf-8 -*-
import pandas as p
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer
import re
from sklearn import cross_validation
import preprocessing as pre
from sklearn.naive_bayes import MultinomialNB
from sklearn.decomposition import PCA
from sklearn.multiclass import OneVsRestClassifier
from sklearn import linear_model
import matplotlib.pyplot as plt
from variableNames import *
import scipy.sparse
from sklearn.linear_model import SGDRegressor
from sklearn.linear_model import SGDClassifier
from sklearn.neighbors import KNeighborsRegressor
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_selection import SelectKBest, chi2
# imports needed by code paths further below (is_LSA, crossvalidate, __main__);
# added here because they previously appeared only inside the commented-out block
from sklearn.decomposition import TruncatedSVD
from sklearn.cross_validation import cross_val_score
import argparse
"""
from sklearn.svm import SVC
from sklearn.decomposition import TruncatedSVD
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.linear_model import Perceptron
from sklearn import svm
from sklearn.preprocessing import LabelBinarizer
import matplotlib.pyplot as plt
from scipy import sparse
from sklearn import decomposition
from sklearn.ensemble import ExtraTreesRegressor
import sklearn.decomposition as deco
import argparse
from sklearn.svm import SVC
%autoreload 2
"""
def plotClasses(y):
	"""
	each class is counted by its weight, not # of nonzero occurrences
	"""
	fig = plt.figure()
	ax = plt.subplot(1,1,1)
	x1 = range(y.shape[1])
	y1 = [sum(y[:,a]) for a in range(y.shape[1])]
	width = 0.8
	labels = "s1,s2,s3,s4,s5,w1,w2,w3,w4,k1,k2,k3,k4,k5,k6,k7,k8,k9,k10,k11,k12,k13,k14,k15".split(',')
	if y.shape[1] == 5:
		labels = labels[:5]
	elif y.shape[1] == 4:
		labels = labels[5:9]
	else:
		labels = labels[9:]
	plt.xticks(np.arange(y.shape[1])+width/2,labels)
	legendkeys = tuple([k for k,v in legend.items() if k in labels])
	legendvalues= tuple([v for k,v in legend.items() if k in labels])
	[ax.bar(X,Y,width=width,label=k+' '+v) for X,Y,k,v in zip(x1,y1,legendkeys,legendvalues)]
	# Shink current axis by 20%
	box = ax.get_position()
	ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
	# Put a legend to the right of the current axis
	ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
	#ax.legend(('1','2'),('1a','2a'))#legendkeys,legendvalues
	plt.show()
def vectorize(y,train,t2,model,kbest=0,is_tfidf=True,is_sparse=True,max_features=None,is_cv=False,perlabel=False,min_df=5,is_nominal=False,is_normal=False,is_LSA=False,scale=False):
	
	if is_cv:
		X_train, X_test, y_train, y_true = cross_validation.train_test_split(train['tweet'], y, test_size=.20, random_state = 0)
		# X_train1, X_test1, y_train, y_true = cross_validation.train_test_split(train['tweet'], y, test_size=.20, random_state = 0)
		# X_train2, X_test2, y_train, y_true = cross_validation.train_test_split(train['state'], y, test_size=.20, random_state = 0)
		# X_train3, X_test3, y_train, y_true = cross_validation.train_test_split(train['location'], y, test_size=.20, random_state = 0)
		# X_train = np.hstack((X_train1,X_train2,X_train3))
		# X_test = np.hstack((X_test1,X_test2,X_test3))
	else:
		X_train = train['tweet']
		X_test = t2['tweet']
		y_train = y
	
	# if (y_train.shape[1] > 6):
	# 	model = linear_model.Ridge (alpha = 3.0, normalize = False)
	# if is_PCA:
	# 	modelPCA = PCA(n_components='mle')
	# 	model.fit(X_train)
	if is_tfidf:
		#tfidf = TfidfVectorizer(max_features=max_features, strip_accents='unicode', analyzer='word', smooth_idf=True,sublinear_tf=True,max_df=0.5,min_df=min_df,ngram_range=(1,2),use_idf=True)
		tfidf = TfidfVectorizer(max_features=max_features,strip_accents='unicode', analyzer='word', smooth_idf=True,sublinear_tf=True,max_df=0.5,min_df=5,ngram_range=(1,2),use_idf=True)
		#tfidf.fit(np.hstack((X_train,X_test))) #fitting on the whole data resulted in a worse mse score
		tfidf.fit(X_train)
		X_train = tfidf.transform(X_train)
		X_test = tfidf.transform(X_test)
		if is_LSA:
			LSA = TruncatedSVD(n_components=10000, algorithm='randomized', n_iter=5, random_state=0, tol=0.0)
			X_train = LSA.fit_transform(X_train)
			X_test = LSA.transform(X_test)
	else:
		vectorizer = CountVectorizer( binary = True )
		X_train = vectorizer.fit_transform(X_train)
		X_test = vectorizer.transform(X_test)
	if is_nominal:
		if (y_train.shape[1] < 16):
			y_rest = y_train.copy()
			X_train_list = []
			y_weight_list = []
			y_train_list = []
			for i in range(y_rest.shape[1]):
				X_train_list.append(X_train) # repeat X to matchup
				y_weight_list.append(np.apply_along_axis(lambda a: a.max(), 1, y_rest)) # get the maximum in y_rest
				y_train_list.append(np.apply_along_axis(lambda a: a.argmax(), 1, y_rest).astype(int)) # get the position of the maximum in y_rest
				y_rest = np.apply_along_axis(lambda a: [0 if i == a.argmax() else x for i,x in enumerate(a)], 1, y_rest) #set maximum to zero
				# y_weight = np.concatenate((y_weight, np.apply_along_axis(lambda a: a.max(), 1, y_rest)))
				# y_train = np.concatenate((y_train, np.apply_along_axis(lambda a: a.argmax(), 1, y_rest).astype(int)))
				# y_train = np.apply_along_axis(lambda a: [np.floor(x) if x != a.max() else 1 for x in a], 1, y_train).astype(bool)
			not_kind = True
			X_train = scipy.sparse.vstack(X_train_list)
			y_train = np.concatenate(y_train_list)
			y_weight = np.concatenate(y_weight_list)
		else:
			not_kind = False
			#y_train = np.apply_along_axis(lambda a: [np.floor(x) if i != a.argmax() else 1 for i,x in enumerate(a)], 1, y_train).astype(bool)
		#y_train = np.ceil(y_train).astype(bool)
		#y_weight = y_train.copy()
	if perlabel:
		test_prediction=[]
		for i in range(y_train.shape[1]):
			if is_nominal:
				model.fit(X_train,y_train[:,i]) #sample_weight=y_weight[:,i]
				pred = model.predict_proba(X_test)
				# pred = model.predict_log_proba(X_test) # for log in SGDRegressor
				print pred.shape
				test_prediction.append(pred)
			else:
				model.fit(X_train,y_train[:,i])
				test_prediction.append(model.predict(X_test))
		pred = np.array(test_prediction).T
	if kbest:
		ch2 = SelectKBest(chi2, k=kbest)  # fixed: the old call passed k both positionally and as a keyword
		#yb = y_train
		yb = np.around(y_train).astype(bool)
		X_train = ch2.fit_transform(X_train, yb)
		X_test  = ch2.transform(X_test)
	
	if not is_sparse:
		X_train = X_train.toarray()
		X_test = X_test.toarray()
		#nmf = decomposition.NMF(n_components=y_train.shape[1]).fit(tfidf)
		#cca = CCA(n_components=100)
		#X_train = cca.fit_transform(X_train)
		#X_test = cca.transform(X_test)
	if not perlabel:
		if is_nominal and not_kind:
			model.fit(X_train, y_train,sample_weight=y_weight)
			pred = model.predict_proba(X_test)
			#model.fit(X_train.toarray(), y_train.toarray(),sample_weight=y_weight)
			#pred = model.predict_proba(X_test.toarray())
			# model.fit(scipy.sparse.csr_matrix(X_train), scipy.sparse.csr_matrix(y_train),sample_weight=y_weight) # added tocsr() !!!
			# pred = model.predict_proba(scipy.sparse.csr_matrix(X_test))
			#model.fit(scipy.sparse.csr_matrix(X_train), y_train,sample_weight=y_weight) #perceptron
			#pred = model.predict_proba(scipy.sparse.csr_matrix(X_test))
		else:
			model.fit(X_train, y_train)
			pred = model.predict(X_test)
	if scale:
		if (y_train.shape[1] < 6):
			pred = np.apply_along_axis(lambda a: a/(np.max(a)-np.min(a)),1,pred)
	if is_normal and (y_train.shape[1] < 6):
		#pred[pred < 0.1] = 0.0
		#pred[pred > 0.9] = 1.0
		row_sums = pred.sum(axis=1)
		pred = pred / row_sums[:, np.newaxis]
	pred = np.around(pred,3)
	pred = pred.clip(0,1)
	if is_cv:
		return pred,y_true
	else:
		return pred
def cv_loop(train, t2, model, is_sparse=True,kbest=0,is_class=False,is_tfidf=True,max_features=20000,perlabel=False,min_df=5,is_nominal=False,is_normal=False,is_LSA=False,scale=False):
	y = np.array(train.ix[:,4:])
	ys = y[:,:5]#4:9 labeles of sentiment
	yw = y[:,5:9]#9:13 labels of when
	yk = y[:,9:]#13: labels of kind
	if is_class:
		ys,yw,yk = [np.around(y).astype(bool) for y in (ys,yw,yk)]
	if perlabel:
		pred,ytrue = vectorize(y,train,t2,model,is_tfidf = is_tfidf,kbest=kbest,is_sparse=is_sparse,max_features=max_features,is_cv=True,perlabel=perlabel,is_nominal=is_nominal,is_normal=is_normal,min_df=min_df,scale=scale)
	else:
		#(preds,ys_true),(predw,yw_true) = [vectorize(y,train,t2,model,is_tfidf = is_tfidf,kbest=kbest,is_sparse=is_sparse,max_features=max_features,is_cv=True,perlabel=perlabel,min_df=min_df,is_nominal=is_nominal,is_normal=is_normal) for y in (ys,yw)]
		#pred = np.hstack((preds,predw))
		#ytrue = np.hstack((ys_true,yw_true))
		(preds,ys_true),(predw,yw_true),(predk,yk_true) = [vectorize(y,train,t2,model,is_tfidf = is_tfidf,kbest=kbest,is_sparse=is_sparse,max_features=max_features,is_cv=True,perlabel=perlabel,min_df=min_df,is_nominal=is_nominal,is_normal=is_normal,is_LSA=is_LSA,scale=scale) for y in (ys,yw,yk)]
		pred = np.hstack((preds,predw,predk))
		ytrue = np.hstack((ys_true,yw_true,yk_true))
	#pred[pred < 0.01] = 0.0
	#pred[pred > 0.99] = 1.0
	mse = np.sqrt(np.sum(np.array(pred-ytrue)**2)/(ytrue.shape[0]*float(ytrue.shape[1])))
	print 'Train error: {0}'.format(mse)
	return pred,ytrue
	
def submission(predictions,filename='prediction.csv'): 
	col = '%i,' + '%.2f,'*23 + '%.2f'
	header = "id,s1,s2,s3,s4,s5,w1,w2,w3,w4,k1,k2,k3,k4,k5,k6,k7,k8,k9,k10,k11,k12,k13,k14,k15"
	np.savetxt(filename, predictions,col, header=header,delimiter=',') # need to remove first two characters in the output file before submitting!
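	# numpy prefixes the header with '# ', hence the manual cleanup noted above;
	# passing comments='' would avoid it entirely (a sketch, not applied here):
	#   np.savetxt(filename, predictions, col, header=header, delimiter=',', comments='')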
def crossvalidate(clf,X,y,cv=3):
	scores=[]
	for i in range(int(y.shape[1])):
		clf.fit(X,y[:,i])
		scores.append(cross_val_score(clf, X, y[:,i]))
		print scores[-1],
	return scores
def predictMe(clf,X,y,test,ids):
	test_prediction=[]
	for i in range(int(y.shape[1])):
		clf.fit(X,y[:,i])
		test_prediction.append(clf.predict(test))
	testpred = np.array(test_prediction)
	prediction = np.array(np.hstack([np.matrix(ids).T, testpred.T]))
	return prediction
def predictThis(clf,train,t2,kbest=0,max_features=20000,is_tfidf=True,is_sparse=True,is_nominal=False,is_LSA=False,min_df=5):
	y = np.array(train.ix[:,4:])
	ys = y[:,:5]#4:9 labeles of sentiment
	yw = y[:,5:9]#9:13 labels of when
	yk = y[:,9:]#13: labels of kind
	if is_tfidf:
		#create a tf-idf class with the given features (stop_words='english' is removed since this is done in preprocessing)
		tfidf = TfidfVectorizer(max_features=max_features, strip_accents='unicode', analyzer='word', smooth_idf=True,sublinear_tf=True,max_df=0.5, min_df=min_df, ngram_range=(1,2))
		sent, when, kind = [vectorize(y,train,t2,clf,is_tfidf = is_tfidf,kbest=kbest,is_sparse=is_sparse,max_features=max_features,is_nominal=is_nominal,is_LSA=is_LSA) for y in (ys,yw,yk)]
	testpred = np.hstack((sent, when, kind))
	testpred = np.around(testpred.clip(0,1),3)
	prediction = np.array(np.hstack([np.matrix(t2['id']).T, testpred]))
	return prediction
# to learn about indexing in pandas: http://pandas.pydata.org/pandas-docs/stable/indexing.html#advanced-indexing-with-hierarchical-index
def predictKind(train_file,test_file):
	# debugging leftovers; hard-coding these would override the arguments
	# passed in, so they are kept commented out
	#train_file = 'train.csv'
	#test_file = 'test.csv'
	#read files into pandas
	train = p.read_csv(train_file)
	t2 = p.read_csv(test_file)
	for row in train.index:
		train['tweet'][row]=' '.join([train['tweet'][row],train['state'][row],str(train['location'][row])])
	#preprocessing for kind prediction: emoticons and stop words can be ignored
	# for row in train.index:
	# 	train['tweet'][row] = pre.preprocess_pipeline(' '.join([train['tweet'][row],train['state'][row],str(train['location'][row])]), return_as_str=True, do_remove_stopwords=True,do_emoticons=True)
	# for row in t2.index:
	# 	t2['tweet'][row] = pre.preprocess_pipeline(' '.join([t2['tweet'][row],str(t2['state'][row]),str(t2['location'][row])]), return_as_str=True, do_remove_stopwords=True,do_emoticons=True)
	clf = linear_model.Ridge (alpha = 3.0, normalize = True)
	#pred,ytrue = cv_loop(train, t2, clf)
	#row_sums = pred.sum(axis=1)
	#pred_norm = pred / row_sums[:, numpy.newaxis]
	#mse = np.sqrt(np.sum(np.array(pred_norm-ytrue)**2)/(pred_norm.shape[0]*24.0))
	#print 'Normalized train error: {0}'.format(mse) #Normalized train error: 0.366281924654
	prediction = predictThis(clf,train,t2)
	submission(prediction,'prediction.csv')
	#metadata =sparse.csr_matrix([ metadata ]).T
	#X = sparse.hstack([X, metadata]).tocsr()
	#metadata = (metadata - mean(metadata))/(max(metadata) - min(metadata))
if __name__ == "__main__":
	# parse commandline arguments
	parser = argparse.ArgumentParser()
	parser.add_argument("train_file", help="a filename name must be specified")
	parser.add_argument("test_file", help="a filename name must be specified")
	args = parser.parse_args()
	prediction = predictKind(args.train_file, args.test_file)
	print 'elh'
"""
# a nice document classification example: http://scikit-learn.org/stable/auto_examples/document_classification_20newsgroups.html
# we like ensemble: http://scikit-learn.org/stable/modules/ensemble.html
You use the vocabulary parameter to specify what features should be used. For example, if you want only emoticons to be extracted, you can do the following:
emoticons = {":)":0, ":P":1, ":(":2}
vect = TfidfVectorizer(vocabulary=emoticons)
matrix = vect.fit_transform(traindata)
This will return a <Nx3 sparse matrix of type '<class 'numpy.int64'>' with M stored elements in Compressed Sparse Row format>]. Notice there are only 3 columns, one for each feature.
If you want the vocabulary to include the emoticons as well as the N most common features, you could calculate the most frequent features first, then merge them with the emoticons and re-vectorize like so:
# calculate the most frequent features first
vect = TfidfVectorizer(max_features=10)
matrix = vect.fit_transform(traindata)
top_features = vect.vocabulary_
n = len(top_features)
# insert the emoticons into the vocabulary of common features
emoticons = {":)":0, ":P":1, ":(":2}
for feature, index in emoticons.items():
    top_features[feature] = n + index
# re-vectorize using both sets of features
# at this point len(top_features) == 13
vect = TfidfVectorizer(vocabulary=top_features)
matrix = vect.fit_transform(traindata)
""" | 
	mit | -7,789,753,431,757,353,000 | 42.592593 | 290 | 0.705091 | false | 
| 
	sjdv1982/seamless | 
	docs/plans/big-data/testing.py | 
	1 | 
	12459 | 
	"""
Performance tests based on tests/highlevel/high-in-low6-memory.py
See also auth-*py.
- Data overhead is now at ~6.5 ms / MB.
  A lot of the data overhead comes from json.dumps. This is to build mixed cells.
  The rest is from get_hash.
  Pure Python version (with get_hash and dumps) is at 6.3 ms / MB,
  so the rest of the data overhead is fine!
- Database upload overhead is about the same (7 ms / MB) with a flatfile backend
  Database download is almost free.
- A structured cell auth operation is about 10 ms.
- map-list macro evaluation is cheap, 5 ms per (parallel) transformation
- re-translation is about 20 ms per transformation (on top of the macro)
- expression evaluation is about 10 ms + 0.5 ms / MB (of input + output) per transformation
  (speculative relationship!)
- BUT: Non-linear scaling:
  between 100 and 1000 parallel transformations, a x4 slowdown is observed for the last three overheads above.
NOTE: Mixed-to-str conversion is expensive, don't do it!
"""
import sys
import seamless
import seamless.core.execute
seamless.core.execute.DIRECT_PRINT = True
seamless.database_sink.connect()
seamless.database_cache.connect()
#seamless.set_ncores(2)
#seamless.set_parallel_evaluations(5)
seamless.set_ncores(8) ###
seamless.set_parallel_evaluations(100)  ###
# for the big testing, 20 evaluations
seamless.set_parallel_evaluations(20)  ###
"""
import logging
logging.basicConfig()
logging.getLogger("seamless").setLevel(logging.DEBUG)
"""
from seamless.highlevel import Context, Cell, Macro
from seamless.highlevel.library import LibraryContainer
import time
import cProfile
cProfile.profiler = cProfile.Profile()
mylib = LibraryContainer("mylib")
mylib.map_list_N = Context()
def constructor(ctx, libctx, context_graph, inp, result):
    m = ctx.m = Macro()
    m.graph = context_graph
    m.pins.result = {"io": "output", "celltype": "mixed", "hash_pattern": {"!": "#"}}
    ctx.inp = Context()
    ctx.cs_inp = Context()
    inp_prefix = "INPUT_"
    m.inp_prefix = inp_prefix
    for key in inp:
        c = Cell()
        ctx.inp[key] = c
        c.hash_pattern = {"!": "#"}
        inp[key].connect(c)
        ctx.cs_inp[key] = Cell("checksum")
        ctx.cs_inp[key] = ctx.inp[key]
        setattr(m, inp_prefix + key , ctx.cs_inp[key])
    def map_list_N(ctx, inp_prefix, graph, **inp):
        #print("INP", inp)
        first_k = list(inp.keys())[0]
        length = len(inp[first_k])
        first_k = first_k[len(inp_prefix):]
        for k0 in inp:
            k = k0[len(inp_prefix):]
            if len(inp[k0]) != length:
                err = "all cells in inp must have the same length, but '{}' has length {} while '{}' has length {}"
                raise ValueError(err.format(k, len(inp[k0]), first_k, length))
        print("LENGTH", length)
        from seamless.core import Cell as CoreCell
        from seamless.core.unbound_context import UnboundContext
        pseudo_connections = []
        ctx.result = cell("mixed", hash_pattern = {"!": "#"})
        ctx.sc_data = cell("mixed", hash_pattern = {"!": "#"})
        ctx.sc_buffer = cell("mixed", hash_pattern = {"!": "#"})
        ctx.sc = StructuredCell(
            data=ctx.sc_data,
            buffer=ctx.sc_buffer,
            inchannels=[(n,) for n in range(length)],
            outchannels=[()],
            hash_pattern = {"!": "#"}
        )
        for n in range(length):
            #print("MACRO", n+1)
            hc = HighLevelContext(graph)
            subctx = "subctx%d" % (n+1)
            setattr(ctx, subctx, hc)
            if not hasattr(hc, "inp"):
                raise TypeError("map_list_N context must have a subcontext called 'inp'")
            hci = hc.inp
            if not isinstance(hci, UnboundContext):
                raise TypeError("map_list_N context must have an attribute 'inp' that is a context, not a {}".format(type(hci)))
            for k0 in inp:
                k = k0[len(inp_prefix):]
                if not hasattr(hci, k):
                    raise TypeError("map_list_N context must have a cell called inp.'{}'".format(k))
                if isinstance(hci[k], StructuredCell):
                    raise TypeError("map_list_N context has a cell called inp.'{}', but its celltype must be mixed, not structured".format(k))
                if not isinstance(hci[k], CoreCell):
                    raise TypeError("map_list_N context must have an attribute inp.'{}' that is a cell, not a {}".format(k, type(hci[k])))
                if hci[k].celltype != "mixed":
                    raise TypeError("map_list_N context has a cell called inp.'{}', but its celltype must be mixed, not {}".format(k, hci[k].celltype))
                con = [".." + k], ["ctx", subctx, "inp", k]
                pseudo_connections.append(con)
                cs = inp[k0][n]
                hci[k].set_checksum(cs)
            resultname = "result%d" % (n+1)
            setattr(ctx, resultname, cell("mixed"))
            c = getattr(ctx, resultname)
            hc.result.connect(c)
            c.connect(ctx.sc.inchannels[(n,)])
            con = ["ctx", subctx, "result"], ["..result"]
            pseudo_connections.append(con)
        ctx.sc.outchannels[()].connect(ctx.result)
        ctx._pseudo_connections = pseudo_connections
        print("/MACRO")
        """
        import logging
        logging.basicConfig()
        logging.getLogger("seamless").setLevel(logging.DEBUG)
        """
        import cProfile
        cProfile.profiler.enable()
    m.code = map_list_N
    ctx.result = Cell()
    ctx.result.hash_pattern = {"!": "#"}
    ctx.result = m.result
    result.connect_from(ctx.result)
mylib.map_list_N.constructor = constructor
mylib.map_list_N.params = {
    "context_graph": "context",
    "inp": {
        "type": "celldict",
        "io": "input"
    },
    "result": {
        "type": "cell",
        "io": "output"
    },
}
ctx = Context()
ctx.adder = Context()
sctx = ctx.adder
sctx.inp = Context()
sctx.inp.a = Cell("mixed")
sctx.inp.b = Cell("mixed")
sctx.a = Cell("mixed")
sctx.b = Cell("mixed")
sctx.a = sctx.inp.a
sctx.b = sctx.inp.b
def add(a,b):
    print("ADD", a[:10])
    return a+b
sctx.add = add
sctx.add.a = sctx.a
sctx.add.b = sctx.b
sctx.result = sctx.add
sctx.result.celltype = "mixed"
ctx.compute()
ctx.data_a = Cell()
ctx.data_a.hash_pattern = {"!": "#"}
ctx.data_b = Cell()
ctx.data_b.hash_pattern = {"!": "#"}
ctx.compute()
# Next section is 14.5 secs (if the database is filled), but can be elided to ~0.5s by setting checksum directly (if in flatfile cache).
# Not having a DB at all is also 13 secs, so DB request communication (without upload) doesn't cost much.
repeat = int(10e6)
#repeat = int(5)
#for n in range(1000): # 2x10 GB
#for n in range(100): # 2x1 GB
for n in range(1000):
    a = "A:%d:" % n + str(n%10) * repeat
    b = "B:%d:" % n + str(n%10) * repeat
    ctx.data_a[n] = a
    ctx.data_b[n] = b
    if n % 20 == 0:
        ctx.compute()
    print(n+1)
ctx.compute()
print(ctx.data_a.checksum)
print(ctx.data_b.checksum)
"""
ctx.data_a.set_checksum("d07050610c50de8c810aa1d1e322786ed8932cf6eafa0fbe1f132b2c881af9c2")
ctx.data_b.set_checksum("374c02504f89ed0a760b03c3e1fd2258988576b919d763254709b66fc7bfb22e")
ctx.compute()
"""
#
### For repeat=10 million, 1000 items
### ctx.data_a.set_checksum("fa4e6aa7e7edaa6feb036fd5e8c28ffc48575cefc332187552c5be4bf0511af8")
### ctx.data_b.set_checksum("2988c44780790e4ffceb1f97391e475f165e316f27a656957282a2998aee9d4f")
### For repeat=10 million, 200 items
### ctx.data_a.set_checksum("d07050610c50de8c810aa1d1e322786ed8932cf6eafa0fbe1f132b2c881af9c2")
### ctx.data_b.set_checksum("374c02504f89ed0a760b03c3e1fd2258988576b919d763254709b66fc7bfb22e")
### For repeat=10 million
### ctx.data_a.set_checksum("983730afb7ab41d524b72f1097daaf4a3c15b98943291f96e523730849cabe8c")
### ctx.data_b.set_checksum("46dabc02b59be44064a9e06dd50bc6841833578c2b6339fbc43f090cc17831fa")
### For repeat=5
### ctx.data_a.set_checksum("9b4a551a6c1c5830d6070b9c22ae1788b9743e9637be47d56103bcda019a897c")
### ctx.data_b.set_checksum("9820f1ab795db7b0d195f21966ecb071d76e9ce2fd3a90845974a3905584eb3e")
ctx.compute()
"""
If there is no database (100 x repeat 10e6):
- 13 secs up to here (6.5 ms per MB)
- 0.5 secs to evaluate the macro
- 2.3 secs (2.8 - 0.5) for re-translation (23 ms per transformer)
- 32 secs total time, which leaves 32 - 13 - 0.5 = 18.5 secs for transformation and expression evaluation
  Since 13 secs is required for checksum calculation and decoding, that means ~5.5 secs (55 ms per transformer) overhead
  This is a supplement of 32 ms over just re-translation
If there is no database (100 x repeat 5):
- 2.3 secs up to here (12 ms per auth operation)
- Still 0.5 secs to evaluate the macro
- Still 2.3 secs (2.8 - 0.5) for re-translation (23 ms per transformer, independent of data size!)
- 6.2 secs total time, which leaves 6.2 - 2.3 - 0.5 = 3.5 secs for transformation and expression evaluation
  This is an overhead of 35 ms per transformer, a supplement of just 12 ms over re-translation
  The 20 ms reduction compared to above comes from not handling 2x10 MB of input and 20 MB of output,
  so that's 0.5 ms/MB.
If there is no database (1000 x repeat 5):
- 11.7 secs up to here (12 ms per auth operation). So scales linearly.
- 6.5 secs to evaluate the macro, so scales about linearly
- 98 secs (104.5 - 6.5) for re-translation, which is 4x slower than above  (98 ms)
- 145 secs total time, which leaves 145 - 11.7 - 6.5 = 127 secs for transformation and expression evaluation
  This is an overhead of 127 ms per transformer, which is 4x slower than above (127 ms).
  => So in principle, 90 seconds slower than it might be
    - Some 45 secs is await-upon-connection-tasks, this could be optimized?
    - 12 seconds from isinstance is probably unavoidable
    - 9 seconds comes from validate deep structure, that may be unavoidable
    - 5 seconds each from taskmanager.add_task (61k tasks) and asyncio.Task.done (119 million tasks). Avoidable?
  => do maplist-inside-maplist
If the database has not been filled:
- 27.5 secs up to here
If the database has been filled:
- 14 secs up to here: to synthesize the data, and to verify that all is present
  So the raw upload is 13.5 seconds (27.5 - 14); and communication with the DB delays only 1 sec.
- 1.5 secs up to here, with the above elision.
With the database:
- 1.5 secs to evaluate the macro (DB slows down!)
- 5.5 secs for re-translation
- 45.7 secs total time, which leaves 45.7 - 5.5 - 1.5 = 38.5 secs for transformation and expression evaluation
  Compare this to the 18.5 secs w/o database, this is a loss of 20 secs.
  But we have to count the download of the inputs and upload of the results.
  When removing the tfr entries from the database, transformations will be repeated, but no buffers will be uploaded,
  as the sink knows them already.
  This brings total time down to 32 secs, the same as no database!
  So all of the extra overhead is from upload, and download is almost free. (This could be hard disk caching, though)
- 5.5 secs total time with pulling transformation results out of the DB. Again, download is almost free.
Big test with the database (1000 x repeat 10e6):
- Total time 940 secs. Data upload overhead should be ~120 secs, and Seamless data overhead should be ~140 secs.
- 142 secs for re-translation + macro evaluation (142 ms / transformation), a factor 6 slowdown
- 940 - 142 - 120 - 140 = ~540 secs for evaluation
   I.e. 540 ms per transformation. If the same slowdown applies, it would have been 90.
   But we would have expected 30. So a larger slowdown (fewer parallel expressions may have been a cause too)
"""
ctx.result = Cell()
ctx.result.hash_pattern = {"!": "#"}
ctx.compute()
ctx.include(mylib.map_list_N)
ctx.inst = ctx.lib.map_list_N(
    context_graph = ctx.adder,
    inp = {"a": ctx.data_a, "b": ctx.data_b},
    result = ctx.result
)
ctx.compute()
print("Exception:", ctx.inst.ctx.m.exception)
print(ctx.result.data)
import pstats
sortby = 'tottime'
ps = pstats.Stats(cProfile.profiler).sort_stats(sortby)
ps.print_stats(40)
t0 = time.time()
"""
print("Re-set")
graph = ctx.get_graph()
ctx_dummy = Context()
dummy_graph = ctx_dummy.get_graph()
ctx.set_graph(dummy_graph)
ctx.translate(force=True)
ctx.compute()
print(time.time()-t0)
print("Re-eval")
ctx.set_graph(graph)
"""
"""
ctx.translate(force=True)
ctx.compute()
print(time.time()-t0)
""" | 
	mit | -8,654,298,033,990,642,000 | 34.804598 | 151 | 0.662332 | false | 
| 
	PuzzleboxIO/jigsaw-python | 
	Puzzlebox/Jigsaw/Design_Plugin_Eeg.py | 
	1 | 
	34778 | 
	# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'interface/puzzlebox_jigsaw_interface_design-plugin_eeg.ui'
#
# Created: Sun May 10 18:28:33 2015
#      by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Form(object):
	def setupUi(self, Form):
		Form.setObjectName("Form")
#Form.resize()
		Form.setAutoFillBackground(False)
		self.verticalLayoutWidget_2 = QtGui.QWidget(Form)
		self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(0, 0, 975, 776))
		self.verticalLayoutWidget_2.setObjectName("verticalLayoutWidget_2")
		self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)
		self.verticalLayout.setContentsMargins(0, 0, 0, 0)
		self.verticalLayout.setObjectName("verticalLayout")
		self.horizontalLayout_7 = QtGui.QHBoxLayout()
		self.horizontalLayout_7.setContentsMargins(4, 4, 4, 4)
		self.horizontalLayout_7.setObjectName("horizontalLayout_7")
		spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
		self.horizontalLayout_7.addItem(spacerItem)
		self.verticalLayoutEEGSource = QtGui.QVBoxLayout()
		self.verticalLayoutEEGSource.setSpacing(6)
		self.verticalLayoutEEGSource.setObjectName("verticalLayoutEEGSource")
		self.labelEEGHeadsetModel = QtGui.QLabel(self.verticalLayoutWidget_2)
		self.labelEEGHeadsetModel.setAlignment(QtCore.Qt.AlignCenter)
		self.labelEEGHeadsetModel.setObjectName("labelEEGHeadsetModel")
		self.verticalLayoutEEGSource.addWidget(self.labelEEGHeadsetModel)
		self.comboBoxEEGHeadsetModel = QtGui.QComboBox(self.verticalLayoutWidget_2)
		self.comboBoxEEGHeadsetModel.setEnabled(True)
		self.comboBoxEEGHeadsetModel.setFocusPolicy(QtCore.Qt.NoFocus)
		self.comboBoxEEGHeadsetModel.setObjectName("comboBoxEEGHeadsetModel")
		self.comboBoxEEGHeadsetModel.addItem("")
		self.comboBoxEEGHeadsetModel.addItem("")
		self.comboBoxEEGHeadsetModel.addItem("")
		self.comboBoxEEGHeadsetModel.addItem("")
		self.comboBoxEEGHeadsetModel.addItem("")
		self.verticalLayoutEEGSource.addWidget(self.comboBoxEEGHeadsetModel)
		self.line_3 = QtGui.QFrame(self.verticalLayoutWidget_2)
		self.line_3.setFrameShape(QtGui.QFrame.HLine)
		self.line_3.setFrameShadow(QtGui.QFrame.Sunken)
		self.line_3.setObjectName("line_3")
		self.verticalLayoutEEGSource.addWidget(self.line_3)
		self.labelEEGServerSource = QtGui.QLabel(self.verticalLayoutWidget_2)
		self.labelEEGServerSource.setAlignment(QtCore.Qt.AlignCenter)
		self.labelEEGServerSource.setObjectName("labelEEGServerSource")
		self.verticalLayoutEEGSource.addWidget(self.labelEEGServerSource)
		self.comboBoxEEGSource = QtGui.QComboBox(self.verticalLayoutWidget_2)
		self.comboBoxEEGSource.setEnabled(True)
		self.comboBoxEEGSource.setObjectName("comboBoxEEGSource")
		self.comboBoxEEGSource.addItem("")
		self.comboBoxEEGSource.addItem("")
		self.comboBoxEEGSource.addItem("")
		self.comboBoxEEGSource.addItem("")
		self.comboBoxEEGSource.addItem("")
		self.comboBoxEEGSource.addItem("")
		self.comboBoxEEGSource.addItem("")
		self.verticalLayoutEEGSource.addWidget(self.comboBoxEEGSource)
		self.comboBoxDeviceSelect = QtGui.QComboBox(self.verticalLayoutWidget_2)
		self.comboBoxDeviceSelect.setObjectName("comboBoxDeviceSelect")
		self.comboBoxDeviceSelect.addItem("")
		self.verticalLayoutEEGSource.addWidget(self.comboBoxDeviceSelect)
		self.pushButtonControlSearch = QtGui.QPushButton(self.verticalLayoutWidget_2)
		self.pushButtonControlSearch.setObjectName("pushButtonControlSearch")
		self.verticalLayoutEEGSource.addWidget(self.pushButtonControlSearch)
		self.checkBoxControlEmulateThinkGear = QtGui.QCheckBox(self.verticalLayoutWidget_2)
		self.checkBoxControlEmulateThinkGear.setChecked(True)
		self.checkBoxControlEmulateThinkGear.setObjectName("checkBoxControlEmulateThinkGear")
		self.verticalLayoutEEGSource.addWidget(self.checkBoxControlEmulateThinkGear)
		self.lineControlSourceServer = QtGui.QFrame(self.verticalLayoutWidget_2)
		self.lineControlSourceServer.setFrameShape(QtGui.QFrame.HLine)
		self.lineControlSourceServer.setFrameShadow(QtGui.QFrame.Sunken)
		self.lineControlSourceServer.setObjectName("lineControlSourceServer")
		self.verticalLayoutEEGSource.addWidget(self.lineControlSourceServer)
		self.checkBoxControlEnableServer = QtGui.QCheckBox(self.verticalLayoutWidget_2)
		self.checkBoxControlEnableServer.setChecked(True)
		self.checkBoxControlEnableServer.setObjectName("checkBoxControlEnableServer")
		self.verticalLayoutEEGSource.addWidget(self.checkBoxControlEnableServer)
		self.formLayoutControlHostPort = QtGui.QFormLayout()
		self.formLayoutControlHostPort.setSizeConstraint(QtGui.QLayout.SetMinimumSize)
		self.formLayoutControlHostPort.setObjectName("formLayoutControlHostPort")
		self.textLabelSynapseHost = QtGui.QLabel(self.verticalLayoutWidget_2)
		sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
		sizePolicy.setHorizontalStretch(0)
		sizePolicy.setVerticalStretch(0)
		sizePolicy.setHeightForWidth(self.textLabelSynapseHost.sizePolicy().hasHeightForWidth())
		self.textLabelSynapseHost.setSizePolicy(sizePolicy)
		self.textLabelSynapseHost.setAlignment(QtCore.Qt.AlignCenter)
		self.textLabelSynapseHost.setObjectName("textLabelSynapseHost")
		self.formLayoutControlHostPort.setWidget(0, QtGui.QFormLayout.LabelRole, self.textLabelSynapseHost)
		self.lineEditSynapseHost = QtGui.QLineEdit(self.verticalLayoutWidget_2)
		sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
		sizePolicy.setHorizontalStretch(0)
		sizePolicy.setVerticalStretch(0)
		sizePolicy.setHeightForWidth(self.lineEditSynapseHost.sizePolicy().hasHeightForWidth())
		self.lineEditSynapseHost.setSizePolicy(sizePolicy)
		self.lineEditSynapseHost.setAlignment(QtCore.Qt.AlignHCenter)
		self.lineEditSynapseHost.setObjectName("lineEditSynapseHost")
		self.formLayoutControlHostPort.setWidget(0, QtGui.QFormLayout.FieldRole, self.lineEditSynapseHost)
		self.textLabelSynapsePort = QtGui.QLabel(self.verticalLayoutWidget_2)
		sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
		sizePolicy.setHorizontalStretch(0)
		sizePolicy.setVerticalStretch(0)
		sizePolicy.setHeightForWidth(self.textLabelSynapsePort.sizePolicy().hasHeightForWidth())
		self.textLabelSynapsePort.setSizePolicy(sizePolicy)
		self.textLabelSynapsePort.setAlignment(QtCore.Qt.AlignCenter)
		self.textLabelSynapsePort.setWordWrap(False)
		self.textLabelSynapsePort.setObjectName("textLabelSynapsePort")
		self.formLayoutControlHostPort.setWidget(1, QtGui.QFormLayout.LabelRole, self.textLabelSynapsePort)
		self.lineEditSynapsePort = QtGui.QLineEdit(self.verticalLayoutWidget_2)
		sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
		sizePolicy.setHorizontalStretch(0)
		sizePolicy.setVerticalStretch(0)
		sizePolicy.setHeightForWidth(self.lineEditSynapsePort.sizePolicy().hasHeightForWidth())
		self.lineEditSynapsePort.setSizePolicy(sizePolicy)
		self.lineEditSynapsePort.setAlignment(QtCore.Qt.AlignHCenter)
		self.lineEditSynapsePort.setObjectName("lineEditSynapsePort")
		self.formLayoutControlHostPort.setWidget(1, QtGui.QFormLayout.FieldRole, self.lineEditSynapsePort)
		self.verticalLayoutEEGSource.addLayout(self.formLayoutControlHostPort)
		self.horizontalLayout_3 = QtGui.QHBoxLayout()
		self.horizontalLayout_3.setObjectName("horizontalLayout_3")
		self.pushButtonSynapseServer = QtGui.QPushButton(self.verticalLayoutWidget_2)
		self.pushButtonSynapseServer.setMinimumSize(QtCore.QSize(0, 64))
		self.pushButtonSynapseServer.setObjectName("pushButtonSynapseServer")
		self.horizontalLayout_3.addWidget(self.pushButtonSynapseServer)
		self.verticalLayoutEEGSource.addLayout(self.horizontalLayout_3)
		spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
		self.verticalLayoutEEGSource.addItem(spacerItem1)
		self.horizontalLayout_7.addLayout(self.verticalLayoutEEGSource)
		self.verticalLayoutEEGPlot = QtGui.QVBoxLayout()
		self.verticalLayoutEEGPlot.setSizeConstraint(QtGui.QLayout.SetMinimumSize)
		self.verticalLayoutEEGPlot.setObjectName("verticalLayoutEEGPlot")
		self.horizontalLayoutChartTop = QtGui.QHBoxLayout()
		self.horizontalLayoutChartTop.setObjectName("horizontalLayoutChartTop")
		spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
		self.horizontalLayoutChartTop.addItem(spacerItem2)
		self.labelChartTop = QtGui.QLabel(self.verticalLayoutWidget_2)
		self.labelChartTop.setObjectName("labelChartTop")
		self.horizontalLayoutChartTop.addWidget(self.labelChartTop)
		spacerItem3 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
		self.horizontalLayoutChartTop.addItem(spacerItem3)
		self.verticalLayoutEEGPlot.addLayout(self.horizontalLayoutChartTop)
		self.widgetPlotRawEEG = QtGui.QWidget(self.verticalLayoutWidget_2)
		sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
		sizePolicy.setHorizontalStretch(0)
		sizePolicy.setVerticalStretch(0)
		sizePolicy.setHeightForWidth(self.widgetPlotRawEEG.sizePolicy().hasHeightForWidth())
		self.widgetPlotRawEEG.setSizePolicy(sizePolicy)
		self.widgetPlotRawEEG.setMinimumSize(QtCore.QSize(520, 160))
		self.widgetPlotRawEEG.setObjectName("widgetPlotRawEEG")
		self.labelMockupRawEEG = QtGui.QLabel(self.widgetPlotRawEEG)
		self.labelMockupRawEEG.setGeometry(QtCore.QRect(0, 0, 520, 160))
		self.labelMockupRawEEG.setText("")
		self.labelMockupRawEEG.setPixmap(QtGui.QPixmap("../../../../.designer/backup/images/mockup-raw_eeg_waves.png"))
		self.labelMockupRawEEG.setObjectName("labelMockupRawEEG")
		self.verticalLayoutEEGPlot.addWidget(self.widgetPlotRawEEG)
		self.widgetPlotHistoryEmotivCognitiv = QtGui.QWidget(self.verticalLayoutWidget_2)
		sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
		sizePolicy.setHorizontalStretch(0)
		sizePolicy.setVerticalStretch(0)
		sizePolicy.setHeightForWidth(self.widgetPlotHistoryEmotivCognitiv.sizePolicy().hasHeightForWidth())
		self.widgetPlotHistoryEmotivCognitiv.setSizePolicy(sizePolicy)
		self.widgetPlotHistoryEmotivCognitiv.setMinimumSize(QtCore.QSize(520, 160))
		self.widgetPlotHistoryEmotivCognitiv.setObjectName("widgetPlotHistoryEmotivCognitiv")
		self.labelMockupHistoryEmotivCognitiv = QtGui.QLabel(self.widgetPlotHistoryEmotivCognitiv)
		self.labelMockupHistoryEmotivCognitiv.setGeometry(QtCore.QRect(0, 0, 520, 160))
		self.labelMockupHistoryEmotivCognitiv.setText("")
		self.labelMockupHistoryEmotivCognitiv.setPixmap(QtGui.QPixmap("../../../../.designer/backup/images/mockup-esense_values_history.jpg"))
		self.labelMockupHistoryEmotivCognitiv.setScaledContents(True)
		self.labelMockupHistoryEmotivCognitiv.setObjectName("labelMockupHistoryEmotivCognitiv")
		self.verticalLayoutEEGPlot.addWidget(self.widgetPlotHistoryEmotivCognitiv)
		self.horizontalLayoutChartBottom = QtGui.QHBoxLayout()
		self.horizontalLayoutChartBottom.setObjectName("horizontalLayoutChartBottom")
		spacerItem4 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
		self.horizontalLayoutChartBottom.addItem(spacerItem4)
		self.labelChartBottom = QtGui.QLabel(self.verticalLayoutWidget_2)
		self.labelChartBottom.setTextFormat(QtCore.Qt.AutoText)
		self.labelChartBottom.setObjectName("labelChartBottom")
		self.horizontalLayoutChartBottom.addWidget(self.labelChartBottom)
		spacerItem5 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
		self.horizontalLayoutChartBottom.addItem(spacerItem5)
		self.verticalLayoutEEGPlot.addLayout(self.horizontalLayoutChartBottom)
		self.widgetPlotHistoryThinkGear = QtGui.QWidget(self.verticalLayoutWidget_2)
		sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
		sizePolicy.setHorizontalStretch(0)
		sizePolicy.setVerticalStretch(0)
		sizePolicy.setHeightForWidth(self.widgetPlotHistoryThinkGear.sizePolicy().hasHeightForWidth())
		self.widgetPlotHistoryThinkGear.setSizePolicy(sizePolicy)
		self.widgetPlotHistoryThinkGear.setMinimumSize(QtCore.QSize(520, 160))
		self.widgetPlotHistoryThinkGear.setObjectName("widgetPlotHistoryThinkGear")
		self.labelMockupHistoryThinkGear = QtGui.QLabel(self.widgetPlotHistoryThinkGear)
		self.labelMockupHistoryThinkGear.setGeometry(QtCore.QRect(0, 0, 520, 160))
		self.labelMockupHistoryThinkGear.setText("")
		self.labelMockupHistoryThinkGear.setPixmap(QtGui.QPixmap("../../../../.designer/backup/images/mockup-esense_values_history.jpg"))
		self.labelMockupHistoryThinkGear.setScaledContents(True)
		self.labelMockupHistoryThinkGear.setObjectName("labelMockupHistoryThinkGear")
		self.verticalLayoutEEGPlot.addWidget(self.widgetPlotHistoryThinkGear)
		self.widgetPlotHistoryEmotivAffectiv = QtGui.QWidget(self.verticalLayoutWidget_2)
		sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
		sizePolicy.setHorizontalStretch(0)
		sizePolicy.setVerticalStretch(0)
		sizePolicy.setHeightForWidth(self.widgetPlotHistoryEmotivAffectiv.sizePolicy().hasHeightForWidth())
		self.widgetPlotHistoryEmotivAffectiv.setSizePolicy(sizePolicy)
		self.widgetPlotHistoryEmotivAffectiv.setMinimumSize(QtCore.QSize(520, 160))
		self.widgetPlotHistoryEmotivAffectiv.setObjectName("widgetPlotHistoryEmotivAffectiv")
		self.labelMockupHistoryEmotivAffectiv = QtGui.QLabel(self.widgetPlotHistoryEmotivAffectiv)
		self.labelMockupHistoryEmotivAffectiv.setGeometry(QtCore.QRect(0, 0, 520, 160))
		self.labelMockupHistoryEmotivAffectiv.setText("")
		self.labelMockupHistoryEmotivAffectiv.setPixmap(QtGui.QPixmap("../../../../.designer/backup/images/mockup-esense_values_history.jpg"))
		self.labelMockupHistoryEmotivAffectiv.setScaledContents(True)
		self.labelMockupHistoryEmotivAffectiv.setObjectName("labelMockupHistoryEmotivAffectiv")
		self.verticalLayoutEEGPlot.addWidget(self.widgetPlotHistoryEmotivAffectiv)
		spacerItem6 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
		self.verticalLayoutEEGPlot.addItem(spacerItem6)
		self.horizontalLayout_7.addLayout(self.verticalLayoutEEGPlot)
		self.verticalLayoutSessionProfile = QtGui.QVBoxLayout()
		self.verticalLayoutSessionProfile.setObjectName("verticalLayoutSessionProfile")
		self.label = QtGui.QLabel(self.verticalLayoutWidget_2)
		self.label.setObjectName("label")
		self.verticalLayoutSessionProfile.addWidget(self.label)
		self.horizontalLayout_4 = QtGui.QHBoxLayout()
		self.horizontalLayout_4.setSizeConstraint(QtGui.QLayout.SetMinimumSize)
		self.horizontalLayout_4.setObjectName("horizontalLayout_4")
		self.textLabelControlConcentration = QtGui.QLabel(self.verticalLayoutWidget_2)
		sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
		sizePolicy.setHorizontalStretch(0)
		sizePolicy.setVerticalStretch(0)
		sizePolicy.setHeightForWidth(self.textLabelControlConcentration.sizePolicy().hasHeightForWidth())
		self.textLabelControlConcentration.setSizePolicy(sizePolicy)
		self.textLabelControlConcentration.setWordWrap(False)
		self.textLabelControlConcentration.setObjectName("textLabelControlConcentration")
		self.horizontalLayout_4.addWidget(self.textLabelControlConcentration)
		spacerItem7 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
		self.horizontalLayout_4.addItem(spacerItem7)
		self.pushButtonControlConcentrationEnable = QtGui.QPushButton(self.verticalLayoutWidget_2)
		self.pushButtonControlConcentrationEnable.setEnabled(False)
		self.pushButtonControlConcentrationEnable.setCheckable(True)
		self.pushButtonControlConcentrationEnable.setChecked(True)
		self.pushButtonControlConcentrationEnable.setObjectName("pushButtonControlConcentrationEnable")
		self.horizontalLayout_4.addWidget(self.pushButtonControlConcentrationEnable)
		self.verticalLayoutSessionProfile.addLayout(self.horizontalLayout_4)
		self.progressBarControlConcentration = QtGui.QProgressBar(self.verticalLayoutWidget_2)
		self.progressBarControlConcentration.setEnabled(True)
		self.progressBarControlConcentration.setMinimumSize(QtCore.QSize(0, 60))
		self.progressBarControlConcentration.setProperty("value", 0)
		self.progressBarControlConcentration.setOrientation(QtCore.Qt.Horizontal)
		self.progressBarControlConcentration.setObjectName("progressBarControlConcentration")
		self.verticalLayoutSessionProfile.addWidget(self.progressBarControlConcentration)
		self.horizontalLayout_5 = QtGui.QHBoxLayout()
		self.horizontalLayout_5.setSizeConstraint(QtGui.QLayout.SetMinimumSize)
		self.horizontalLayout_5.setObjectName("horizontalLayout_5")
		self.textLabelControlRelaxation = QtGui.QLabel(self.verticalLayoutWidget_2)
		sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
		sizePolicy.setHorizontalStretch(0)
		sizePolicy.setVerticalStretch(0)
		sizePolicy.setHeightForWidth(self.textLabelControlRelaxation.sizePolicy().hasHeightForWidth())
		self.textLabelControlRelaxation.setSizePolicy(sizePolicy)
		self.textLabelControlRelaxation.setWordWrap(False)
		self.textLabelControlRelaxation.setObjectName("textLabelControlRelaxation")
		self.horizontalLayout_5.addWidget(self.textLabelControlRelaxation)
		spacerItem8 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
		self.horizontalLayout_5.addItem(spacerItem8)
		self.pushButtonControlRelaxationEnable = QtGui.QPushButton(self.verticalLayoutWidget_2)
		self.pushButtonControlRelaxationEnable.setEnabled(False)
		self.pushButtonControlRelaxationEnable.setCheckable(True)
		self.pushButtonControlRelaxationEnable.setChecked(True)
		self.pushButtonControlRelaxationEnable.setObjectName("pushButtonControlRelaxationEnable")
		self.horizontalLayout_5.addWidget(self.pushButtonControlRelaxationEnable)
		self.verticalLayoutSessionProfile.addLayout(self.horizontalLayout_5)
		self.progressBarControlRelaxation = QtGui.QProgressBar(self.verticalLayoutWidget_2)
		self.progressBarControlRelaxation.setEnabled(True)
		self.progressBarControlRelaxation.setMinimumSize(QtCore.QSize(0, 60))
		self.progressBarControlRelaxation.setProperty("value", 0)
		self.progressBarControlRelaxation.setOrientation(QtCore.Qt.Horizontal)
		self.progressBarControlRelaxation.setObjectName("progressBarControlRelaxation")
		self.verticalLayoutSessionProfile.addWidget(self.progressBarControlRelaxation)
		spacerItem9 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
		self.verticalLayoutSessionProfile.addItem(spacerItem9)
		self.horizontalLayout_6 = QtGui.QHBoxLayout()
		self.horizontalLayout_6.setObjectName("horizontalLayout_6")
		self.textLabelControlConnectionLevel = QtGui.QLabel(self.verticalLayoutWidget_2)
		sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
		sizePolicy.setHorizontalStretch(0)
		sizePolicy.setVerticalStretch(0)
		sizePolicy.setHeightForWidth(self.textLabelControlConnectionLevel.sizePolicy().hasHeightForWidth())
		self.textLabelControlConnectionLevel.setSizePolicy(sizePolicy)
		self.textLabelControlConnectionLevel.setWordWrap(False)
		self.textLabelControlConnectionLevel.setObjectName("textLabelControlConnectionLevel")
		self.horizontalLayout_6.addWidget(self.textLabelControlConnectionLevel)
		spacerItem10 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
		self.horizontalLayout_6.addItem(spacerItem10)
		self.verticalLayoutSessionProfile.addLayout(self.horizontalLayout_6)
		self.progressBarControlConnectionLevel = QtGui.QProgressBar(self.verticalLayoutWidget_2)
		self.progressBarControlConnectionLevel.setEnabled(True)
		self.progressBarControlConnectionLevel.setProperty("value", 0)
		self.progressBarControlConnectionLevel.setOrientation(QtCore.Qt.Horizontal)
		self.progressBarControlConnectionLevel.setObjectName("progressBarControlConnectionLevel")
		self.verticalLayoutSessionProfile.addWidget(self.progressBarControlConnectionLevel)
		self.line_6 = QtGui.QFrame(self.verticalLayoutWidget_2)
		self.line_6.setFrameShape(QtGui.QFrame.HLine)
		self.line_6.setFrameShadow(QtGui.QFrame.Sunken)
		self.line_6.setObjectName("line_6")
		self.verticalLayoutSessionProfile.addWidget(self.line_6)
		self.formLayoutSessionData = QtGui.QFormLayout()
		self.formLayoutSessionData.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
		self.formLayoutSessionData.setObjectName("formLayoutSessionData")
		self.textLabelSessionTimeTitle = QtGui.QLabel(self.verticalLayoutWidget_2)
		self.textLabelSessionTimeTitle.setAlignment(QtCore.Qt.AlignCenter)
		self.textLabelSessionTimeTitle.setObjectName("textLabelSessionTimeTitle")
		self.formLayoutSessionData.setWidget(0, QtGui.QFormLayout.LabelRole, self.textLabelSessionTimeTitle)
		self.textLabelSessionTime = QtGui.QLabel(self.verticalLayoutWidget_2)
		self.textLabelSessionTime.setAlignment(QtCore.Qt.AlignCenter)
		self.textLabelSessionTime.setObjectName("textLabelSessionTime")
		self.formLayoutSessionData.setWidget(0, QtGui.QFormLayout.FieldRole, self.textLabelSessionTime)
		self.textLabelPacketsReceivedTitle = QtGui.QLabel(self.verticalLayoutWidget_2)
		self.textLabelPacketsReceivedTitle.setAlignment(QtCore.Qt.AlignCenter)
		self.textLabelPacketsReceivedTitle.setObjectName("textLabelPacketsReceivedTitle")
		self.formLayoutSessionData.setWidget(1, QtGui.QFormLayout.LabelRole, self.textLabelPacketsReceivedTitle)
		self.textLabelPacketsReceived = QtGui.QLabel(self.verticalLayoutWidget_2)
		self.textLabelPacketsReceived.setAlignment(QtCore.Qt.AlignCenter)
		self.textLabelPacketsReceived.setObjectName("textLabelPacketsReceived")
		self.formLayoutSessionData.setWidget(1, QtGui.QFormLayout.FieldRole, self.textLabelPacketsReceived)
		self.textLabelPacketsDroppedTitle = QtGui.QLabel(self.verticalLayoutWidget_2)
		self.textLabelPacketsDroppedTitle.setAlignment(QtCore.Qt.AlignCenter)
		self.textLabelPacketsDroppedTitle.setObjectName("textLabelPacketsDroppedTitle")
		self.formLayoutSessionData.setWidget(2, QtGui.QFormLayout.LabelRole, self.textLabelPacketsDroppedTitle)
		self.textLabelPacketsDropped = QtGui.QLabel(self.verticalLayoutWidget_2)
		self.textLabelPacketsDropped.setAlignment(QtCore.Qt.AlignCenter)
		self.textLabelPacketsDropped.setObjectName("textLabelPacketsDropped")
		self.formLayoutSessionData.setWidget(2, QtGui.QFormLayout.FieldRole, self.textLabelPacketsDropped)
		self.verticalLayoutSessionProfile.addLayout(self.formLayoutSessionData)
		spacerItem11 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
		self.verticalLayoutSessionProfile.addItem(spacerItem11)
		self.horizontalLayout_7.addLayout(self.verticalLayoutSessionProfile)
		spacerItem12 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
		self.horizontalLayout_7.addItem(spacerItem12)
		self.verticalLayout.addLayout(self.horizontalLayout_7)
		self.webViewEEG = QtWebKit.QWebView(self.verticalLayoutWidget_2)
		sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
		sizePolicy.setHorizontalStretch(0)
		sizePolicy.setVerticalStretch(0)
		sizePolicy.setHeightForWidth(self.webViewEEG.sizePolicy().hasHeightForWidth())
		self.webViewEEG.setSizePolicy(sizePolicy)
		self.webViewEEG.setObjectName("webViewEEG")
		self.verticalLayout.addWidget(self.webViewEEG)
		spacerItem13 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
		self.verticalLayout.addItem(spacerItem13)
		self.retranslateUi(Form)
		QtCore.QMetaObject.connectSlotsByName(Form)
	def retranslateUi(self, Form):
		Form.setWindowTitle(QtGui.QApplication.translate("Form", "Puzzlebox Jigsaw - Plug-in - EEG", None, QtGui.QApplication.UnicodeUTF8))
		self.labelEEGHeadsetModel.setText(QtGui.QApplication.translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:600;\">EEG Headset Model</span></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
		self.comboBoxEEGHeadsetModel.setItemText(0, QtGui.QApplication.translate("Form", "NeuroSky MindWave Mobile", None, QtGui.QApplication.UnicodeUTF8))
		self.comboBoxEEGHeadsetModel.setItemText(1, QtGui.QApplication.translate("Form", "NeuroSky MindWave", None, QtGui.QApplication.UnicodeUTF8))
		self.comboBoxEEGHeadsetModel.setItemText(2, QtGui.QApplication.translate("Form", "NeuroSky MindSet", None, QtGui.QApplication.UnicodeUTF8))
		self.comboBoxEEGHeadsetModel.setItemText(3, QtGui.QApplication.translate("Form", "Emotiv EPOC", None, QtGui.QApplication.UnicodeUTF8))
		self.comboBoxEEGHeadsetModel.setItemText(4, QtGui.QApplication.translate("Form", "InterAxon Muse", None, QtGui.QApplication.UnicodeUTF8))
		self.labelEEGServerSource.setText(QtGui.QApplication.translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:600;\">EEG Data Source</span></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
		self.comboBoxEEGSource.setItemText(0, QtGui.QApplication.translate("Form", "Hardware Device", None, QtGui.QApplication.UnicodeUTF8))
		self.comboBoxEEGSource.setItemText(1, QtGui.QApplication.translate("Form", "ThinkGear Emulator", None, QtGui.QApplication.UnicodeUTF8))
		self.comboBoxEEGSource.setItemText(2, QtGui.QApplication.translate("Form", "Puzzlebox Synapse", None, QtGui.QApplication.UnicodeUTF8))
		self.comboBoxEEGSource.setItemText(3, QtGui.QApplication.translate("Form", "ThinkGear Connect", None, QtGui.QApplication.UnicodeUTF8))
		self.comboBoxEEGSource.setItemText(4, QtGui.QApplication.translate("Form", "BCI2000", None, QtGui.QApplication.UnicodeUTF8))
		self.comboBoxEEGSource.setItemText(5, QtGui.QApplication.translate("Form", "MuseIO", None, QtGui.QApplication.UnicodeUTF8))
		self.comboBoxEEGSource.setItemText(6, QtGui.QApplication.translate("Form", "Spacebrew", None, QtGui.QApplication.UnicodeUTF8))
		self.comboBoxDeviceSelect.setItemText(0, QtGui.QApplication.translate("Form", "No Devices Found", None, QtGui.QApplication.UnicodeUTF8))
		self.pushButtonControlSearch.setText(QtGui.QApplication.translate("Form", "Search", None, QtGui.QApplication.UnicodeUTF8))
		self.checkBoxControlEmulateThinkGear.setText(QtGui.QApplication.translate("Form", "Emulate NeuroSky ThinkGear", None, QtGui.QApplication.UnicodeUTF8))
		self.checkBoxControlEnableServer.setText(QtGui.QApplication.translate("Form", "Enable Synapse Server", None, QtGui.QApplication.UnicodeUTF8))
		self.textLabelSynapseHost.setText(QtGui.QApplication.translate("Form", "Host", None, QtGui.QApplication.UnicodeUTF8))
		self.lineEditSynapseHost.setText(QtGui.QApplication.translate("Form", "N/A", None, QtGui.QApplication.UnicodeUTF8))
		self.textLabelSynapsePort.setText(QtGui.QApplication.translate("Form", "<p align=\"center\">Port</p>", None, QtGui.QApplication.UnicodeUTF8))
		self.lineEditSynapsePort.setText(QtGui.QApplication.translate("Form", "N/A", None, QtGui.QApplication.UnicodeUTF8))
		self.pushButtonSynapseServer.setText(QtGui.QApplication.translate("Form", "Connect", None, QtGui.QApplication.UnicodeUTF8))
		self.labelChartTop.setText(QtGui.QApplication.translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:600;\">Raw EEG Waves</span></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
		self.labelChartBottom.setText(QtGui.QApplication.translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:600;\">Brain Signals History</span></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
		self.label.setText(QtGui.QApplication.translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Cantarell\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Sans\'; font-size:10pt; font-weight:600;\">Current Data</span></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
		self.textLabelControlConcentration.setText(QtGui.QApplication.translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"center\" style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:9pt;\">Concentration</span></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
		self.pushButtonControlConcentrationEnable.setToolTip(QtGui.QApplication.translate("Form", "enable/disable concentration processing", None, QtGui.QApplication.UnicodeUTF8))
		self.pushButtonControlConcentrationEnable.setText(QtGui.QApplication.translate("Form", "Enabled", None, QtGui.QApplication.UnicodeUTF8))
		self.textLabelControlRelaxation.setText(QtGui.QApplication.translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"center\" style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:9pt;\">Relaxation</span></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
		self.pushButtonControlRelaxationEnable.setToolTip(QtGui.QApplication.translate("Form", "enable/disable relaxation processing", None, QtGui.QApplication.UnicodeUTF8))
		self.pushButtonControlRelaxationEnable.setText(QtGui.QApplication.translate("Form", "Enabled", None, QtGui.QApplication.UnicodeUTF8))
		self.textLabelControlConnectionLevel.setText(QtGui.QApplication.translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"center\" style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Connection Level</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
		self.textLabelSessionTimeTitle.setText(QtGui.QApplication.translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Cantarell\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:600;\">Connection Time</span></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
		self.textLabelSessionTime.setText(QtGui.QApplication.translate("Form", "00:00:00", None, QtGui.QApplication.UnicodeUTF8))
		self.textLabelPacketsReceivedTitle.setText(QtGui.QApplication.translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Cantarell\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:600;\">Packets Received</span></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
		self.textLabelPacketsReceived.setText(QtGui.QApplication.translate("Form", "0", None, QtGui.QApplication.UnicodeUTF8))
		self.textLabelPacketsDroppedTitle.setText(QtGui.QApplication.translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Cantarell\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:600;\">Packets Dropped</span></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
		self.textLabelPacketsDropped.setText(QtGui.QApplication.translate("Form", "0", None, QtGui.QApplication.UnicodeUTF8))
from PySide import QtWebKit
 | 
	agpl-3.0 | -5,169,383,353,182,562,000 | 77.328829 | 289 | 0.807637 | false | 
| 
	pcaro90/Python-AES | 
	ProgressBar.py | 
	1 | 
	1773 | 
	#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ---------------------------------------------------
# Copyright (c) 2013 Pablo Caro. All Rights Reserved.
# Pablo Caro <[email protected]> - http://pcaro.es/
# ProgressBar.py
# ---------------------------------------------------
import sys
class ProgressBar:
    def __init__(self, min=0, max=100, width=60, charset='[=]'):
        self.min = min
        self.max = max
        self.width = width
        self.current = min
        self.percent = 0.0
        self.int_percent = 0
        if len(charset) != 3:
            charset = '[=]'
        self.charset = charset
        self.bar = ''
        self.used = -1
        self.int_percent_change = False
    def update(self, current):
        self.current = current
        self.percent = (float(self.current-self.min)/(self.max-self.min))*100.0
        int_percent = int(self.percent)
        if int_percent != self.int_percent:
            self.int_percent_change = True
        self.int_percent = int_percent
        self.__generate_bar__()
        if self.int_percent_change:
            self.int_percent_change = False
            return True
        else:
            return False
    def show(self):
        sys.stdout.write(str(self))
        sys.stdout.flush()
    def __str__(self):
        return self.bar
    def __generate_bar__(self):
        self.used = int((float(self.current-self.min)/(self.max-self.min)) *
                        (self.width-6))
        center = self.charset[1] * self.used
        self.bar = (self.charset[0] + center + self.charset[2]
                    + " " + str(self.int_percent) + '%' + '\r')
def main():
    pass
if __name__ == '__main__':
    main()
 | 
	bsd-3-clause | 3,480,556,556,003,482,000 | 24.462687 | 79 | 0.478285 | false | 
| 
	kefatong/ops | 
	app/models.py | 
	1 | 
	24843 | 
	# -*- coding:utf-8 -*-
__author__ = 'eric'
import hashlib
from datetime import datetime
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from werkzeug.security import generate_password_hash, check_password_hash
from flask.ext.login import UserMixin
from flask import current_app, request, url_for
from . import db
from . import login_manager
class Permission:
    ADMINISTER = 0x001
    DEVICE_LOOK = 0x002
    DEVICE_EDIT = 0x004
    DEVICE_DEL = 0x008
    RACK_LOOK = 0x010
    RACK_EDIT = 0x020
    RACK_DEL = 0x040
    IDC_LOOK = 0x080
    IDC_EDIT = 0x100
    IDC_DEL = 0x200
    VIRTMACHINE_LOOK = 0x400
    VIRTMACHINE_EDIT = 0x800
    VIRTMACHINE_DEL = 0x1000
    NETWORKDEVICE_LOOK = 0x2000
    NETWORKDEVICE_EDIT = 0x4000
    NETWORKDEVICE_DEL = 0x8000
    DATABASE_LOOK = 0x10000
    DATABASE_EDIT = 0x20000
    DATABASE_DEL = 0x40000
    SOFTWARE_LOOK = 0x80000
    SOFTWARE_EDIT = 0x100000
    SOFTWARE_DEL = 0x200000
    DEVICEPERMISSION_LOOK = 0x400000
    DEVICEPERMISSION_EDIT = 0x800000
    DEVICEPERMISSION_DEL = 0x1000000
    ASSET_LOOK = 0x2000000
    ASSET_EDIT = 0x4000000
    ASSET_DEL = 0x8000000
    USER_LOOK = 0x10000000
    USER_EDIT = 0x20000000
    USER_DEL = 0x40000000
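# Added sketch (hypothetical names, not from the original model): these flags
# combine as a bitmask with bitwise OR and are checked in User.can() below
# with a bitwise AND, e.g.
#     operator = Permission.DEVICE_LOOK | Permission.DEVICE_EDIT     # 0x006
#     (operator & Permission.DEVICE_EDIT) == Permission.DEVICE_EDIT  # True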
class Role(db.Model):
    __tablename__ = 'roles'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    default = db.Column(db.Boolean, index=True, default=False)
    permissions = db.Column(db.Integer)
    users = db.relationship('User', backref='role', lazy='dynamic')
    def to_json(self):
        json_role = {
            'url': self.id,
            'name': self.name,
            'default': self.default,
            'permissions': self.permissions,
            'users': self.users,
        }
        return json_role
    @staticmethod
    def insert_roles():
        roles = {
            'User': (Permission.DEVICE_LOOK, True),
            'manager': (Permission.USER_EDIT |
                        Permission.DEVICE_LOOK |
                        Permission.DEVICE_EDIT, False),
            'Administrator': (Permission.USER_EDIT |
                              Permission.DEVICE_LOOK |
                              Permission.DEVICE_EDIT |
                              Permission.DEVICE_DEL |
                              Permission.ADMINISTER, False)
        }
        for r in roles:
            role = Role.query.filter_by(name=r).first()
            if role is None:
                role = Role(name=r)
            role.permissions = roles[r][0]
            role.default = roles[r][1]
            db.session.add(role)
        db.session.commit()
    def __repr__(self):
        return '<Role %r>' % self.name
class User(UserMixin, db.Model):
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(64), unique=True, index=True)  # Email Address
    username = db.Column(db.String(64), unique=True, index=True)  # Username
    password_hash = db.Column(db.String(128))  # password Md5 Hash
    role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))  # linked to the roles table
    name = db.Column(db.String(64))  # real name
    location = db.Column(db.String(64))  # address
    position = db.Column(db.String(64))  # job title
    about_me = db.Column(db.Text())  # about me
    phone = db.Column(db.String(11))  # mobile phone number
    qq = db.Column(db.String(13))  # QQ number
    member_since = db.Column(db.DateTime(), default=datetime.utcnow)  # registration time
    last_seen = db.Column(db.DateTime(), default=datetime.utcnow)  # last login time
    confirmed = db.Column(db.Boolean, default=False)  # account status
    avatar_hash = db.Column(db.String(32))  # avatar
    logs = db.relationship('Logger', backref='user', lazy='dynamic')
    def to_json(self):
        json_user = {
            'url': self.id,
            'email': self.email,
            'username': self.username,
            'password_hash': self.password_hash,
            'role': self.role,
            'name': self.name,
            'location': self.location,
            'position': self.position,
            'about_me': self.about_me,
            'phone': self.phone,
            'qq': self.qq,
            'member_sine': self.member_since,
            'last_seen': self.last_seen,
        }
        return json_user
    def __init__(self, **kwargs):
        super(User, self).__init__(**kwargs)
        if self.role is None:
            if self.email == current_app.config.get('FLASK_ADMIN', None):
                self.role = Role.query.filter_by(permissions=0xff).first()
            if self.role is None:
                self.role = Role.query.filter_by(default=True).first()
        if self.email is not None and self.avatar_hash is None:
            self.avatar_hash = hashlib.md5(self.email.encode('UTF-8')).hexdigest()
    @staticmethod
    def insert_admin_user():
        r = Role()
        r.insert_roles()
        adminRole = Role.query.all()[-1]
        u = User.query.filter_by(username='administrator').first()
        if u is None:
            u = User()
        u.name = 'Admin'
        u.email = '[email protected]'
        u.username = 'administrator'
        u.password = '123456'
        u.confirmed = True
        u.role = adminRole
        db.session.add(u)
        db.session.commit()
    @staticmethod
    def generate_fake(count=1000):
        from sqlalchemy.exc import IntegrityError
        from random import seed
        import forgery_py
        seed()
        for i in range(count):
            u = User(email=forgery_py.internet.email_address(),
                     username=forgery_py.internet.user_name(True),
                     password=forgery_py.lorem_ipsum.word(),
                     confirmed=True,
                     name=forgery_py.name.full_name(),
                     location=forgery_py.address.city(),
                     position=forgery_py.lorem_ipsum.sentence(),
                     about_me=forgery_py.lorem_ipsum.sentence(),
                     member_since=forgery_py.date.date(True))
            db.session.add(u)
            try:
                db.session.commit()
            except IntegrityError:
                print "db commit email : {0} Error".format(u.email)
                db.session.rollback()
    def gravatar(self, size=100, default='identicon', rating='g'):
        if request.is_secure:
            url = 'https://secure.gravatar.com/avatar'
        else:
            url = 'http://secure.gravatar.com/avatar'
        hash = self.avatar_hash or hashlib.md5(self.email.encode('UTF-8')).hexdigest()
        return '{url}/{hash}?s={size}&d={default}&r={rating}'.format(url=url, hash=hash, size=size, default=default,
                                                                     rating=rating)
    def ping(self):
        self.last_seen = datetime.utcnow()
        db.session.add(self)
    def can(self, permissions):
        return self.role is not None and (self.role.permissions & permissions) == permissions
    def is_administrator(self):
        return self.can(Permission.ADMINISTER)
    @property
    def password(self):
        raise AttributeError('password is not a readable attribute')
    @password.setter
    def password(self, password):
        self.password_hash = generate_password_hash(password)
    def verify_password(self, password):
        return check_password_hash(self.password_hash, password)
    def generate_confirmation_token(self, expiration=3600):
        s = Serializer(current_app.config['SECRET_KEY'], expires_in=expiration)
        return s.dumps({'confirm': self.id})
    def confirm(self, token):
        s = Serializer(current_app.config['SECRET_KEY'])
        try:
            data = s.loads(token)
        except:
            return False
        if data.get('confirm', None) != self.id:
            return False
        self.confirmed = True
        db.session.add(self)
        return True
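    # Added sketch of the token round trip (3600s is the default expiry above):
    #     token = user.generate_confirmation_token()
    #     user.confirm(token)   # -> True, and flags the account as confirmed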
    def generate_reset_token(self, expiration=3600):
        s = Serializer(current_app.config['SECRET_KEY'], expires_in=expiration)
        return s.dumps({'reset': self.id})
    def reset_password(self, token, new_password):
        s = Serializer(current_app.config['SECRET_KEY'])
        try:
            data = s.loads(token)
        except:
            return False
        if data.get('reset', None) != self.id:
            return False
        self.password = new_password
        db.session.add(self)
        return True
    def generate_change_email_token(self, new_email, expiration=3600):
        s = Serializer(current_app.config['SECRET_KEY'], expires_in=expiration)
        return s.dumps({'change_email': self.id, 'new_email': new_email})
    def change_email(self, token):
        s = Serializer(current_app.config['SECRET_KEY'])
        try:
            data = s.loads(token)
        except:
            return False
        if data.get('change_email') != self.id:
            return False
        new_email = data.get('new_email', None)
        if new_email is None:
            return False
        if self.query.filter_by(email=new_email).first() is not None:
            return False
        self.email = new_email
        self.avatar_hash = hashlib.md5(self.email.encode('UTF-8')).hexdigest()
        db.session.add(self)
        return True
    def generate_auth_token(self, expiration=3600):
        s = Serializer(current_app.config['SECRET_KEY'], expires_in=expiration)
        return s.dumps({'id': self.id})
    @staticmethod
    def verify_auth_token(token):
        s = Serializer(current_app.config['SECRET_KEY'])
        try:
            data = s.loads(token)
        except:
            return None
        return User.query.get(data['id'])
    def __repr__(self):
        return '<User %r>' % self.username
class DevicePower(db.Model):
    __tablename__ = 'devicePowers'
    id = db.Column(db.Integer, primary_key=True)
    type = db.Column(db.Integer)
    enabled = db.Column(db.Boolean, default=False)
    ip = db.Column(db.String(64))  # remote control card IP address
    user = db.Column(db.String(64))
    password_hash = db.Column(db.String(256))
    powerid = db.Column(db.String(256))
    device_id = db.Column(db.ForeignKey('devices.id'))
    isdelete = db.Column(db.Boolean, default=False)  # deleted flag
    remarks = db.Column(db.Text)  # remarks
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    def generate_password_token(self, password):
        from itsdangerous import JSONWebSignatureSerializer as Serializer
        s = Serializer(current_app.config['SECRET_KEY'])
        return s.dumps({'confirm': password})
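    # Note (added): unlike User.password above, this password is stored as a
    # signed, reversible token (JSONWebSignatureSerializer) rather than a
    # one-way hash, so the value presumably can be recovered later for
    # remote-management logins.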
    @property
    def password(self):
        raise AttributeError('password is not a readable attribute')
    @password.setter
    def password(self, password):
        self.password_hash = self.generate_password_token(password)
    def to_json(self):
        json_power = {
            'url': self.id,
            'type': self.type,
            'enabled': self.enabled,
            'ip': self.ip,
            'user': self.user,
            'password': self.password_hash,
            'powerid': self.powerid,
            'device_id': self.device_id,
        }
        return json_power
    def __repr__(self):
        return '<DevicePower %r>' % self.id
DeviceRelationshipDeviceGroup = db.Table('DeviceRelationshipDeviceGroup',
                                         db.Column('deviceGroup_id', db.Integer, db.ForeignKey('deviceGroup.id')),
                                         db.Column('device_id', db.Integer, db.ForeignKey('devices.id')),
                                         )
class DeviceGroup(db.Model):
    __tablename__ = 'deviceGroup'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    business = db.Column(db.String(64))  # owning business
    devices = db.relationship('Device', secondary=DeviceRelationshipDeviceGroup,
                              backref=db.backref('DeviceGroup', lazy='dynamic'), lazy='dynamic')
    isdelete = db.Column(db.Boolean)
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
class Device(db.Model):
    __tablename__ = 'devices'
    id = db.Column(db.Integer, primary_key=True)
    device_id = db.Column(db.Integer, unique=True, index=True)
    hostname = db.Column(db.String(64))  # Hostname
    ip = db.Column(db.String(64))
    an = db.Column(db.String(64), unique=True, index=True)
    sn = db.Column(db.String(64), unique=True, index=True)  # SN, device serial number
    os = db.Column(db.String(64))  # OS type
    manufacturer = db.Column(db.String(64))  # manufacturer
    brand = db.Column(db.String(64))  # brand
    model = db.Column(db.String(64))  # model
    cpumodel = db.Column(db.String(64))  # CPU model
    cpucount = db.Column(db.Integer)  # CPU core count
    memsize = db.Column(db.Integer)  # memory capacity
    disksize = db.Column(db.String(64))
    business = db.Column(db.String(64))  # owning business
    powerstatus = db.Column(db.Integer)  # power status
    onstatus = db.Column(db.Integer)  # usage status
    usedept = db.Column(db.String(64))  # using department
    usestaff = db.Column(db.String(64))  # department user
    mainuses = db.Column(db.String(128))  # main purpose
    isdelete = db.Column(db.Boolean)
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
    def __repr__(self):
        return '<Device %r>' % self.hostname
class ModuleClass(db.Model):
    __tablename__ = 'moduleClass'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    separator = db.Column(db.String(64), default='\t')
    isdelete = db.Column(db.Boolean)
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
    def __repr__(self):
        return '<ModuleClass %r>' % self.name
class TaskClass(db.Model):
    __tablename__ = 'taskClass'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    module_id = db.Column(db.Integer, db.ForeignKey('moduleClass.id'))
    separator = db.Column(db.String(64), default='  ')
    isdelete = db.Column(db.Boolean)
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
    def __repr__(self):
        return '<TaskClass %r>' % self.name
TaskRelationshipTaskGroup = db.Table('TaskRelationshipTaskGroup',
                                     db.Column('deviceTaskGroup_id', db.Integer, db.ForeignKey('deviceTaskGroup.id')),
                                     db.Column('deviceTask_id', db.Integer, db.ForeignKey('deviceTasks.id')),
                                     db.Column('PQ', db.Integer)
                                     )
class DeviceTaskGroup(db.Model):
    __tablename__ = 'deviceTaskGroup'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    enabled = db.Column(db.Boolean)  # enabled flag
    type = db.Column(db.Integer)
    tasks = db.relationship('DeviceTasks', secondary=TaskRelationshipTaskGroup,
                            backref=db.backref('taskGroup', lazy='dynamic'), lazy='dynamic')
    isdelete = db.Column(db.Boolean)
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
    def __repr__(self):
        return '<TaskScripts %r>' % self.name
class DeviceTasks(db.Model):
    __tablename__ = 'deviceTasks'
    id = db.Column(db.Integer, primary_key=True)
    taskname = db.Column(db.String(64))  # task name
    scriptname = db.Column(db.String(256))  # script name
    type = db.Column(db.Integer)  # script type: python, shell, playbook or perl
    arch = db.Column(db.Integer)  # system architecture, to avoid running scripts on the wrong platform
    md5code = db.Column(db.String(128))  # script MD5 digest, to detect tampering
    path = db.Column(db.String(256))  # script UUID
    extra_vars = db.Column(db.Text)
    version = db.Column(db.String(20))  # script version
    enabled = db.Column(db.Boolean)  # enabled flag
    isdelete = db.Column(db.Boolean)
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
    def __repr__(self):
        return '<TaskScripts %r>' % self.taskname
class System(db.Model):
    __tablename__ = 'System'
    id = db.Column(db.Integer, primary_key=True)
    device_id = db.Column(db.Integer)  # device id, read from the CMDB
    an = db.Column(db.String(64))
    sn = db.Column(db.String(64))
    ip = db.Column(db.String(20))  # device IP, fetched automatically once a device is selected; verified against the device at install time
    hostname = db.Column(db.String(64))
    power_ip = db.Column(db.String(32))
    os_version = db.Column(db.String(64))  # OS version
    type = db.Column(db.Integer)  # deployment method: IPMI or DHCP
    post = db.Column(db.Integer, db.ForeignKey('deviceTaskGroup.id'))  # task group to run after the OS is installed
    status = db.Column(db.Integer, default=1)
    isdelete = db.Column(db.Boolean)
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
    def __repr__(self):
        return '<TaskScripts %r>' % self.sn
class ComplianceTasks(db.Model):
    __tablename__ = 'Compliance'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    deviceGroup = db.Column(db.Integer, db.ForeignKey('deviceGroup.id'))
    taskGroup = db.Column(db.Integer, db.ForeignKey('deviceTaskGroup.id'))
    status = db.Column(db.Integer, default=1)
    enabled = db.Column(db.Boolean)
    isdelete = db.Column(db.Boolean)
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
class ComplianceRecord(db.Model):
    __tablename__ = 'ComplianceRecord'
    id = db.Column(db.Integer, primary_key=True)
SoftwareRelationshipDevice = db.Table('SoftwareRelationshipDevice',
                                      db.Column('SoftwareDistribution_id', db.Integer,
                                                db.ForeignKey('SoftwareDistribution.id')),
                                      db.Column('device_id', db.Integer, db.ForeignKey('devices.id'))
                                      )
class SoftwareDistribution(db.Model):
    __tablename__ = 'SoftwareDistribution'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    devices = db.relationship('Device', secondary=SoftwareRelationshipDevice, lazy='dynamic')
    taskGroup = db.Column(db.Integer, db.ForeignKey('deviceTaskGroup.id'))
    type = db.Column(db.Integer)
    status = db.Column(db.Integer)
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
ContrastTaskRelationshipContrastFilesOrDirectory = db.Table('ContrastTaskRelationshipContrastFilesOrDirectory',
                                      db.Column('ContrastTask_id', db.Integer,
                                                db.ForeignKey('ContrastTasks.id')),
                                      db.Column('ContrastFilesOrDirectory', db.Integer, db.ForeignKey('ContrastFilesOrDirectory.id'))
                                      )
class ContrastTasks(db.Model):
    __tablename__ = 'ContrastTasks'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    fileOrDirectory = db.relationship('ContrastFilesOrDirectory', secondary=ContrastTaskRelationshipContrastFilesOrDirectory,
                            backref=db.backref('tasks', lazy='dynamic'), lazy='dynamic')
    type = db.Column(db.Integer)
    enabled = db.Column(db.Boolean)
    status = db.Column(db.Integer)
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
class ContrastFilesOrDirectory(db.Model):
    __tablename__ = 'ContrastFilesOrDirectory'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    device_id = db.Column(db.ForeignKey('devices.id'))
    type = db.Column(db.Integer)        # comparison type: file or directory
    path = db.Column(db.String(512))
    enabled = db.Column(db.String(64))
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
class ContrastResults(db.Model):
    __tablename__ = 'ContrastResults'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    task = db.Column(db.ForeignKey('ContrastTasks.id'))
    template = db.Column(db.String(512))
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
ApplicationOfReleaseRelationshipDevice = db.Table('ApplicationOfReleaseRelationshipDevice',
                                      db.Column('ApplicationOfRelease_id', db.Integer,
                                                db.ForeignKey('ApplicationOfRelease.id')),
                                      db.Column('device_id', db.Integer, db.ForeignKey('devices.id'))
                                      )
class ApplicationOfRelease(db.Model):
    __tablename__ = 'ApplicationOfRelease'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    devices = db.relationship('Device', secondary=ApplicationOfReleaseRelationshipDevice, lazy='dynamic')
    taskGroup = db.Column(db.Integer, db.ForeignKey('deviceTaskGroup.id'))
    type = db.Column(db.Integer)
    status = db.Column(db.Integer)
    instaff = db.Column(db.String(64))  # entered by
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
class histroyCommands(db.Model):
    __tablename__ = 'histroyCommands'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    command = db.Column(db.String(512))  # command record
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    remarks = db.Column(db.Text)  # remarks
class pushCommandLogger(db.Model):
    __tablename__ = 'pushCommandLogger'
    id = db.Column(db.Integer, primary_key=True)
    device_id = db.Column(db.Integer, db.ForeignKey('devices.id'))
    command = db.Column(db.Text)
    inputtime = db.Column(db.DateTime, default=datetime.now)  # entry time
    def __repr__(self):
        return '<command %r>' % self.command
class Logger(db.Model):
    __tablename__ = 'logs'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    logtime = db.Column(db.DateTime, default=datetime.now)  # pass the callable, not datetime.now(), so each row gets its own timestamp
    content = db.Column(db.String(256))
    # action  [ 1: add , 2: edit, 3: del ]
    action = db.Column(db.String(32))
    logobjtype = db.Column(db.String(64))
    logobj_id = db.Column(db.Integer)
    def __repr__(self):
        return '<Logs %r>' % self.user_id
@login_manager.user_loader
def load_user(user_id):
    return User.query.get(int(user_id))
 | 
	apache-2.0 | -4,587,166,439,716,651,500 | 34.864823 | 133 | 0.588358 | false | 
| 
	gbjuno/coreprogramming | 
	chapter7/7-6.py | 
	1 | 
	1400 | 
	#!/usr/bin/env python
def stockDataSort(stockData,userInput):
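    # Added note: the sort works by re-keying every row on the chosen column
    # index and printing rows in key order; rows sharing a key value overwrite
    # each other in the intermediate dict.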
    stockDataAfterSort = {}
    if userInput is None:
        print "invalid sort field"
        exit(1)
    else:
        for i in stockData:
            stockDataAfterSort[i[userInput]] = i
    print "-----------------------------------"
    for dataItem in sorted(stockDataAfterSort):
        print "%s : %s" % (dataItem,stockDataAfterSort[dataItem])
    print "-----------------------------------"
    return stockDataAfterSort
        
def main():
    stockData = [
        ['1000',1.6,1.7,1000],
        ['3023',101.5,0,0],
        ['0032',300.1,298,300],
        ['2032',30.2,40,60000]
    ]
    userInputMap = {"number":0,"currentPrice":1,"buyPrice":2,"stockNumber":3}
    
    userInput = "number"
    print "stockDataAfterSort is %s " % stockDataSort(stockData,userInputMap.get(userInput))
    userInput = "currentPrice"
    print "stockDataAfterSort is %s " % stockDataSort(stockData,userInputMap.get(userInput))
    
    userInput = "buyPrice"
    print "stockDataAfterSort is %s " % stockDataSort(stockData,userInputMap.get(userInput))
    
    userInput = "stockNumber"
    print "stockDataAfterSort is %s " % stockDataSort(stockData,userInputMap.get(userInput))
    userInput = "haha"
    print "stockDataAfterSort is %s " % stockDataSort(stockData,userInputMap.get(userInput))
    
if __name__ == '__main__':
    main()
 | 
	mit | 1,574,336,147,322,682,400 | 32.333333 | 92 | 0.604286 | false | 
| 
	futurice/vdsm | 
	vdsm/network/configurators/__init__.py | 
	1 | 
	5484 | 
	# Copyright 2013-2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import ConfigParser
import logging
from vdsm import netinfo
from vdsm.config import config
from vdsm.netconfpersistence import RunningConfig
from . import libvirt
from ..models import Bond, Bridge
from ..sourceroute import StaticSourceRoute
class RollbackIncomplete(Exception):
    pass
class Configurator(object):
    def __init__(self, configApplier, inRollback=False):
        self.configApplier = configApplier
        self._inRollback = inRollback
        self._libvirtAdded = set()
    def __enter__(self):
        self.begin()
        return self
    def __exit__(self, type, value, traceback):
        if type is None:
            self.commit()
        elif self._inRollback:
            # If we failed the rollback transaction, the networking system
            # is in no good state and we fail hard
            logging.error('Failed to roll back transaction to the last known '
                          'good network configuration.',
                          exc_info=(type, value, traceback))
        else:
            leftover = self.rollback()
            if leftover:
                raise RollbackIncomplete(leftover, type, value)
    def rollback(self):
        """
        returns None when all the nets were successfully rolled back, a
        vdsm.netoconfpersistence.Config object with the not yet rolled back
        networks and bonds.
        """
        # self.runningConfig will have all the changes that were applied before
        # we needed to rollback.
        return RunningConfig().diffFrom(self.runningConfig)
    def flush(self):
        libvirt.flush()
    def configureBridge(self, bridge, **opts):
        raise NotImplementedError
    def configureVlan(self, vlan, **opts):
        raise NotImplementedError
    def configureBond(self, bond, **opts):
        raise NotImplementedError
    def editBonding(self, bond, _netinfo):
        raise NotImplementedError
    def configureNic(self, nic, **opts):
        raise NotImplementedError
    def removeBridge(self, bridge):
        raise NotImplementedError
    def removeVlan(self, vlan):
        raise NotImplementedError
    def removeBond(self, bonding):
        raise NotImplementedError
    def removeNic(self, nic):
        raise NotImplementedError
    def configureSourceRoute(self, routes, rules, device):
        raise NotImplementedError
    def removeSourceRoute(self, routes, rules, device):
        raise NotImplementedError
    def configureLibvirtNetwork(self, network, iface, qosInbound=None,
                                qosOutbound=None):
        self.configApplier.createLibvirtNetwork(network,
                                                isinstance(iface, Bridge),
                                                iface.name,
                                                qosInbound=qosInbound,
                                                qosOutbound=qosOutbound)
        self._libvirtAdded.add(network)
    def removeLibvirtNetwork(self, network):
        self.configApplier.removeLibvirtNetwork(network)
    def _addSourceRoute(self, netEnt):
        ip = netEnt.ipConfig
        # bootproto is None for both static and no bootproto
        if ip.bootproto != 'dhcp' and netEnt.master is None:
            logging.debug("Adding source route %s, %s, %s, %s" %
                          (netEnt.name, ip.ipaddr, ip.netmask, ip.gateway))
            StaticSourceRoute(netEnt.name, self).\
                configure(ip.ipaddr, ip.netmask, ip.gateway)
    def _removeSourceRoute(self, netEnt, sourceRouteClass):
        if netEnt.ipConfig.bootproto != 'dhcp' and netEnt.master is None:
            logging.debug("Removing source route for device %s", netEnt.name)
            sourceRouteClass(netEnt.name, self).remove()
    def _setNewMtu(self, iface, ifaceVlans):
        """
        Update an interface's MTU when one of its users is removed.
        :param iface: interface object (bond or nic device)
        :type iface: NetDevice instance
        :param ifaceVlans: vlan devices using the interface 'iface'
        :type ifaceVlans: iterable
        :return mtu value that was applied
        """
        ifaceMtu = netinfo.getMtu(iface.name)
        maxMtu = netinfo.getMaxMtu(ifaceVlans, None)
        if maxMtu and maxMtu < ifaceMtu:
            if isinstance(iface, Bond):
                self.configApplier.setBondingMtu(iface.name, maxMtu)
            else:
                self.configApplier.setIfaceMtu(iface.name, maxMtu)
        return maxMtu
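# Added sketch (a hypothetical subclass, not part of vdsm): Configurator is
# driven as a transaction through the context-manager protocol above:
# begin() on entry, commit() on a clean exit, rollback() on error.
class _SketchConfigurator(Configurator):
    def begin(self):
        # snapshot the running config so rollback() can diff against it
        self.runningConfig = RunningConfig()
    def commit(self):
        self.runningConfig = None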
def getEthtoolOpts(name):
    try:
        opts = config.get('vars', 'ethtool_opts.' + name)
    except ConfigParser.NoOptionError:
        opts = config.get('vars', 'ethtool_opts')
    return opts
 | 
	gpl-2.0 | 1,205,282,231,433,251,600 | 33.708861 | 79 | 0.642232 | false | 
| 
	dhermes/ncaa-bracket-scenarios | 
	run_all_scenarios.py | 
	1 | 
	1579 | 
	from __future__ import print_function
import itertools
import json
import pickle
from game_tree_classes import WinnerOf
import utils
with open(utils.SWEET16_PICKLE, 'rb') as fh:
    SLOTS_BEFORE = pickle.load(fh)
def complete_bracket(game_slots, choice_slots, choices):
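    # Added note: each undecided slot holds a WinnerOf placeholder; applying a
    # choice replaces it with the chosen team, and the bracket must be fully
    # decided once every choice has been applied.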
    result = game_slots.copy()
    for slot_id, choice_val in zip(choice_slots, choices):
        winner_of = result.get_slot(slot_id)
        if choice_val not in (winner_of.game_slot1, winner_of.game_slot2):
            raise ValueError('Choice does not match available.')
        winning_team = result.get_slot(choice_val)
        result.reset_slot(slot_id, winning_team)
    if not result.complete:
        raise ValueError('Expected bracket to be complete.')
    return result.reduced
def main():
    choice_slots = []
    choice_vals = []
    for slot_id in xrange(127):
        value = SLOTS_BEFORE.get_slot(slot_id)
        if isinstance(value, WinnerOf):
            choice_slots.append(slot_id)
            choice_vals.append((value.game_slot1, value.game_slot2))
    msg = '{:d} choices left'.format(len(choice_slots))
    print(msg)
    reduced_vals = []
    for choice_tuple in itertools.product(*choice_vals):
        reduced_vals.append(
            complete_bracket(SLOTS_BEFORE, choice_slots, choice_tuple))
    filename = utils.REDUCED_SCENARIOS
    with open(filename, 'w') as fh:
        json.dump(reduced_vals, fh, indent=2, sort_keys=True,
                  separators=(',', ': '))
    msg = 'Created {}'.format(filename)
    print(msg)
if __name__ == '__main__':
    main()
 | 
	apache-2.0 | -6,612,150,762,153,403,000 | 28.240741 | 74 | 0.639012 | false | 
| 
	Gaia3D/QGIS | 
	python/plugins/processing/algs/qgis/DeleteHoles.py | 
	1 | 
	2935 | 
	# -*- coding: utf-8 -*-
"""
***************************************************************************
    DeleteHoles.py
    ---------------------
    Date                 : April 2015
    Copyright            : (C) 2015 by Etienne Trimaille
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""
__author__ = 'Etienne Trimaille'
__date__ = 'April 2015'
__copyright__ = '(C) 2015, Etienne Trimaille'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.core import QgsFeature, QgsGeometry
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
class DeleteHoles(GeoAlgorithm):
    INPUT = 'INPUT'
    OUTPUT = 'OUTPUT'
    def defineCharacteristics(self):
        self.name = 'Delete holes'
        self.group = 'Vector geometry tools'
        self.addParameter(ParameterVector(self.INPUT,
            self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_POLYGON]))
        self.addOutput(OutputVector(self.OUTPUT, self.tr('Output')))
    def processAlgorithm(self, progress):
        layer = dataobjects.getObjectFromUri(
            self.getParameterValue(self.INPUT))
        writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
            layer.pendingFields(),
            layer.wkbType(),
            layer.crs())
        features = vector.features(layer)
        count = len(features)
        total = 100.0 / float(count)
        feat = QgsFeature()
        for count, f in enumerate(features):
            geometry = f.geometry()
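            # A polygon's first ring is its exterior boundary; every further
            # ring is a hole, so stripping polygon[1:] deletes the holes.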
            if geometry.isMultipart():
                multi_polygon = geometry.asMultiPolygon()
                for polygon in multi_polygon:
                    for ring in polygon[1:]:
                        polygon.remove(ring)
                geometry = QgsGeometry.fromMultiPolygon(multi_polygon)
            else:
                polygon = geometry.asPolygon()
                for ring in polygon[1:]:
                    polygon.remove(ring)
                geometry = QgsGeometry.fromPolygon(polygon)
            feat.setGeometry(geometry)
            feat.setAttributes(f.attributes())
            writer.addFeature(feat)
            progress.setPercentage(int(count * total))
        del writer | 
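# A minimal invocation sketch (assumption: run from the QGIS 2.x Python console
# with the Processing plugin initialised; 'input.shp' and 'output.shp' are
# hypothetical paths, and 'qgis:deleteholes' is the assumed algorithm id):
#
#     import processing
#     processing.runalg('qgis:deleteholes', 'input.shp', 'output.shp')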
	gpl-2.0 | -8,316,699,685,901,509,000 | 35.246914 | 75 | 0.527768 | false | 
| 
	smira/spamfighter | 
	spamfighter/core/null_partner.py | 
	1 | 
	3199 | 
	# -*- coding: utf-8 -*-
#
# SpamFighter, Copyright 2008, 2009 NetStream LLC (http://netstream.ru/, [email protected])
#
# This file is part of SpamFighter.
#
# SpamFighter is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SpamFighter is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SpamFighter.  If not, see <http://www.gnu.org/licenses/>.
#
"""
Модуль авторизации партнеров без логинов/паролей (на доверии).
"""
from zope.interface import implements
from twisted.internet import defer
from spamfighter.interfaces import IPartner, IPartnerAuthorizer
from spamfighter.core.partner import PartnerAuthorizationFailedError
from spamfighter.core.domain import getDefaultDomain, BaseDomain
from spamfighter.plugin import loadPlugin, IDefaultDomainProvider
from spamfighter.utils import config
class NullPartner(object):
    """
    Партнер, авторизованный без логина/пароля (на доверии).
    @ivar domain: корневой домен партнера
    @type domain: L{BaseDomain}
    """
    implements(IPartner)
    def __init__(self):
        """
        Конструктор.
        """
        domainProvider = loadPlugin(IDefaultDomainProvider, config.plugins.domain.null_partner_domain_provider)
        self.domain = domainProvider.getDefaultDomain()
    def rootDomain(self):
        """
        Получить корневой домен партнера.
        @return: Deferred, корневой домен (L{IDomain})
        @rtype: C{twisted.internet.defer.Deferred} 
        """
        return defer.succeed(self.domain)
class NullPartnerAuthorizer(object):
    """
    Провайдер авторизации партнеров без логина/пароля (на доверии).
    В этой ситуации доступ к СпамоБорцу ограничен с помощью других средств
    (HTTP-proxy, firewall).
    @ivar partner: единственный партнер, который обеспечивает весь доступ
    @type partner: L{NullPartner}
    """
    implements(IPartnerAuthorizer)
    def __init__(self):
        """
        Конструктор.
        """
        self.partner = NullPartner()
    def authorize(self, partner_info):
        """
        Выполнить авторизацию партнера.
        @param partner_info: информация о партнере
        @return: Deferred, партнер (L{IPartner})
        @rtype: C{twisted.internet.defer.Deferred} 
        """
        if partner_info is not None:
            return defer.fail(PartnerAuthorizationFailedError())
        return defer.succeed(self.partner)
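# A minimal usage sketch (assumption: called from running Twisted code with the
# null_partner_domain_provider plugin configured; authorization succeeds only
# when no partner credentials are supplied):
#
#     authorizer = NullPartnerAuthorizer()
#     d = authorizer.authorize(None)  # fires with the single trusted partner
#     d.addCallback(lambda partner: partner.rootDomain())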
 | 
	gpl-3.0 | -2,622,230,818,337,814,000 | 30.840909 | 111 | 0.70414 | false | 
| 
	lowitty/eeep | 
	insertdb/insertmodeldata.py | 
	1 | 
	6340 | 
	#encoding=utf-8
import MySQLdb, os
def insertDomain(db):
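    # Read one domain name per line from the sibling 'domain' file and insert
    # each non-empty line into quotation_domain.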
    cursor = db.cursor()
    sql = "insert into %(table)s (%(para)s) values ('%(value)s')"
    insertValues = {'table' : 'quotation_domain', 'para' : 'name', 'value' : 'CD'}
    f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + 'domain', 'r+')
    for line in f.readlines():
        line = line.strip()
        if('' != line):
            insertValues['value'] = line
            exesql = sql % insertValues
            cursor.execute(exesql)
    db.commit()
    db.close()
    
def insertSubDomain(db):
    cursor = db.cursor()
    sql = "insert into %(table)s (%(para)s) values ('%(value)s')"
    insertValues = {'table' : 'quotation_domain', 'para' : 'name', 'value' : 'CD'}
    insertValues['table'] = 'quotation_subdomain'
    f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + 'subdomain', 'r+')
    for line in f.readlines():
        line = line.strip()
        if('' != line):
            insertValues['value'] = line
            exesql = sql % insertValues
            cursor.execute(exesql)
    db.commit()
    db.close()
    
def insertRegion(db, tableName, valueTag, fileName):
    cursor = db.cursor()
    sql = "insert into %(table)s (%(valueTag)s) values ('%(value)s')"
    insertValues = {'table' : tableName, 'valueTag' : valueTag, 'value' : 'xxxxx'}
    #print sql % insertValues
    f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + fileName, 'r+')
    for line in f.readlines():
        line = line.strip()
        if('' != line):
            para = line.split('**')
            if(len(para) > 1):
                insertValues['value'] = para[0].strip()
                cursor.execute(sql % insertValues)
    db.commit()
    db.close()
def insertValuesWithForignKey(db, table, tagValue, tagForKey, f_table, f_tagvalue, fileName = 'unitid'):
    cursor = db.cursor()
    sql = "insert into %(table)s (" + tagValue + "," + tagForKey + ") values ('%(" + tagValue + ")s', %(" + tagForKey + ")s)"
    insertValues = {'table' : table, tagValue : 'OMS CD', tagForKey : 1}
    
    f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + fileName, 'r+')
    
    f_id = -1
    exeTimes = 0
    for line in f.readlines():
        exeTimes += 1
        line = line.strip()
        if('' != line):
            para = line.split('**')
            if(len(para) > 1):
                f_name = para[0].strip()
                cursor.execute("select id from %s where %s='%s'" % (f_table, f_tagvalue, f_name))
                f_id = cursor.fetchone()[0]
                insertValues[tagValue] = para[1].strip().replace('\'', "\\'")
                insertValues[tagForKey] = f_id
                print sql % insertValues
            else:
                insertValues[tagValue] = para[0].strip().replace('\'', "\\'")
                insertValues[tagForKey] = f_id
                print sql % insertValues
            cursor.execute(sql % insertValues)
        
        if(exeTimes % 10 == 0):
            db.commit()
            #pass
    db.commit()
    db.close()
def insertWorkcenter(db, tableName, fileName, *tags):
    if(4 != len(tags)):
        return False
    else:
        cursor = db.cursor()
        sql = "insert into %(tableName)s (" + tags[0] + "," + tags[1] + "," + tags[2] + "," + tags[3] + ") values ('%(" + tags[0] + ")s','%(" + tags[1] + ")s','%(" + tags[2] + ")s','%("+ tags[3] +")s')".encode('utf-8')
        insertDatas = {
                       'tableName' : tableName,
                       tags[0] : '',
                       tags[1] : '',
                       tags[2] : '',
                       tags[3] : ''
                       }
        f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + fileName, 'r+')
        
        cIndex = 0
        for line in f.readlines():
            cIndex += 1
            if('' != line):
                para = line.split('**')
                if(len(para) > 3):
                    insertDatas[tags[0]] = para[0].strip().replace("\'", "\\'").encode('utf-8')
                    insertDatas[tags[1]] = para[1].strip().replace("\'", "\\'").encode('utf-8')
                    insertDatas[tags[2]] = para[2].strip().replace("\'", "\\'").encode('utf-8')
                    insertDatas[tags[3]] = para[3].strip().replace("\'", "\\'").encode('utf-8')
                    #print (sql % insertDatas).encode('utf-8')
                    cursor.execute((sql % insertDatas).encode('utf-8'))
            if(cIndex % 10 == 0):
                db.commit()
        db.commit()
        db.close()
def insertPostatus(db, fileName):
    cursor = db.cursor()
    sql = "insert into quotation_postatus (name) values ('%s')"
    f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + fileName, 'r+')
    for line in f.readlines():
        line = line.strip()
        if('' != line):
            exesql = sql % line
            cursor.execute(exesql)
    db.commit()
    db.close()
def insertOrderingcompany(db, fileName):
    cursor = db.cursor()
    sql = "insert into quotation_orderingcompany (name) values ('%s')"
    f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + fileName, 'r+')
    
    cIndex = 0
    for line in f.readlines():
        cIndex += 1
        line = line.strip()
        if('' != line):
            exesql = sql % line
            #print exesql
            cursor.execute(exesql)
        if( 0 == cIndex % 10):
            db.commit()
    db.commit()
    db.close()
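# A minimal sketch, assuming the same quotation_orderingcompany table as above,
# of the insert done with MySQLdb parameter binding instead of string
# interpolation (the driver quotes and escapes bound values itself, so names
# containing quotes cannot break the statement):
def insertOrderingcompanySafe(db, fileName):
    cursor = db.cursor()
    sql = "insert into quotation_orderingcompany (name) values (%s)"
    f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + fileName, 'r+')
    for line in f.readlines():
        line = line.strip()
        if '' != line:
            # The value is passed separately and bound by the driver.
            cursor.execute(sql, (line,))
    db.commit()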
if __name__ == '__main__':
    host = "localhost"
    passwd = "tatool"
    user = "tatool"
    dbname = "eeep"
    db = MySQLdb.connect(host=host, user=user, passwd=passwd, db=dbname)
    
    #insertDomain(db)
    #insertSubDomain(db)
    #insertValuesWithForignKey(db, 'quotation_unitid', 'name', 'domain_id', "quotation_domain", "name")
    #insertRegion(db, 'quotation_region', 'name', 'regionandcountry')
    #insertValuesWithForignKey(db, 'quotation_country', 'name', 'region_id', "quotation_region", "name", 'regionandcountry')
    #insertWorkcenter(db, 'quotation_workcenter', 'workcenter', 'number', 'descworkcenter', 'icrrbactivitytype', 'intracompanyactivitytyoe')
    #insertPostatus(db, 'postatus')
    insertOrderingcompany(db, 'orderingcompany')
    
    
     | 
	mit | -8,395,665,122,548,171,000 | 37.90184 | 218 | 0.519085 | false | 
| 
	xen0l/ansible | 
	lib/ansible/modules/cloud/vmware/vmware_vm_facts.py | 
	1 | 
	5877 | 
	#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Joseph Callen <jcallen () csc.com>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
    'metadata_version': '1.1',
    'status': ['preview'],
    'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_vm_facts
short_description: Return basic facts pertaining to a vSphere virtual machine guest
description:
- Return basic facts pertaining to a vSphere virtual machine guest.
version_added: '2.0'
author:
- Joseph Callen (@jcpowermac)
- Abhijeet Kasurde (@Akasurde)
notes:
- Tested on vSphere 5.5 and vSphere 6.5
requirements:
- python >= 2.6
- PyVmomi
options:
    vm_type:
      description:
      - If set to C(vm), then facts are gathered for virtual machines only.
      - If set to C(template), then facts are gathered for virtual machine templates only.
      - If set to C(all), then facts are gathered for all virtual machines and virtual machine templates.
      required: False
      default: 'all'
      choices: [ all, vm, template ]
      version_added: 2.5
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Gather all registered virtual machines
  vmware_vm_facts:
    hostname: '{{ vcenter_hostname }}'
    username: '{{ vcenter_username }}'
    password: '{{ vcenter_password }}'
  delegate_to: localhost
  register: vmfacts
- debug:
    var: vmfacts.virtual_machines
- name: Gather only registered virtual machine templates
  vmware_vm_facts:
    hostname: '{{ vcenter_hostname }}'
    username: '{{ vcenter_username }}'
    password: '{{ vcenter_password }}'
    vm_type: template
  delegate_to: localhost
  register: template_facts
- debug:
    var: template_facts.virtual_machines
- name: Gather only registered virtual machines
  vmware_vm_facts:
    hostname: '{{ vcenter_hostname }}'
    username: '{{ vcenter_username }}'
    password: '{{ vcenter_password }}'
    vm_type: vm
  delegate_to: localhost
  register: vm_facts
- debug:
    var: vm_facts.virtual_machines
'''
RETURN = r'''
virtual_machines:
  description: dictionary of virtual machines and their facts
  returned: success
  type: dict
'''
try:
    from pyVmomi import vim
except ImportError:
    pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import PyVmomi, get_all_objs, vmware_argument_spec, _get_vm_prop
class VmwareVmFacts(PyVmomi):
    def __init__(self, module):
        super(VmwareVmFacts, self).__init__(module)
    # https://github.com/vmware/pyvmomi-community-samples/blob/master/samples/getallvms.py
    def get_all_virtual_machines(self):
        """
        Function to get all virtual machines and related configurations information
        """
        virtual_machines = get_all_objs(self.content, [vim.VirtualMachine])
        _virtual_machines = {}
        for vm in virtual_machines:
            _ip_address = ""
            summary = vm.summary
            if summary.guest is not None:
                _ip_address = summary.guest.ipAddress
                if _ip_address is None:
                    _ip_address = ""
            _mac_address = []
            all_devices = _get_vm_prop(vm, ('config', 'hardware', 'device'))
            if all_devices:
                for dev in all_devices:
                    if isinstance(dev, vim.vm.device.VirtualEthernetCard):
                        _mac_address.append(dev.macAddress)
            net_dict = {}
            vmnet = _get_vm_prop(vm, ('guest', 'net'))
            if vmnet:
                for device in vmnet:
                    net_dict[device.macAddress] = dict()
                    net_dict[device.macAddress]['ipv4'] = []
                    net_dict[device.macAddress]['ipv6'] = []
                    for ip_addr in device.ipAddress:
                        if "::" in ip_addr:
                            net_dict[device.macAddress]['ipv6'].append(ip_addr)
                        else:
                            net_dict[device.macAddress]['ipv4'].append(ip_addr)
            esxi_hostname = None
            if summary.runtime.host:
                esxi_hostname = summary.runtime.host.summary.config.name
            virtual_machine = {
                summary.config.name: {
                    "guest_fullname": summary.config.guestFullName,
                    "power_state": summary.runtime.powerState,
                    "ip_address": _ip_address,  # Kept for backward compatibility
                    "mac_address": _mac_address,  # Kept for backward compatibility
                    "uuid": summary.config.uuid,
                    "vm_network": net_dict,
                    "esxi_hostname": esxi_hostname,
                }
            }
            vm_type = self.module.params.get('vm_type')
            is_template = _get_vm_prop(vm, ('config', 'template'))
            if vm_type == 'vm' and not is_template:
                _virtual_machines.update(virtual_machine)
            elif vm_type == 'template' and is_template:
                _virtual_machines.update(virtual_machine)
            elif vm_type == 'all':
                _virtual_machines.update(virtual_machine)
        return _virtual_machines
def main():
    argument_spec = vmware_argument_spec()
    argument_spec.update(
        vm_type=dict(type='str', choices=['vm', 'all', 'template'], default='all'),
    )
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=False)
    vmware_vm_facts = VmwareVmFacts(module)
    _virtual_machines = vmware_vm_facts.get_all_virtual_machines()
    module.exit_json(changed=False, virtual_machines=_virtual_machines)
if __name__ == '__main__':
    main()
 | 
	gpl-3.0 | -3,481,050,722,794,838,500 | 31.832402 | 105 | 0.602008 | false | 
| 
	googleapis/googleapis-gen | 
	google/cloud/dialogflow/cx/v3beta1/dialogflow-cx-v3beta1-py/google/cloud/dialogflowcx_v3beta1/services/entity_types/client.py | 
	1 | 
	38725 | 
	# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib  # type: ignore
from google.api_core import exceptions as core_exceptions         # type: ignore
from google.api_core import gapic_v1                              # type: ignore
from google.api_core import retry as retries                      # type: ignore
from google.auth import credentials as ga_credentials             # type: ignore
from google.auth.transport import mtls                            # type: ignore
from google.auth.transport.grpc import SslCredentials             # type: ignore
from google.auth.exceptions import MutualTLSChannelError          # type: ignore
from google.oauth2 import service_account                         # type: ignore
from google.cloud.dialogflowcx_v3beta1.services.entity_types import pagers
from google.cloud.dialogflowcx_v3beta1.types import entity_type
from google.cloud.dialogflowcx_v3beta1.types import entity_type as gcdc_entity_type
from google.protobuf import field_mask_pb2  # type: ignore
from .transports.base import EntityTypesTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import EntityTypesGrpcTransport
from .transports.grpc_asyncio import EntityTypesGrpcAsyncIOTransport
class EntityTypesClientMeta(type):
    """Metaclass for the EntityTypes client.
    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """
    _transport_registry = OrderedDict()  # type: Dict[str, Type[EntityTypesTransport]]
    _transport_registry["grpc"] = EntityTypesGrpcTransport
    _transport_registry["grpc_asyncio"] = EntityTypesGrpcAsyncIOTransport
    def get_transport_class(cls,
            label: str = None,
        ) -> Type[EntityTypesTransport]:
        """Returns an appropriate transport class.
        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.
        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]
        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))
class EntityTypesClient(metaclass=EntityTypesClientMeta):
    """Service for managing
    [EntityTypes][google.cloud.dialogflow.cx.v3beta1.EntityType].
    """
    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Converts api endpoint to mTLS endpoint.
        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint
        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )
        m = mtls_endpoint_re.match(api_endpoint)
        name, mtls, sandbox, googledomain = m.groups()
        if mtls or not googledomain:
            return api_endpoint
        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )
        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
    DEFAULT_ENDPOINT = "dialogflow.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )
    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            info.
        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.
        Returns:
            EntityTypesClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_info(info)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)
    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            file.
        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.
        Returns:
            EntityTypesClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(
            filename)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)
    from_service_account_json = from_service_account_file
    @property
    def transport(self) -> EntityTypesTransport:
        """Returns the transport used by the client instance.
        Returns:
            EntityTypesTransport: The transport used by the client
                instance.
        """
        return self._transport
    @staticmethod
    def entity_type_path(project: str,location: str,agent: str,entity_type: str,) -> str:
        """Returns a fully-qualified entity_type string."""
        return "projects/{project}/locations/{location}/agents/{agent}/entityTypes/{entity_type}".format(project=project, location=location, agent=agent, entity_type=entity_type, )
    @staticmethod
    def parse_entity_type_path(path: str) -> Dict[str,str]:
        """Parses a entity_type path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/agents/(?P<agent>.+?)/entityTypes/(?P<entity_type>.+?)$", path)
        return m.groupdict() if m else {}
    @staticmethod
    def common_billing_account_path(billing_account: str, ) -> str:
        """Returns a fully-qualified billing_account string."""
        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
    @staticmethod
    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
        """Parse a billing_account path into its component segments."""
        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
        return m.groupdict() if m else {}
    @staticmethod
    def common_folder_path(folder: str, ) -> str:
        """Returns a fully-qualified folder string."""
        return "folders/{folder}".format(folder=folder, )
    @staticmethod
    def parse_common_folder_path(path: str) -> Dict[str,str]:
        """Parse a folder path into its component segments."""
        m = re.match(r"^folders/(?P<folder>.+?)$", path)
        return m.groupdict() if m else {}
    @staticmethod
    def common_organization_path(organization: str, ) -> str:
        """Returns a fully-qualified organization string."""
        return "organizations/{organization}".format(organization=organization, )
    @staticmethod
    def parse_common_organization_path(path: str) -> Dict[str,str]:
        """Parse a organization path into its component segments."""
        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
        return m.groupdict() if m else {}
    @staticmethod
    def common_project_path(project: str, ) -> str:
        """Returns a fully-qualified project string."""
        return "projects/{project}".format(project=project, )
    @staticmethod
    def parse_common_project_path(path: str) -> Dict[str,str]:
        """Parse a project path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)$", path)
        return m.groupdict() if m else {}
    @staticmethod
    def common_location_path(project: str, location: str, ) -> str:
        """Returns a fully-qualified location string."""
        return "projects/{project}/locations/{location}".format(project=project, location=location, )
    @staticmethod
    def parse_common_location_path(path: str) -> Dict[str,str]:
        """Parse a location path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
        return m.groupdict() if m else {}
    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Union[str, EntityTypesTransport, None] = None,
            client_options: Optional[client_options_lib.ClientOptions] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the entity types client.
        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, EntityTypesTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        # Create SSL credentials for mutual TLS if needed.
        use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")))
        client_cert_source_func = None
        is_mtls = False
        if use_client_cert:
            if client_options.client_cert_source:
                is_mtls = True
                client_cert_source_func = client_options.client_cert_source
            else:
                is_mtls = mtls.has_default_client_cert_source()
                if is_mtls:
                    client_cert_source_func = mtls.default_client_cert_source()
                else:
                    client_cert_source_func = None
        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        else:
            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
            if use_mtls_env == "never":
                api_endpoint = self.DEFAULT_ENDPOINT
            elif use_mtls_env == "always":
                api_endpoint = self.DEFAULT_MTLS_ENDPOINT
            elif use_mtls_env == "auto":
                if is_mtls:
                    api_endpoint = self.DEFAULT_MTLS_ENDPOINT
                else:
                    api_endpoint = self.DEFAULT_ENDPOINT
            else:
                raise MutualTLSChannelError(
                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
                    "values: never, auto, always"
                )
        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, EntityTypesTransport):
            # transport is a EntityTypesTransport instance.
            if credentials or client_options.credentials_file:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
            )
    def list_entity_types(self,
            request: entity_type.ListEntityTypesRequest = None,
            *,
            parent: str = None,
            retry: retries.Retry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.ListEntityTypesPager:
        r"""Returns the list of all entity types in the specified
        agent.
        Args:
            request (google.cloud.dialogflowcx_v3beta1.types.ListEntityTypesRequest):
                The request object. The request message for
                [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3beta1.EntityTypes.ListEntityTypes].
            parent (str):
                Required. The agent to list all entity types for.
                Format:
                ``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            google.cloud.dialogflowcx_v3beta1.services.entity_types.pagers.ListEntityTypesPager:
                The response message for
                [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3beta1.EntityTypes.ListEntityTypes].
                Iterating over this object will yield results and
                resolve additional pages automatically.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
        # Minor optimization to avoid making a copy if the user passes
        # in a entity_type.ListEntityTypesRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, entity_type.ListEntityTypesRequest):
            request = entity_type.ListEntityTypesRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list_entity_types]
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )
        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.ListEntityTypesPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )
        # Done; return the response.
        return response
    def get_entity_type(self,
            request: entity_type.GetEntityTypeRequest = None,
            *,
            name: str = None,
            retry: retries.Retry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> entity_type.EntityType:
        r"""Retrieves the specified entity type.
        Args:
            request (google.cloud.dialogflowcx_v3beta1.types.GetEntityTypeRequest):
                The request object. The request message for
                [EntityTypes.GetEntityType][google.cloud.dialogflow.cx.v3beta1.EntityTypes.GetEntityType].
            name (str):
                Required. The name of the entity type. Format:
                ``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/entityTypes/<Entity Type ID>``.
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            google.cloud.dialogflowcx_v3beta1.types.EntityType:
                Entities are extracted from user input and represent parameters that are
                   meaningful to your application. For example, a date
                   range, a proper name such as a geographic location or
                   landmark, and so on. Entities represent actionable
                   data for your application.
                   When you define an entity, you can also include
                   synonyms that all map to that entity. For example,
                   "soft drink", "soda", "pop", and so on.
                   There are three types of entities:
                   -  **System** - entities that are defined by the
                      Dialogflow API for common data types such as date,
                      time, currency, and so on. A system entity is
                      represented by the EntityType type.
                   -  **Custom** - entities that are defined by you that
                      represent actionable data that is meaningful to
                      your application. For example, you could define a
                      pizza.sauce entity for red or white pizza sauce, a
                      pizza.cheese entity for the different types of
                      cheese on a pizza, a pizza.topping entity for
                      different toppings, and so on. A custom entity is
                      represented by the EntityType type.
                   -  **User** - entities that are built for an
                      individual user such as favorites, preferences,
                      playlists, and so on. A user entity is represented
                      by the
                      [SessionEntityType][google.cloud.dialogflow.cx.v3beta1.SessionEntityType]
                      type.
                   For more information about entity types, see the
                   [Dialogflow
                   documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview).
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
        # Minor optimization to avoid making a copy if the user passes
        # in a entity_type.GetEntityTypeRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, entity_type.GetEntityTypeRequest):
            request = entity_type.GetEntityTypeRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get_entity_type]
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )
        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
        # Done; return the response.
        return response
    def create_entity_type(self,
            request: gcdc_entity_type.CreateEntityTypeRequest = None,
            *,
            parent: str = None,
            entity_type: gcdc_entity_type.EntityType = None,
            retry: retries.Retry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> gcdc_entity_type.EntityType:
        r"""Creates an entity type in the specified agent.
        Args:
            request (google.cloud.dialogflowcx_v3beta1.types.CreateEntityTypeRequest):
                The request object. The request message for
                [EntityTypes.CreateEntityType][google.cloud.dialogflow.cx.v3beta1.EntityTypes.CreateEntityType].
            parent (str):
                Required. The agent to create an entity type for. Format:
                ``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            entity_type (google.cloud.dialogflowcx_v3beta1.types.EntityType):
                Required. The entity type to create.
                This corresponds to the ``entity_type`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            google.cloud.dialogflowcx_v3beta1.types.EntityType:
                Entities are extracted from user input and represent parameters that are
                   meaningful to your application. For example, a date
                   range, a proper name such as a geographic location or
                   landmark, and so on. Entities represent actionable
                   data for your application.
                   When you define an entity, you can also include
                   synonyms that all map to that entity. For example,
                   "soft drink", "soda", "pop", and so on.
                   There are three types of entities:
                   -  **System** - entities that are defined by the
                      Dialogflow API for common data types such as date,
                      time, currency, and so on. A system entity is
                      represented by the EntityType type.
                   -  **Custom** - entities that are defined by you that
                      represent actionable data that is meaningful to
                      your application. For example, you could define a
                      pizza.sauce entity for red or white pizza sauce, a
                      pizza.cheese entity for the different types of
                      cheese on a pizza, a pizza.topping entity for
                      different toppings, and so on. A custom entity is
                      represented by the EntityType type.
                   -  **User** - entities that are built for an
                      individual user such as favorites, preferences,
                      playlists, and so on. A user entity is represented
                      by the
                      [SessionEntityType][google.cloud.dialogflow.cx.v3beta1.SessionEntityType]
                      type.
                   For more information about entity types, see the
                   [Dialogflow
                   documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview).
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, entity_type])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
        # Minor optimization to avoid making a copy if the user passes
        # in a gcdc_entity_type.CreateEntityTypeRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, gcdc_entity_type.CreateEntityTypeRequest):
            request = gcdc_entity_type.CreateEntityTypeRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent
            if entity_type is not None:
                request.entity_type = entity_type
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.create_entity_type]
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )
        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
        # Done; return the response.
        return response
    def update_entity_type(self,
            request: gcdc_entity_type.UpdateEntityTypeRequest = None,
            *,
            entity_type: gcdc_entity_type.EntityType = None,
            update_mask: field_mask_pb2.FieldMask = None,
            retry: retries.Retry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> gcdc_entity_type.EntityType:
        r"""Updates the specified entity type.
        Note: You should always train a flow prior to sending it
        queries. See the `training
        documentation <https://cloud.google.com/dialogflow/cx/docs/concept/training>`__.
        Args:
            request (google.cloud.dialogflowcx_v3beta1.types.UpdateEntityTypeRequest):
                The request object. The request message for
                [EntityTypes.UpdateEntityType][google.cloud.dialogflow.cx.v3beta1.EntityTypes.UpdateEntityType].
            entity_type (google.cloud.dialogflowcx_v3beta1.types.EntityType):
                Required. The entity type to update.
                This corresponds to the ``entity_type`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (google.protobuf.field_mask_pb2.FieldMask):
                The mask to control which fields get
                updated.
                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            google.cloud.dialogflowcx_v3beta1.types.EntityType:
                Entities are extracted from user input and represent parameters that are
                   meaningful to your application. For example, a date
                   range, a proper name such as a geographic location or
                   landmark, and so on. Entities represent actionable
                   data for your application.
                   When you define an entity, you can also include
                   synonyms that all map to that entity. For example,
                   "soft drink", "soda", "pop", and so on.
                   There are three types of entities:
                   -  **System** - entities that are defined by the
                      Dialogflow API for common data types such as date,
                      time, currency, and so on. A system entity is
                      represented by the EntityType type.
                   -  **Custom** - entities that are defined by you that
                      represent actionable data that is meaningful to
                      your application. For example, you could define a
                      pizza.sauce entity for red or white pizza sauce, a
                      pizza.cheese entity for the different types of
                      cheese on a pizza, a pizza.topping entity for
                      different toppings, and so on. A custom entity is
                      represented by the EntityType type.
                   -  **User** - entities that are built for an
                      individual user such as favorites, preferences,
                      playlists, and so on. A user entity is represented
                      by the
                      [SessionEntityType][google.cloud.dialogflow.cx.v3beta1.SessionEntityType]
                      type.
                   For more information about entity types, see the
                   [Dialogflow
                   documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview).
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([entity_type, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
        # Minor optimization to avoid making a copy if the user passes
        # in a gcdc_entity_type.UpdateEntityTypeRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, gcdc_entity_type.UpdateEntityTypeRequest):
            request = gcdc_entity_type.UpdateEntityTypeRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if entity_type is not None:
                request.entity_type = entity_type
            if update_mask is not None:
                request.update_mask = update_mask
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.update_entity_type]
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("entity_type.name", request.entity_type.name),
            )),
        )
        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
        # Done; return the response.
        return response
    def delete_entity_type(self,
            request: entity_type.DeleteEntityTypeRequest = None,
            *,
            name: str = None,
            retry: retries.Retry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> None:
        r"""Deletes the specified entity type.
        Note: You should always train a flow prior to sending it
        queries. See the `training
        documentation <https://cloud.google.com/dialogflow/cx/docs/concept/training>`__.
        Args:
            request (google.cloud.dialogflowcx_v3beta1.types.DeleteEntityTypeRequest):
                The request object. The request message for
                [EntityTypes.DeleteEntityType][google.cloud.dialogflow.cx.v3beta1.EntityTypes.DeleteEntityType].
            name (str):
                Required. The name of the entity type to delete. Format:
                ``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/entityTypes/<Entity Type ID>``.
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
        # Minor optimization to avoid making a copy if the user passes
        # in a entity_type.DeleteEntityTypeRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, entity_type.DeleteEntityTypeRequest):
            request = entity_type.DeleteEntityTypeRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete_entity_type]
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )
        # Send the request.
        rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-dialogflowcx",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = (
    "EntityTypesClient",
)
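# A minimal usage sketch (assumption: application-default credentials are
# configured and the agent path below names an existing Dialogflow CX agent):
#
#     client = EntityTypesClient()
#     parent = "projects/my-project/locations/global/agents/my-agent"
#     for entity_type in client.list_entity_types(parent=parent):
#         print(entity_type.display_name)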
 | 
	apache-2.0 | 7,160,798,180,582,101,000 | 44.345433 | 180 | 0.601704 | false | 
| 
	CloudVE/djcloudbridge | 
	djcloudbridge/drf_routers.py | 
	1 | 
	1991 | 
	from django.conf.urls import url
from rest_framework import routers, viewsets
from rest_framework_nested import routers as nested_routers
class HybridRoutingMixin(object):
    """
    Extends functionality of DefaultRouter adding possibility to register
    simple API views, not just Viewsets.
    Based on:
    http://stackoverflow.com/questions/18818179/routing-api-views-in-django-rest-framework
    http://stackoverflow.com/questions/18817988/using-django-rest-frameworks-browsable-api-with-apiviews
    """
    def get_routes(self, viewset):
        """
        Checks if the viewset is an instance of ViewSet, otherwise assumes
        it's a simple view and does not run original `get_routes` code.
        """
        if issubclass(viewset, viewsets.ViewSetMixin):
            return super(HybridRoutingMixin, self).get_routes(viewset)
        return []
    def get_urls(self):
        """
        Append non-viewset views to the urls generated by the original
        `get_urls` method.
        """
        # URLs for viewsets
        ret = super(HybridRoutingMixin, self).get_urls()
        # URLs for simple views
        for prefix, viewset, basename in self.registry:
            # Skip viewsets
            if issubclass(viewset, viewsets.ViewSetMixin):
                continue
            # URL regex
            regex = '{prefix}{trailing_slash}$'.format(
                prefix=prefix,
                trailing_slash=self.trailing_slash
            )
            # The view name has to have suffix "-list" due to specifics
            # of the DefaultRouter implementation.
            ret.append(url(regex, viewset.as_view(),
                           name='{0}-list'.format(basename)))
        return ret
class HybridDefaultRouter(HybridRoutingMixin, routers.DefaultRouter):
    pass
class HybridSimpleRouter(HybridRoutingMixin, routers.SimpleRouter):
    pass
class HybridNestedRouter(HybridRoutingMixin, nested_routers.NestedSimpleRouter):
    pass
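# Usage sketch (illustrative; CloudViewSet and StatusView are assumed names,
# not part of this module):
#
#   from rest_framework.views import APIView
#   from rest_framework.response import Response
#
#   class StatusView(APIView):
#       def get(self, request):
#           return Response({'status': 'ok'})
#
#   router = HybridDefaultRouter()
#   router.register(r'clouds', CloudViewSet)          # regular ViewSet
#   router.register(r'status', StatusView, 'status')  # plain APIView
#   urlpatterns = router.urls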
 | 
	mit | 7,499,356,688,837,228,000 | 29.630769 | 104 | 0.64892 | false | 
| 
	brentjens/rm-synthesis | 
	doc/source/conf.py | 
	1 | 
	7970 | 
	# -*- coding: utf-8 -*-
#
# RM-Synthesis documentation build configuration file, created by
# sphinx-quickstart on Mon Jan 30 10:43:10 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.imgmath', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'RM-Synthesis'
copyright = u'2012, M.A. Brentjens <[email protected]>'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1'
# The full version, including alpha/beta/rc tags.
release = '0.9'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'RM-Synthesisdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'RM-Synthesis.tex', u'RM-Synthesis Documentation',
   u'M.A. Brentjens \\textless{}[email protected]\\textgreater{}', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'rm-synthesis', u'RM-Synthesis Documentation',
     [u'M.A. Brentjens <[email protected]>'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'RM-Synthesis', u'RM-Synthesis Documentation',
   u'M.A. Brentjens <[email protected]>', 'RM-Synthesis', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
 | 
	mit | -7,558,729,425,361,679,000 | 31.933884 | 102 | 0.705019 | false | 
| 
	Disiok/poetry-seq2seq | 
	model.py | 
	1 | 
	22939 | 
	#! /usr/bin/env python
#-*- coding:utf-8 -*-
# standard
import os
from IPython import embed
# framework
import tensorflow as tf
from tensorflow.contrib import seq2seq, rnn
from tensorflow.python.layers.core import Dense
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
class Seq2SeqModel:
    """
    Seq2Seq model based on tensorflow.contrib.seq2seq
    """
    def __init__(self, config, mode):
        assert mode.lower() in ['train', 'predict']
        self.config = config
        self.mode = mode.lower()
        self.cell_type = config['cell_type']
        self.hidden_units = config['hidden_units']
        self.bidirectional = config['bidirectional']
        self.decoder_hidden_units = self.hidden_units * (2 if self.bidirectional else 1)
        self.depth = config['depth']
        self.attention_type = config['attention_type']
        self.embedding_size = config['embedding_size']
        self.vocab_size = config['vocab_size']
        self.num_encoder_symbols = config['num_encoder_symbols']
        self.num_decoder_symbols = config['num_decoder_symbols']
        self.use_residual = config['use_residual']
        self.attn_input_feeding = config['attn_input_feeding']
        self.use_dropout = config['use_dropout']
        self.keep_prob = 1.0 - config['dropout_rate']
        self.optimizer = config['optimizer']
        self.learning_rate = config['learning_rate']
        self.max_gradient_norm = config['max_gradient_norm']
        self.global_step = tf.Variable(0, trainable=False, name='global_step')
        self.global_epoch_step = tf.Variable(0, trainable=False, name='global_epoch_step')
        self.increment_global_epoch_step_op = tf.assign(self.global_epoch_step, self.global_epoch_step + 1)
        self.dtype = tf.float16 if config['use_fp16'] else tf.float32
        self.keep_prob_placeholder = tf.placeholder(self.dtype, shape=[], name='keep_prob')
        self.use_beamsearch_decode = False
        if self.mode == 'predict':
            self.beam_width = config['beam_width']
            self.use_beamsearch_decode = True if self.beam_width > 1 else False
            self.max_decode_step = config['max_decode_step']
            self.predict_mode = config['predict_mode']
        elif self.mode == 'train':
            self.train_mode = config['train_mode']
            self.sampling_probability = config['sampling_probability']
        self.start_token = config['start_token']
        self.end_token = config['end_token']
        self.build_model()
    def build_model(self):
        print 'Building model...'
        # Build encoder and decoder networks
        self.init_placeholders()
        self.build_encoder()
        self.build_decoder()
        # Merge all the training summaries
        self.summary_op = tf.summary.merge_all()
    def init_placeholders(self):
        # TODO(sdsuo): Understand dropout
        self.keep_prob_placeholder = tf.placeholder(self.dtype, shape=[], name='keep_prob')
        # embedding_placeholder: [vocab_size, hidden_units]
        self.embedding_placeholder = tf.placeholder(
            name='embedding_placeholder',
            shape=[self.vocab_size, self.hidden_units],
            dtype=self.dtype
        )
        self.embedding = tf.get_variable(
            name='embedding',
            shape=[self.vocab_size, self.hidden_units],
            trainable=False,
        )
        self.assign_embedding_op = self.embedding.assign(self.embedding_placeholder)
        # encode_inputs: [batch_size, time_steps]
        self.encoder_inputs = tf.placeholder(
            name='encoder_inputs',
            shape=(None, None),
            dtype=tf.int32
        )
        # encoder_inputs_length: [batch_size]
        self.encoder_inputs_length = tf.placeholder(
            name='encoder_inputs_length',
            shape=(None,),
            dtype=tf.int32
        )
        # use dynamic batch_size based on input
        self.batch_size = tf.shape(self.encoder_inputs)[0]
        if self.mode == 'train':
            # decoder_inputs: [batch_size, max_time_steps]
            self.decoder_inputs = tf.placeholder(
                dtype=tf.int32,
                shape=(None, None),
                name='decoder_inputs'
            )
            # decoder_inputs_length: [batch_size]
            self.decoder_inputs_length = tf.placeholder(
                dtype=tf.int32,
                shape=(None,),
                name='decoder_inputs_length'
            )
            decoder_start_token = tf.ones(
                shape=[self.batch_size, 1],
                dtype=tf.int32
            ) * self.start_token
            decoder_end_token = tf.ones(
                shape=[self.batch_size, 1],
                dtype=tf.int32
            ) * self.end_token
            # decoder_inputs_train: [batch_size , max_time_steps + 1]
            # insert _GO symbol in front of each decoder input
            self.decoder_inputs_train = tf.concat([decoder_start_token,
                                                  self.decoder_inputs], axis=1)
            # decoder_inputs_length_train: [batch_size]
            self.decoder_inputs_length_train = self.decoder_inputs_length + 1
            # decoder_targets_train: [batch_size, max_time_steps + 1]
            # insert EOS symbol at the end of each decoder input
            self.decoder_targets_train = tf.concat([self.decoder_inputs,
                                                   decoder_end_token], axis=1)
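            # Illustrative example (token values assumed): with start_token G
            # and end_token E, a batch row [w1, w2, w3] becomes
            #   decoder_inputs_train:  [G, w1, w2, w3]
            #   decoder_targets_train: [w1, w2, w3, E]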
    def build_single_cell(self, hidden_units):
        if self.cell_type == 'gru':
            cell_type = rnn.GRUCell
        elif self.cell_type == 'lstm':
            cell_type = rnn.LSTMCell
        else:
            raise RuntimeError('Unknown cell type!')
        cell = cell_type(hidden_units)
        return cell
    def build_encoder_cell(self):
        multi_cell = rnn.MultiRNNCell([self.build_single_cell(self.hidden_units) for _ in range(self.depth)])
        return multi_cell
    def build_encoder(self):
        print 'Building encoder...'
        with tf.variable_scope('encoder'):
            # embedded inputs: [batch_size, time_step, embedding_size]
            self.encoder_inputs_embedded = tf.nn.embedding_lookup(
                params=self.embedding,
                ids=self.encoder_inputs
            )
            # TODO(sdsuo): Decide if we need a Dense input layer here
            if self.bidirectional:
                # Build encoder cell
                self.encoder_cell_fw = self.build_encoder_cell()
                self.encoder_cell_bw = self.build_encoder_cell()
                # Encode input sequences into context vectors
                # encoder_outputs: [batch_size, time_step, cell_output_size]
                # encoder_last_state: [batch_size, cell_output_size]
                self.encoder_outputs_fw_bw, self.encoder_last_state_fw_bw = tf.nn.bidirectional_dynamic_rnn(
                    cell_fw=self.encoder_cell_fw,
                    cell_bw=self.encoder_cell_bw,
                    inputs=self.encoder_inputs_embedded,
                    sequence_length=self.encoder_inputs_length,
                    dtype=self.dtype,
                    time_major=False
                )
                self.encoder_outputs_fw, self.encoder_outputs_bw = self.encoder_outputs_fw_bw
                self.encoder_outputs = tf.concat([self.encoder_outputs_fw, self.encoder_outputs_bw], 2)
                self.encoder_last_state_fw, self.encoder_last_state_bw = self.encoder_last_state_fw_bw
                encoder_last_state_zipped = zip(self.encoder_last_state_fw, self.encoder_last_state_bw)
                encoder_last_state_list = [rnn.LSTMStateTuple(c=tf.concat([fw.c, bw.c], 1), h=tf.concat([fw.h, bw.h], 1))
                                           for fw, bw in encoder_last_state_zipped]
                self.encoder_last_state = tuple(encoder_last_state_list)
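                # Shape note (assumes cell_type='lstm' and, e.g.,
                # hidden_units=128): each concatenated LSTMStateTuple has c
                # and h of shape [batch_size, 256], i.e. decoder_hidden_units.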
            else:
                self.encoder_cell = self.build_encoder_cell()
                # Encode input sequences into context vectors
                # encoder_outputs: [batch_size, time_step, cell_output_size]
                # encoder_last_state: [batch_size, cell_output_size]
                self.encoder_outputs, self.encoder_last_state = tf.nn.dynamic_rnn(
                    cell=self.encoder_cell,
                    inputs=self.encoder_inputs_embedded,
                    sequence_length=self.encoder_inputs_length,
                    dtype=self.dtype,
                    time_major=False
                )
    def build_decoder_cell(self):
        # TODO(sdsuo): Read up and decide whether to use beam search
        self.attention_mechanism = seq2seq.BahdanauAttention(
            num_units=self.decoder_hidden_units,
            memory=self.encoder_outputs,
            memory_sequence_length=self.encoder_inputs_length
        )
        self.decoder_cell_list = [
            self.build_single_cell(self.decoder_hidden_units) for _ in range(self.depth)
        ]
        # NOTE(sdsuo): Not sure what this does yet
        def attn_decoder_input_fn(inputs, attention):
            if not self.attn_input_feeding:
                return inputs
            # Essential when use_residual=True
            _input_layer = Dense(self.decoder_hidden_units, dtype=self.dtype,
                                 name='attn_input_feeding')
            return _input_layer(tf.concat([inputs, attention], -1))
        # NOTE(sdsuo): Attention mechanism is implemented only on the top decoder layer
        self.decoder_cell_list[-1] = seq2seq.AttentionWrapper(
            cell=self.decoder_cell_list[-1],
            attention_mechanism=self.attention_mechanism,
            attention_layer_size=self.decoder_hidden_units,
            cell_input_fn=attn_decoder_input_fn,
            initial_cell_state=self.encoder_last_state[-1],
            alignment_history=False,
            name='attention_wrapper'
        )
        # NOTE(sdsuo): Not sure why this is necessary
        # To be compatible with AttentionWrapper, the encoder last state
        # of the top layer should be converted into the AttentionWrapperState form
        # We can easily do this by calling AttentionWrapper.zero_state
        # Also if beamsearch decoding is used, the batch_size argument in .zero_state
        # should be ${decoder_beam_width} times the original batch_size
        if self.use_beamsearch_decode:
            batch_size = self.batch_size * self.beam_width
        else:
            batch_size = self.batch_size
        # NOTE(vera): important dimension here
        # embed()
        initial_state = [state for state in self.encoder_last_state]
        initial_state[-1] = self.decoder_cell_list[-1].zero_state(
            batch_size=batch_size,
            dtype=self.dtype
        )
        decoder_initial_state = tuple(initial_state)
        return rnn.MultiRNNCell(self.decoder_cell_list), decoder_initial_state
    def build_train_decoder(self):
        self.decoder_inputs_embedded = tf.nn.embedding_lookup(
            params=self.embedding,
            ids=self.decoder_inputs_train
        )
        if self.train_mode == 'ground_truth':
            training_helper = seq2seq.TrainingHelper(
                inputs=self.decoder_inputs_embedded,
                sequence_length=self.decoder_inputs_length_train,
                time_major=False,
                name='training_helper'
            )
        elif self.train_mode == 'scheduled_sampling':
            training_helper = seq2seq.ScheduledEmbeddingTrainingHelper(
                inputs=self.decoder_inputs_embedded,
                sequence_length=self.decoder_inputs_length_train,
                embedding=lambda inputs: tf.nn.embedding_lookup(self.embedding, inputs),
                sampling_probability=self.sampling_probability,
                name='scheduled_embedding_training_helper'
            )
        else:
            raise NotImplementedError('Train mode: {} is not yet implemented'.format(self.train_mode))
        training_decoder = seq2seq.BasicDecoder(
            cell=self.decoder_cell,
            helper=training_helper,
            initial_state=self.decoder_initial_state,
            output_layer=self.output_layer
        )
        max_decoder_length = tf.reduce_max(self.decoder_inputs_length_train)
        self.decoder_outputs_train, self.decoder_last_state_train, self.decoder_outputs_length_train = seq2seq.dynamic_decode(
            decoder=training_decoder,
            output_time_major=False,
            impute_finished=True,
            maximum_iterations=max_decoder_length
        )
        # NOTE(sdsuo): Not sure why this is necessary
        self.decoder_logits_train = tf.identity(self.decoder_outputs_train.rnn_output)
        # Use argmax to extract decoder symbols to emit
        self.decoder_pred_train = tf.argmax(
            self.decoder_logits_train,
            axis=-1,
            name='decoder_pred_train'
        )
        # masks: masking for valid and padded time steps, [batch_size, max_time_step + 1]
        masks = tf.sequence_mask(
            lengths=self.decoder_inputs_length_train,
            maxlen=max_decoder_length,
            dtype=self.dtype,
            name='masks'
        )
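        # Illustrative example: lengths=[2, 4] with maxlen=4 yields
        #   [[1., 1., 0., 0.],
        #    [1., 1., 1., 1.]]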
        # Computes per word average cross-entropy over a batch
        # Internally calls 'nn_ops.sparse_softmax_cross_entropy_with_logits' by default
        self.loss = seq2seq.sequence_loss(
            logits=self.decoder_logits_train,
            targets=self.decoder_targets_train,
            weights=masks,
            average_across_timesteps=True,
            average_across_batch=True
        )
        # Training summary for the current batch_loss
        tf.summary.scalar('loss', self.loss)
        # Construct graph ops for minimizing loss
        self.init_optimizer()
    def build_predict_decoder(self):
        # start_tokens: [batch_size,]
        start_tokens = tf.ones([self.batch_size,], tf.int32) * self.start_token
        end_token = self.end_token
        if not self.use_beamsearch_decode:
            # Helper to feed inputs for greedy decoding: use the argmax of the output
            if self.predict_mode == 'sample':
                print 'Building sample decoder...'
                decoding_helper = seq2seq.SampleEmbeddingHelper(
                    start_tokens=start_tokens,
                    end_token=end_token,
                    embedding=lambda inputs: tf.nn.embedding_lookup(self.embedding, inputs)
                )
            elif self.predict_mode == 'greedy':
                print 'Building greedy decoder...'
                decoding_helper = seq2seq.GreedyEmbeddingHelper(
                    start_tokens=start_tokens,
                    end_token=end_token,
                    embedding=lambda inputs: tf.nn.embedding_lookup(self.embedding, inputs)
                )
            else:
                raise NotImplementedError('Predict mode: {} is not yet implemented'.format(self.predict_mode))
            inference_decoder = seq2seq.BasicDecoder(
                cell=self.decoder_cell,
                helper=decoding_helper,
                initial_state=self.decoder_initial_state,
                output_layer=self.output_layer
            )
        else:
            raise NotImplementedError('Beamsearch decode is not yet implemented.')
        self.decoder_outputs_decode, self.decoder_last_state_decode, self.decoder_outputs_length_decode = seq2seq.dynamic_decode(
            decoder=inference_decoder,
            output_time_major=False,
            maximum_iterations=self.max_decode_step
        )
        if not self.use_beamsearch_decode:
            self.decoder_pred_decode = tf.expand_dims(self.decoder_outputs_decode.sample_id, -1)
        else:
            raise NotImplementedError('{} mode is not recognized.'.format(self.mode))
    def build_decoder(self):
        print 'Building decoder...'
        with tf.variable_scope('decoder'):
            # Building decoder_cell and decoder_initial_state
            self.decoder_cell, self.decoder_initial_state = self.build_decoder_cell()
            # Output projection layer to convert cell_outputs to logits
            self.output_layer = Dense(self.vocab_size, name='output_projection')
            if self.mode == 'train':
                self.build_train_decoder()
            elif self.mode == 'predict':
                self.build_predict_decoder()
            else:
                raise RuntimeError
    def init_optimizer(self):
        print("Setting optimizer..")
        # Gradients and SGD update operation for training the model
        trainable_params = tf.trainable_variables()
        if self.optimizer.lower() == 'adadelta':
            self.opt = tf.train.AdadeltaOptimizer(learning_rate=self.learning_rate)
        elif self.optimizer.lower() == 'adam':
            self.opt = tf.train.AdamOptimizer(learning_rate=self.learning_rate)
        elif self.optimizer.lower() == 'rmsprop':
            self.opt = tf.train.RMSPropOptimizer(learning_rate=self.learning_rate)
        else:
            self.opt = tf.train.GradientDescentOptimizer(learning_rate=self.learning_rate)
        # Compute gradients of loss w.r.t. all trainable variables
        gradients = tf.gradients(self.loss, trainable_params)
        # Clip gradients by a given maximum_gradient_norm
        clip_gradients, _ = tf.clip_by_global_norm(gradients, self.max_gradient_norm)
        # Update the model
        self.updates = self.opt.apply_gradients(
            zip(clip_gradients, trainable_params), global_step=self.global_step)
    def save(self, sess, saver, path, global_step=None):
        """
        Args:
            sess:
            path:
            var_list:
            global_step:
        Returns:
        """
        save_path = saver.save(sess, save_path=path, global_step=global_step)
        print 'Model saved at {}'.format(save_path)
    def restore(self, sess, saver, path):
        """
        Args:
            sess:
            path:
            var_list:
        Returns:
        """
        saver.restore(sess, save_path=path)
        print 'Model restored from {}'.format(path)
    def train(self, sess, encoder_inputs, encoder_inputs_length,
              decoder_inputs, decoder_inputs_length):
        """Run a train step of the model feeding the given inputs.
        Args:
          sess: TensorFlow session to use.
          encoder_inputs: a numpy int matrix of [batch_size, max_source_time_steps]
              to feed as encoder inputs
          encoder_inputs_length: a numpy int vector of [batch_size]
              to feed as sequence lengths for each element in the given batch
          decoder_inputs: a numpy int matrix of [batch_size, max_target_time_steps]
              to feed as decoder inputs
          decoder_inputs_length: a numpy int vector of [batch_size]
              to feed as sequence lengths for each element in the given batch
        Returns:
          A pair of the batch loss and the merged training summary.
        """
        # Check if the model is in training mode
        if self.mode != 'train':
            raise ValueError('Train step can only be operated in train mode')
        input_feed = self.check_feeds(encoder_inputs, encoder_inputs_length,
                                      decoder_inputs, decoder_inputs_length, False)
        # TODO(sdsuo): Understand keep prob
        input_feed[self.keep_prob_placeholder.name] = self.keep_prob
        output_feed = [
            self.updates,   # Update Op that does optimization
            self.loss,      # Loss for current batch
            self.summary_op # Training summary
        ]
        outputs = sess.run(output_feed, input_feed)
        return outputs[1], outputs[2]   # loss, summary
    def predict(self, sess, encoder_inputs, encoder_inputs_length):
        input_feed = self.check_feeds(encoder_inputs, encoder_inputs_length,
                                      decoder_inputs=None, decoder_inputs_length=None,
                                      predict=True)
        # Input feeds for dropout
        input_feed[self.keep_prob_placeholder.name] = 1.0
        output_feed = [self.decoder_pred_decode]
        outputs = sess.run(output_feed, input_feed)
        # GreedyDecoder: [batch_size, max_time_step]
        # BeamSearchDecoder: [batch_size, max_time_step, beam_width]
        return outputs[0]
    def init_vars(self, sess, embedding):
        sess.run([self.assign_embedding_op], feed_dict={
            self.embedding_placeholder: embedding
        })
    def check_feeds(self, encoder_inputs, encoder_inputs_length,
                    decoder_inputs, decoder_inputs_length, predict):
        """
        Args:
          encoder_inputs: a numpy int matrix of [batch_size, max_source_time_steps]
              to feed as encoder inputs
          encoder_inputs_length: a numpy int vector of [batch_size]
              to feed as sequence lengths for each element in the given batch
          decoder_inputs: a numpy int matrix of [batch_size, max_target_time_steps]
              to feed as decoder inputs
          decoder_inputs_length: a numpy int vector of [batch_size]
              to feed as sequence lengths for each element in the given batch
          predict: a scalar boolean that indicates predict mode
        Returns:
          A feed for the model that consists of encoder_inputs, encoder_inputs_length,
          decoder_inputs, decoder_inputs_length
        """
        input_batch_size = encoder_inputs.shape[0]
        if input_batch_size != encoder_inputs_length.shape[0]:
            raise ValueError("Encoder inputs and their lengths must be equal in their "
                "batch_size, %d != %d" % (input_batch_size, encoder_inputs_length.shape[0]))
        if not predict:
            target_batch_size = decoder_inputs.shape[0]
            if target_batch_size != input_batch_size:
                raise ValueError("Encoder inputs and Decoder inputs must be equal in their "
                    "batch_size, %d != %d" % (input_batch_size, target_batch_size))
            if target_batch_size != decoder_inputs_length.shape[0]:
                raise ValueError("Decoder targets and their lengths must be equal in their "
                    "batch_size, %d != %d" % (target_batch_size, decoder_inputs_length.shape[0]))
        input_feed = {}
        input_feed[self.encoder_inputs.name] = encoder_inputs
        input_feed[self.encoder_inputs_length.name] = encoder_inputs_length
        if not predict:
            input_feed[self.decoder_inputs.name] = decoder_inputs
            input_feed[self.decoder_inputs_length.name] = decoder_inputs_length
        return input_feed
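def build_config():
    # Minimal config sketch so the __main__ block below can run. Every value
    # here is an illustrative assumption (layer sizes, vocab size, token ids,
    # etc.), not a configuration shipped with the original project.
    return {
        'cell_type': 'lstm',
        'hidden_units': 128,
        'bidirectional': True,
        'depth': 2,
        'attention_type': 'bahdanau',
        'embedding_size': 128,
        'vocab_size': 6000,
        'num_encoder_symbols': 6000,
        'num_decoder_symbols': 6000,
        'use_residual': False,
        'attn_input_feeding': False,
        'use_dropout': True,
        'dropout_rate': 0.2,
        'optimizer': 'adam',
        'learning_rate': 0.001,
        'max_gradient_norm': 5.0,
        'use_fp16': False,
        'train_mode': 'ground_truth',
        'sampling_probability': 0.1,
        'start_token': 0,
        'end_token': 1,
    }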
if __name__ == '__main__':
    # Seq2SeqModel requires a config dict and a mode; build_config() above
    # provides a minimal illustrative config so this block actually runs.
    model = Seq2SeqModel(build_config(), mode='train')
    embed()
 | 
	mit | -8,786,661,998,776,761,000 | 39.314587 | 128 | 0.594097 | false | 
| 
	examachine/pisi | 
	tests/constantstests.py | 
	1 | 
	2098 | 
	# Copyright (C) 2005, TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
# Author:  Eray Ozkural <[email protected]>
import unittest
import pisi.context as ctx
class ContextTestCase(unittest.TestCase):
    
    def testConstness(self):
        const = ctx.const
        # test that we can get a const attribute
        try:
            test = const.package_suffix
            self.assertNotEqual(test, "")
        except AttributeError:
            self.fail("Couldn't get const attribute")
        # test binding a new constant
        const.test = "test binding"
    
        # test re-binding (which is illegal)
        try:
            const.test = "test rebinding"
            # we shouldn't reach here
            self.fail("Rebinding a constant works. Something is wrong!")
        except:
            # we achieved our goal with this error. In fact, this is a
            # ConstError, but we can't catch it directly here
            pass
        # test unbinding (which is also illegal)
        try:
            del const.test
            # we shouldn't reach here
            self.fail("Unbinding a constant works. Something is wrong!")
        except:
            # we achieved our goal with this error. In fact, this is a
            # ConstError, but we can't catch it directly here
            pass
    def testConstValues(self):
        const = ctx.const
        constDict = {
            "actions_file": "actions.py",
            "setup_func": "setup",
            "metadata_xml": "metadata.xml"
            }
            
        for k in constDict.keys():
            if hasattr(const, k):
                value = getattr(const, k)
                self.assertEqual(value, constDict[k])
            else:
                self.fail("Constants does not have an attribute named %s" % k)
suite = unittest.makeSuite(ContextTestCase)
 | 
	gpl-3.0 | -8,041,903,705,030,690,000 | 29.852941 | 79 | 0.585796 | false | 
| 
	michalczaplinski/pitchfork | 
	tests/test_pitchfork.py | 
	1 | 
	1447 | 
	from pitchfork.pitchfork import search, Review
import json
import unittest
class TestReview(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.review = search('mogwai', 'come on')
    def test_review(self):
        self.assertIsInstance(self.review, Review)
    def test_review_album(self):
        self.assertEqual(self.review.album(), "Come On Die Young")
    def test_review_artist(self):
        self.assertEqual(self.review.artist(), 'Mogwai')
    def test_review_best_new_music(self):
        self.assertEqual(self.review.best_new_music(), True)
    def test_review_label(self):
        self.assertEqual(self.review.label(), 'Chemikal Underground')
    def test_review_year(self):
        self.assertEqual(self.review.year(), '1999/2014')
    def test_score(self):
        self.assertEqual(self.review.score(), 8.3)
    def test_editorial(self):
        self.assertTrue(self.review.editorial().startswith('Though few of their songs contain actual words'))
    def test_review_url(self):
        self.assertEqual(self.review.url, '/reviews/albums/19466-mogwai-come-on-die-young-deluxe-edition/')
    def test_review_to_json(self):
        input_dict = self.review._json_safe_dict()
        output_dict = json.loads(self.review.to_json())
        for input_key in input_dict.keys():
            self.assertEqual(output_dict[input_key], input_dict[input_key])
if __name__ == '__main__':
    unittest.main()
 | 
	mit | 450,088,298,809,097,200 | 30.456522 | 109 | 0.665515 | false | 
| 
	korepwx/tfsnippet | 
	tests/trainer/test_base_trainer.py | 
	1 | 
	8778 | 
	import functools
import numpy as np
import pytest
import tensorflow as tf
from mock import Mock
from tfsnippet.dataflows import DataFlow
from tfsnippet.scaffold import TrainLoop, AnnealingVariable, EventKeys
from tfsnippet.trainer import *
from tfsnippet.utils import EventSource
class BaseTrainerTestCase(tf.test.TestCase):
    def test_props(self):
        loop = Mock(valid_metric_name='valid_loss')
        t = BaseTrainer(loop)
        self.assertIs(loop, t.loop)
        self.assertIsInstance(t.events, EventSource)
    def test_add_and_remove_hooks(self):
        loop = Mock(
            valid_metric_name='valid_loss',
            print_logs=Mock(return_value=None, __repr__=lambda o: 'print_logs')
        )
        df = Mock()
        eval1 = Evaluator(loop, 1., [], df)
        eval2 = Evaluator(loop, 2., [], df)
        anneal1 = AnnealingVariable('anneal1', 1., .5)
        anneal2 = AnnealingVariable('anneal2', 2., .5)
        # test add
        t = BaseTrainer(loop)
        t.log_after_steps(3)
        t.log_after_epochs(4)
        t.evaluate_after_steps(
            Mock(return_value=None, __repr__=lambda o: 'eval'), 5)
        t.evaluate_after_epochs(
            Mock(return_value=None, __repr__=lambda o: 'eval'), 6)
        t.anneal_after_steps(
            Mock(return_value=None, __repr__=lambda o: 'anneal'), 7)
        t.anneal_after_epochs(
            Mock(return_value=None, __repr__=lambda o: 'anneal'), 8)
        t.evaluate_after_steps(eval1, 9)
        t.evaluate_after_epochs(eval2, 10)
        t.anneal_after_steps(anneal1, 11)
        t.anneal_after_epochs(anneal2, 12)
        t.log_after(steps=13)
        t.log_after(epochs=14)
        t.evaluate_after(
            Mock(return_value=None, __repr__=lambda o: 'eval2'),
            steps=15
        )
        t.evaluate_after(
            Mock(return_value=None, __repr__=lambda o: 'eval2'),
            epochs=16
        )
        t.anneal_after(
            Mock(return_value=None, __repr__=lambda o: 'anneal2'),
            steps=17
        )
        t.anneal_after(
            Mock(return_value=None, __repr__=lambda o: 'anneal2'),
            epochs=18
        )
        self.assertEqual(
            repr(t.events._event_handlers_map[EventKeys.STEP_EVALUATION]),
            '[eval:step:5, {!r}:step:9, eval2:step:15]'.format(eval1.run)
        )
        self.assertEqual(
            repr(t.events._event_handlers_map[EventKeys.STEP_ANNEALING]),
            '[anneal:step:7, {!r}:step:11, anneal2:step:17]'.
            format(anneal1.anneal)
        )
        self.assertEqual(
            repr(t.events._event_handlers_map[EventKeys.STEP_LOGGING]),
            '[print_logs:step:3, print_logs:step:13]'
        )
        self.assertEqual(
            repr(t.events._event_handlers_map[EventKeys.EPOCH_EVALUATION]),
            '[eval:epoch:6, {!r}:epoch:10, eval2:epoch:16]'.format(eval2.run)
        )
        self.assertEqual(
            repr(t.events._event_handlers_map[EventKeys.EPOCH_ANNEALING]),
            '[anneal:epoch:8, {!r}:epoch:12, anneal2:epoch:18]'.
            format(anneal2.anneal)
        )
        self.assertEqual(
            repr(t.events._event_handlers_map[EventKeys.EPOCH_LOGGING]),
            '[print_logs:epoch:4, print_logs:epoch:14]'
        )
        # test remove
        t.remove_log_hooks()
        self.assertNotIn(
            EventKeys.STEP_LOGGING, t.events._event_handlers_map)
        self.assertNotIn(
            EventKeys.EPOCH_LOGGING, t.events._event_handlers_map)
        t.remove_validation_hooks()
        self.assertNotIn(
            EventKeys.STEP_EVALUATION, t.events._event_handlers_map)
        self.assertNotIn(
            EventKeys.EPOCH_EVALUATION, t.events._event_handlers_map)
        t.remove_annealing_hooks()
        self.assertNotIn(
            EventKeys.STEP_ANNEALING, t.events._event_handlers_map)
        self.assertNotIn(
            EventKeys.EPOCH_ANNEALING, t.events._event_handlers_map)
        # test error add
        func_list = [
            t.log_after,
            functools.partial(t.evaluate_after, Mock()),
            functools.partial(t.anneal_after, Mock()),
        ]
        kwargs_list = [
            {'steps': None, 'epochs': None},
            {'steps': 1, 'epochs': 1}
        ]
        for func in func_list:
            for kwargs in kwargs_list:
                with pytest.raises(
                        ValueError, match='One and only one of `epochs` and '
                                          '`steps` should be specified'):
                    func(**kwargs)
    def test_hook_freq(self):
        loop = Mock(
            valid_metric_name='valid_loss',
            print_logs=Mock(return_value=None, __repr__=lambda o: 'print_logs')
        )
        t = BaseTrainer(loop)
        f = Mock()
        t.evaluate_after(f, steps=5)
        for i in range(1, 6):
            t.loop.step = i
            t.events.fire(EventKeys.STEP_EVALUATION, t)
        t.loop.step = 7
        t.events.fire(EventKeys.STEP_EVALUATION, t)
        t.loop.step = 10
        t.events.fire(EventKeys.STEP_EVALUATION, t)
        self.assertEqual(f.call_count, 2)
    def test_run(self):
        with self.test_session() as session:
            df = DataFlow.arrays([np.arange(6, dtype=np.float32)], batch_size=4)
            def log_event(m, trainer):
                logged_events.append((m, trainer))
            logged_events = []
            # test default loss weight and merged feed dict
            with TrainLoop([], max_epoch=2) as loop:
                t = BaseTrainer(loop)
                t._run_step = Mock(return_value=None)
                t._iter_steps = Mock(wraps=lambda: loop.iter_steps(df))
                for key in [EventKeys.BEFORE_EPOCH,
                            EventKeys.BEFORE_STEP,
                            EventKeys.STEP_ANNEALING,
                            EventKeys.STEP_EVALUATION,
                            EventKeys.STEP_LOGGING,
                            EventKeys.AFTER_STEP,
                            EventKeys.EPOCH_ANNEALING,
                            EventKeys.EPOCH_EVALUATION,
                            EventKeys.EPOCH_LOGGING,
                            EventKeys.AFTER_EPOCH]:
                    t.events.on(key, functools.partial(log_event, key))
                t.run()
                self.assertEqual(4, len(t._run_step.call_args_list))
                for i, call_args in enumerate(t._run_step.call_args_list[:-2]):
                    call_session, call_payload = call_args[0]
                    self.assertIs(session, call_session)
                    self.assertEqual(i + 1, call_payload[0])
                    self.assertIsInstance(call_payload[1], tuple)
                    self.assertEqual(1, len(call_payload[1]))
                    np.testing.assert_equal(
                        np.arange(6, dtype=np.float32)[i * 4: (i + 1) * 4],
                        call_payload[1][0]
                    )
                expected_logged_events = sum(
                    [
                        [
                            (EventKeys.BEFORE_EPOCH, t),
                        ] + sum([
                            [
                                (EventKeys.BEFORE_STEP, t),
                                (EventKeys.STEP_EVALUATION, t),
                                (EventKeys.STEP_ANNEALING, t),
                                (EventKeys.STEP_LOGGING, t),
                                (EventKeys.AFTER_STEP, t),
                            ]
                            for step in [0, 1]
                        ], []) + [
                            (EventKeys.EPOCH_EVALUATION, t),
                            (EventKeys.EPOCH_ANNEALING, t),
                            (EventKeys.EPOCH_LOGGING, t),
                            (EventKeys.AFTER_EPOCH, t)
                        ]
                        for epoch in [0, 1]
                    ],
                    []
                )
                self.assertListEqual(logged_events, expected_logged_events)
            # test re-entrant error
            with TrainLoop([], max_epoch=1) as loop:
                t = BaseTrainer(loop)
                t._run_step = Mock(return_value=None)
                t._iter_steps = Mock(wraps=lambda: loop.iter_steps(df))
                def reentrant_error(trainer):
                    self.assertIs(trainer, t)
                    with pytest.raises(
                            RuntimeError, match=r'`run\(\)` is not re-entrant'):
                        t.run()
                reentrant_error = Mock(wraps=reentrant_error)
                t.events.on(EventKeys.AFTER_STEP, reentrant_error)
                t.run()
                self.assertTrue(reentrant_error.called)
 | 
	mit | -1,548,108,472,786,013,200 | 37.5 | 80 | 0.503418 | false | 
| 
	mancoast/CPythonPyc_test | 
	fail/312_test_platform.py | 
	1 | 
	7787 | 
	import sys
import os
import unittest
import platform
import subprocess
from test import support
class PlatformTest(unittest.TestCase):
    def test_architecture(self):
        res = platform.architecture()
    if hasattr(os, "symlink"):
        def test_architecture_via_symlink(self): # issue3762
            def get(python):
                cmd = [python, '-c',
                    'import platform; print(platform.architecture())']
                p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
                return p.communicate()
            real = os.path.realpath(sys.executable)
            link = os.path.abspath(support.TESTFN)
            os.symlink(real, link)
            try:
                self.assertEqual(get(real), get(link))
            finally:
                os.remove(link)
    def test_platform(self):
        for aliased in (False, True):
            for terse in (False, True):
                res = platform.platform(aliased, terse)
    def test_system(self):
        res = platform.system()
    def test_node(self):
        res = platform.node()
    def test_release(self):
        res = platform.release()
    def test_version(self):
        res = platform.version()
    def test_machine(self):
        res = platform.machine()
    def test_processor(self):
        res = platform.processor()
    def setUp(self):
        self.save_version = sys.version
        self.save_subversion = sys.subversion
        self.save_platform = sys.platform
    def tearDown(self):
        sys.version = self.save_version
        sys.subversion = self.save_subversion
        sys.platform = self.save_platform
    def test_sys_version(self):
        # Old test.
        for input, output in (
            ('2.4.3 (#1, Jun 21 2006, 13:54:21) \n[GCC 3.3.4 (pre 3.3.5 20040809)]',
             ('CPython', '2.4.3', '', '', '1', 'Jun 21 2006 13:54:21', 'GCC 3.3.4 (pre 3.3.5 20040809)')),
            ('IronPython 1.0.60816 on .NET 2.0.50727.42',
             ('IronPython', '1.0.60816', '', '', '', '', '.NET 2.0.50727.42')),
            ('IronPython 1.0 (1.0.61005.1977) on .NET 2.0.50727.42',
             ('IronPython', '1.0.0', '', '', '', '', '.NET 2.0.50727.42')),
            ):
            # branch and revision are not "parsed", but fetched
            # from sys.subversion.  Ignore them
            (name, version, branch, revision, buildno, builddate, compiler) \
                   = platform._sys_version(input)
            self.assertEqual(
                (name, version, '', '', buildno, builddate, compiler), output)
        # Tests for python_implementation(), python_version(), python_branch(),
        # python_revision(), python_build(), and python_compiler().
        sys_versions = {
            ("2.6.1 (r261:67515, Dec  6 2008, 15:26:00) \n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]",
             ('CPython', 'tags/r261', '67515'), self.save_platform)
            :
                ("CPython", "2.6.1", "tags/r261", "67515",
                 ('r261:67515', 'Dec  6 2008 15:26:00'),
                 'GCC 4.0.1 (Apple Computer, Inc. build 5370)'),
            ("IronPython 2.0 (2.0.0.0) on .NET 2.0.50727.3053", None, "cli")
            :
                ("IronPython", "2.0.0", "", "", ("", ""),
                 ".NET 2.0.50727.3053"),
            ("2.5 (trunk:6107, Mar 26 2009, 13:02:18) \n[Java HotSpot(TM) Client VM (\"Apple Computer, Inc.\")]",
            ('Jython', 'trunk', '6107'), "java1.5.0_16")
            :
                ("Jython", "2.5.0", "trunk", "6107",
                 ('trunk:6107', 'Mar 26 2009'), "java1.5.0_16"),
            ("2.5.2 (63378, Mar 26 2009, 18:03:29)\n[PyPy 1.0.0]",
             ('PyPy', 'trunk', '63378'), self.save_platform)
            :
                ("PyPy", "2.5.2", "trunk", "63378", ('63378', 'Mar 26 2009'),
                 "")
            }
        for (version_tag, subversion, sys_platform), info in \
                sys_versions.items():
            sys.version = version_tag
            if subversion is None:
                if hasattr(sys, "subversion"):
                    del sys.subversion
            else:
                sys.subversion = subversion
            if sys_platform is not None:
                sys.platform = sys_platform
            self.assertEqual(platform.python_implementation(), info[0])
            self.assertEqual(platform.python_version(), info[1])
            self.assertEqual(platform.python_branch(), info[2])
            self.assertEqual(platform.python_revision(), info[3])
            self.assertEqual(platform.python_build(), info[4])
            self.assertEqual(platform.python_compiler(), info[5])
    def test_system_alias(self):
        res = platform.system_alias(
            platform.system(),
            platform.release(),
            platform.version(),
        )
    def test_uname(self):
        res = platform.uname()
        self.assertTrue(any(res))
    def test_java_ver(self):
        res = platform.java_ver()
        if sys.platform == 'java':
            self.assertTrue(all(res))
    def test_win32_ver(self):
        res = platform.win32_ver()
    def test_mac_ver(self):
        res = platform.mac_ver()
        if platform.uname()[0] == 'Darwin':
            # We're on a MacOSX system, check that
            # the right version information is returned
            fd = os.popen('sw_vers', 'r')
            real_ver = None
            for ln in fd:
                if ln.startswith('ProductVersion:'):
                    real_ver = ln.strip().split()[-1]
                    break
            fd.close()
            self.assertFalse(real_ver is None)
            result_list = res[0].split('.')
            expect_list = real_ver.split('.')
            len_diff = len(result_list) - len(expect_list)
            # On Snow Leopard, sw_vers reports 10.6.0 as 10.6
            if len_diff > 0:
                expect_list.extend(['0'] * len_diff)
            self.assertEquals(result_list, expect_list)
            # res[1] claims to contain
            # (version, dev_stage, non_release_version)
            # That information is no longer available
            self.assertEquals(res[1], ('', '', ''))
            if sys.byteorder == 'little':
                self.assertEquals(res[2], 'i386')
            else:
                self.assertEquals(res[2], 'PowerPC')
    def test_dist(self):
        res = platform.dist()
    def test_libc_ver(self):
        executable = sys.executable
        if os.path.isdir(sys.executable) and \
           os.path.exists(sys.executable + '.exe'):
            # Cygwin horror
            executable = sys.executable + '.exe'
        res = platform.libc_ver(executable)
    def test_parse_release_file(self):
        for input, output in (
            # Examples of release file contents:
            ('SuSE Linux 9.3 (x86-64)', ('SuSE Linux ', '9.3', 'x86-64')),
            ('SUSE LINUX 10.1 (X86-64)', ('SUSE LINUX ', '10.1', 'X86-64')),
            ('SUSE LINUX 10.1 (i586)', ('SUSE LINUX ', '10.1', 'i586')),
            ('Fedora Core release 5 (Bordeaux)', ('Fedora Core', '5', 'Bordeaux')),
            ('Red Hat Linux release 8.0 (Psyche)', ('Red Hat Linux', '8.0', 'Psyche')),
            ('Red Hat Linux release 9 (Shrike)', ('Red Hat Linux', '9', 'Shrike')),
            ('Red Hat Enterprise Linux release 4 (Nahant)', ('Red Hat Enterprise Linux', '4', 'Nahant')),
            ('CentOS release 4', ('CentOS', '4', None)),
            ('Rocks release 4.2.1 (Cydonia)', ('Rocks', '4.2.1', 'Cydonia')),
            ('', ('', '', '')), # If there's nothing there.
            ):
            self.assertEqual(platform._parse_release_file(input), output)
def test_main():
    support.run_unittest(
        PlatformTest
    )
if __name__ == '__main__':
    test_main()
 | 
	gpl-3.0 | -7,698,688,418,364,413,000 | 36.985366 | 113 | 0.516502 | false | 
| 
	promediacorp/flask-blog | 
	post.py | 
	1 | 
	6890 | 
	import datetime
import cgi
import re
from bson.objectid import ObjectId
from helper_functions import *
class Post:
    def __init__(self, default_config):
        self.collection = default_config['POSTS_COLLECTION']
        self.response = {'error': None, 'data': None}
        self.debug_mode = default_config['DEBUG']
    def get_posts(self, limit, skip, tag=None, search=None):
        self.response['error'] = None
        cond = {}
        if tag is not None:
            cond = {'tags': tag}
        elif search is not None:
            cond = {'$or': [
                    {'title': {'$regex': search, '$options': 'i'}},
                    {'body': {'$regex': search, '$options': 'i'}},
                    {'preview': {'$regex': search, '$options': 'i'}}]}
        try:
            cursor = self.collection.find(cond).sort(
                'date', direction=-1).skip(skip).limit(limit)
            self.response['data'] = []
            for post in cursor:
                if 'tags' not in post:
                    post['tags'] = []
                if 'comments' not in post:
                    post['comments'] = []
                if 'preview' not in post:
                    post['preview'] = ''
                self.response['data'].append({'id': post['_id'],
                                              'title': post['title'],
                                              'body': post['body'],
                                              'preview': post['preview'],
                                              'date': post['date'],
                                              'permalink': post['permalink'],
                                              'tags': post['tags'],
                                              'author': post['author'],
                                              'comments': post['comments']})
        except Exception, e:
            self.print_debug_info(e, self.debug_mode)
            self.response['error'] = 'Posts not found..'
        return self.response
    def get_post_by_permalink(self, permalink):
        self.response['error'] = None
        try:
            self.response['data'] = self.collection.find_one(
                {'permalink': permalink})
        except Exception, e:
            self.print_debug_info(e, self.debug_mode)
            self.response['error'] = 'Post not found..'
        return self.response
    def get_post_by_id(self, post_id):
        self.response['error'] = None
        try:
            self.response['data'] = self.collection.find_one(
                {'_id': ObjectId(post_id)})
            if self.response['data']:
                if 'tags' not in self.response['data']:
                    self.response['data']['tags'] = ''
                else:
                    self.response['data']['tags'] = ','.join(
                        self.response['data']['tags'])
                if 'preview' not in self.response['data']:
                    self.response['data']['preview'] = ''
        except Exception, e:
            self.print_debug_info(e, self.debug_mode)
            self.response['error'] = 'Post not found..'
        return self.response
    def get_total_count(self, tag=None, search=None):
        cond = {}
        if tag is not None:
            cond = {'tags': tag}
        elif search is not None:
            cond = {'$or': [
                    {'title': {'$regex': search, '$options': 'i'}},
                    {'body': {'$regex': search, '$options': 'i'}},
                    {'preview': {'$regex': search, '$options': 'i'}}]}
        return self.collection.find(cond).count()
    def get_tags(self):
        self.response['error'] = None
        try:
            self.response['data'] = self.collection.aggregate([
                {'$unwind': '$tags'},
                {'$group': {'_id': '$tags', 'count': {'$sum': 1}}},
                {'$sort': {'count': -1}},
                {'$limit': 10},
                {'$project': {'title': '$_id', 'count': 1, '_id': 0}}
            ])
            if self.response['data']['result']:
                self.response['data'] = self.response['data']['result']
            else:
                self.response['data'] = []
        except Exception, e:
            self.print_debug_info(e, self.debug_mode)
            self.response['error'] = 'Get tags error..'
        return self.response
    def create_new_post(self, post_data):
        self.response['error'] = None
        try:
            self.response['data'] = self.collection.insert(post_data)
        except Exception, e:
            self.print_debug_info(e, self.debug_mode)
            self.response['error'] = 'Adding post error..'
        return self.response
    def edit_post(self, post_id, post_data):
        self.response['error'] = None
        del post_data['date']
        del post_data['permalink']
        try:
            self.collection.update(
                {'_id': ObjectId(post_id)}, {"$set": post_data}, upsert=False)
            self.response['data'] = True
        except Exception as e:
            self.print_debug_info(e, self.debug_mode)
            self.response['error'] = 'Post update error..'
        return self.response
    def delete_post(self, post_id):
        self.response['error'] = None
        try:
            if self.get_post_by_id(post_id) and self.collection.remove({'_id': ObjectId(post_id)}):
                self.response['data'] = True
            else:
                self.response['data'] = False
        except Exception as e:
            self.print_debug_info(e, self.debug_mode)
            self.response['error'] = 'Deleting post error..'
        return self.response
    @staticmethod
    def validate_post_data(post_data):
        # permalink = random_string(12)
        whitespace = re.compile(r'\s')
        permalink = whitespace.sub("-", post_data['title']).lower()
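        # Illustrative example (not from the original source): a title such as
        # "My First Post" yields the permalink "my-first-post"; each whitespace
        # character is replaced individually, so runs of spaces become runs of
        # dashes. Note also that cgi.escape below is Python 2 era API; it was
        # removed in Python 3.8 in favour of html.escape.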
        post_data['title'] = cgi.escape(post_data['title'])
        post_data['preview'] = cgi.escape(post_data['preview'], quote=True)
        post_data['body'] = cgi.escape(post_data['body'], quote=True)
        post_data['date'] = datetime.datetime.utcnow()
        post_data['permalink'] = permalink
        return post_data
    @staticmethod
    def print_debug_info(msg, show=False):
        if show:
            import sys
            import os
            error_color = '\033[32m'
            error_end = '\033[0m'
            error = {'type': sys.exc_info()[0].__name__,
                     'file': os.path.basename(sys.exc_info()[2].tb_frame.f_code.co_filename),
                     'line': sys.exc_info()[2].tb_lineno,
                     'details': str(msg)}
            print error_color
            print '\n\n---\nError type: %s in file: %s on line: %s\nError details: %s\n---\n\n'\
                  % (error['type'], error['file'], error['line'], error['details'])
            print error_end
 | 
	mit | -2,047,660,337,586,939,400 | 36.650273 | 99 | 0.475907 | false | 
| 
	andrewharvey/asgs-stylesheets | 
	configure.py | 
	1 | 
	4492 | 
	#!/usr/bin/env python
# Copyright (c) 2011, Development Seed, Inc.
#               2011, Andrew Harvey <[email protected]>
#               All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in
#       the documentation and/or other materials provided with the
#       distribution.
#     * Neither the name of the Development Seed, Inc. nor the names of
#       its contributors may be used to endorse or promote products
#       derived from this software without specific prior written
#       permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import json
from sys import path
from os.path import join
import argparse
#################################
## argparse
parser = argparse.ArgumentParser(description='Configure an MML file with datasource settings')
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default='5432')
parser.add_argument('--dbname', default='abs')
parser.add_argument('--user', default='abs')
parser.add_argument('--password', default='abs')
parser.add_argument('--srs', default='+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over')
parser.add_argument('--shapedir', default='./layers/')
# Increase performance if you are only rendering a particular area by
# specifying a bounding box to restrict queries. Format is "XMIN,YMIN,XMAX,YMAX" in the
# same units as the database (probably spherical mercator meters). The
# whole world is "-20037508.34,-20037508.34,20037508.34,20037508.34".
# Leave blank to let Mapnik estimate.
parser.add_argument('--extent', default='12570320.00,-5403474.50,17711958.00,-1636391.88')
parser.add_argument('--mml', required=True)
args = parser.parse_args()
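# Example invocation (hypothetical host and credentials), assuming an MML
# project named "abs" lives alongside this script:
#   ./configure.py --mml abs --host db.example.org --user mapper --password secret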
#################################
## configure mml
mml = join(path[0], args.mml + '/' + args.mml + '.mml')
shoreline_300 = args.shapedir.rstrip('/') + '/shoreline_300.shp'
processed_p = args.shapedir.rstrip('/') + '/processed_p.shp'
with open(mml, 'r') as f:
  newf = json.loads(f.read())
with open(mml, 'w') as f:
  for layer in newf["Layer"]:
    if "Datasource" in layer:
      ds_type = layer["Datasource"].get("type")
      if ds_type and ds_type == "postgis":
        layer["Datasource"]["host"] = args.host
        layer["Datasource"]["port"] = args.port
        layer["Datasource"]["dbname"] = args.dbname
        layer["Datasource"]["user"] = args.user
        layer["Datasource"]["password"] = args.password
        layer["Datasource"]["extent"] = args.extent
        layer["srs"] = args.srs
    else:
      if layer["id"] == "shoreline_300":
        layer["Datasource"] = dict();
        layer["Datasource"]["file"] = shoreline_300
        layer["Datasource"]["type"] = 'shape'
        layer["geometry"] = 'polygon'
        layer["srs"] = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over'
      elif layer["id"] == "processed_p":
        layer["Datasource"] = dict();
        layer["Datasource"]["file"] = processed_p
        layer["Datasource"]["type"] = 'shape'
        layer["geometry"] = 'polygon'
        layer["srs"] = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over'
  f.write(json.dumps(newf, indent=2))
 | 
	cc0-1.0 | 5,764,988,682,530,616,000 | 42.61165 | 168 | 0.672752 | false | 
| 
	pedromagnus/kilink | 
	kilink/kilink.py | 
	1 | 
	8947 | 
	# Copyright 2011-2013 Facundo Batista
# All Rights Reserved
"""The server and main app for kilink."""
import json
import logging
import time
from functools import update_wrapper
from flask import (
    Flask,
    jsonify,
    make_response,
    redirect,
    render_template,
    request,
)
from flask.ext.assets import Environment
from flask_babel import Babel
from flask_babel import gettext as _
from sqlalchemy import create_engine
import backend
import loghelper
from config import config, LANGUAGES
from decorators import crossdomain
from metrics import StatsdClient
# set up flask
app = Flask(__name__)
app.config.from_object(__name__)
app.config["STATIC_URL"] = 'static'
app.config["STATIC_ROOT"] = 'static'
app.config["PROPAGATE_EXCEPTIONS"] = False
babel = Babel(app)
# flask-assets
assets = Environment(app)
assets.init_app(app)
# logger
logger = logging.getLogger('kilink.kilink')
# metrics
metrics = StatsdClient("linkode")
def nocache(f):
    """Decorator to make a page un-cacheable."""
    def new_func(*args, **kwargs):
        """The new function."""
        resp = make_response(f(*args, **kwargs))
        resp.headers['Cache-Control'] = 'public, max-age=0'
        return resp
    return update_wrapper(new_func, f)
def measure(metric_name):
    """Decorator generator to send metrics counting and with timing."""
    def _decorator(oldf):
        """The decorator itself."""
        def newf(*args, **kwargs):
            """The function to replace."""
            tini = time.time()
            try:
                result = oldf(*args, **kwargs)
            except Exception as exc:
                name = "%s.error.%s" % (metric_name, exc.__class__.__name__)
                metrics.count(name, 1)
                raise
            else:
                tdelta = time.time() - tini
                metrics.count(metric_name + '.ok', 1)
                metrics.timing(metric_name, tdelta)
                return result
        # need to fix the name because it's used by flask
        newf.func_name = oldf.func_name
        return newf
    return _decorator
# accesory pages
@app.route('/about')
@measure("about")
def about():
    """Show the about page."""
    return render_template('_about.html')
@app.route('/tools')
@measure("tools")
def tools():
    """Show the tools page."""
    return render_template('_tools.html')
# views
@app.route('/')
@measure("index")
def index():
    """The base page."""
    render_dict = {
        'value': '',
        'button_text': _('Create linkode'),
        'kid_info': '',
        'tree_info': json.dumps(False),
    }
    return render_template('_new.html', **render_dict)
@app.route('/', methods=['POST'])
@measure("server.create")
def create():
    """Create a kilink."""
    content = request.form['content']
    text_type = request.form['text_type']
    logger.debug("Create start; type=%r size=%d", text_type, len(content))
    if text_type[:6] == "auto: ":
        text_type = text_type[6:]
    klnk = kilinkbackend.create_kilink(content, text_type)
    url = "/%s" % (klnk.kid,)
    logger.debug("Create done; kid=%s", klnk.kid)
    return redirect(url, code=303)
@app.route('/<kid>', methods=['POST'])
@app.route('/<kid>/<parent>', methods=['POST'])
@measure("server.update")
def update(kid, parent=None):
    """Update a kilink."""
    content = request.form['content']
    text_type = request.form['text_type']
    logger.debug("Update start; kid=%r parent=%r type=%r size=%d",
                 kid, parent, text_type, len(content))
    if parent is None:
        root = kilinkbackend.get_root_node(kid)
        parent = root.revno
    klnk = kilinkbackend.update_kilink(kid, parent, content, text_type)
    new_url = "/%s/%s" % (kid, klnk.revno)
    logger.debug("Update done; kid=%r revno=%r", klnk.kid, klnk.revno)
    return redirect(new_url, code=303)
@app.route('/<kid>')
@app.route('/<kid>/<revno>')
@app.route('/l/<kid>')
@app.route('/l/<kid>/<revno>')
@nocache
@measure("server.show")
def show(kid, revno=None):
    """Show the kilink content"""
    # get the content
    logger.debug("Show start; kid=%r revno=%r", kid, revno)
    if revno is None:
        klnk = kilinkbackend.get_root_node(kid)
        revno = klnk.revno
    else:
        klnk = kilinkbackend.get_kilink(kid, revno)
    content = klnk.content
    text_type = klnk.text_type
    # get the tree
    tree, nodeq = build_tree(kid, revno)
    render_dict = {
        'value': content,
        'button_text': _('Save new version'),
        'kid_info': "%s/%s" % (kid, revno),
        'tree_info': json.dumps(tree) if tree != {} else False,
        'current_revno': revno,
        'text_type': text_type,
    }
    logger.debug("Show done; quantity=%d", nodeq)
    return render_template('_new.html', **render_dict)
def build_tree(kid, revno):
    """Build the tree for a given kilink id."""
    nodes = []
    for treenode in kilinkbackend.get_kilink_tree(kid):
        url = "/%s/%s" % (kid, treenode.revno)
        parent = treenode.parent
        nodes.append({
            'order': treenode.order,
            'parent': parent,
            'revno': treenode.revno,
            'url': url,
            'timestamp': str(treenode.timestamp),
            'selected': treenode.revno == revno,
        })
    root = [n for n in nodes if n['parent'] is None][0]
    fringe = [root, ]
    while fringe:
        node = fringe.pop()
        children = [n for n in nodes if n['parent'] == node['revno']]
        node['contents'] = children
        fringe.extend(children)
    return root, len(nodes)
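# Illustrative sketch (assumed shape, not from the original source): for a
# kilink whose root revno "1" has a single child "2", build_tree would return
# something like
#   ({'revno': '1', 'contents': [{'revno': '2', 'contents': [], ...}], ...}, 2)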
# API
@app.route('/api/1/linkodes/', methods=['POST'])
@crossdomain(origin='*')
@measure("api.create")
def api_create():
    """Create a kilink."""
    content = request.form['content']
    text_type = request.form.get('text_type', "")
    logger.debug("API create start; type=%r size=%d", text_type, len(content))
    klnk = kilinkbackend.create_kilink(content, text_type)
    ret_json = jsonify(linkode_id=klnk.kid, revno=klnk.revno)
    response = make_response(ret_json)
    response.headers['Location'] = 'http://%s/%s/%s' % (
        config["server_host"], klnk.kid, klnk.revno)
    logger.debug("API create done; kid=%s", klnk.kid)
    return response, 201
@app.route('/api/1/linkodes/<kid>', methods=['POST'])
@crossdomain(origin='*')
@measure("api.update")
def api_update(kid):
    """Update a kilink."""
    content = request.form['content']
    parent = request.form['parent']
    text_type = request.form['text_type']
    logger.debug("API update start; kid=%r parent=%r type=%r size=%d",
                 kid, parent, text_type, len(content))
    try:
        klnk = kilinkbackend.update_kilink(kid, parent, content, text_type)
    except backend.KilinkNotFoundError:
        logger.debug("API update done; kid %r not found", kid)
        response = make_response()
        return response, 404
    logger.debug("API update done; kid=%r revno=%r", klnk.kid, klnk.revno)
    ret_json = jsonify(revno=klnk.revno)
    response = make_response(ret_json)
    response.headers['Location'] = 'http://%s/%s/%s' % (
        config["server_host"], klnk.kid, klnk.revno)
    return response, 201
@app.route('/api/1/linkodes/<kid>/<revno>', methods=['GET'])
@app.route('/api/1/linkodes/<kid>', methods=['GET'])
@crossdomain(origin='*')
@measure("api.get")
def api_get(kid, revno=None):
    """Get the kilink and revno content"""
    logger.debug("API get; kid=%r revno=%r", kid, revno)
    if revno is None:
        klnk = kilinkbackend.get_root_node(kid)
        revno = klnk.revno
    else:
        klnk = kilinkbackend.get_kilink(kid, revno)
    # get the tree
    tree, nodeq = build_tree(kid, revno)
    logger.debug("API get done; type=%r size=%d len_tree=%d",
                 klnk.text_type, len(klnk.content), nodeq)
    ret_json = jsonify(content=klnk.content, text_type=klnk.text_type,
                       tree=tree)
    return ret_json
@app.errorhandler(backend.KilinkNotFoundError)
def handle_not_found_error(error):
    """Return 404 on kilink not found"""
    if request.url_rule.endpoint.startswith('api_'):
        response = jsonify({'message': error.message})
    else:
        response = render_template('_404.html')
    logger.debug(error.message)
    return response, 404
@babel.localeselector
def get_locale():
    """Return the best matched language supported."""
    return request.accept_languages.best_match(LANGUAGES.keys())
if __name__ == "__main__":
    # load config
    config.load_file("configs/development.yaml")
    # log setup
    handlers = loghelper.setup_logging(config['log_directory'], verbose=True)
    for h in handlers:
        app.logger.addHandler(h)
        h.setLevel(logging.DEBUG)
    app.logger.setLevel(logging.DEBUG)
    # set up the backend
    engine = create_engine(config["db_engine"], echo=True)
    kilinkbackend = backend.KilinkBackend(engine)
    app.run(debug=True, host='0.0.0.0')
 | 
	gpl-3.0 | -927,307,090,625,272,600 | 27.676282 | 78 | 0.612831 | false | 
| 
	locationlabs/confab | 
	confab/options.py | 
	1 | 
	4182 | 
	"""
Options for managing Confab.
"""
from os import getcwd
from os.path import basename
from fabric.api import env, task
from fabric.utils import _AttributeDict
from difflib import unified_diff
from magic import Magic
from re import match
def _should_render(mime_type):
    """
    Return whether a template file with a particular mime type
    should be rendered.
    Some files may need to be excluded from template rendering;
    such files will be copied verbatim.
    """
    return next((True for pattern in ['text/', 'application/xml'] if match(pattern, mime_type)),
                False)
def _is_empty(mime_type):
    """
    Return whether a template file is an empty file.
    """
    return mime_type == 'inode/x-empty'
def _is_not_temporary(file_name):
    """
    Return whether a file name does not represent a temporary file.
    When listing configuration files, we usually want temporary
    files to be ignored.
    """
    return not file_name.endswith('~')
def _is_not_internal(file_name):
    """
    Return whether a file name does not represent internal usage.
    When listing configuration files, we want to omit internal
    files, especially if they are used as Jinja includes
    """
    return not basename(file_name).startswith('_')
def _filter_func(file_name):
    """
    Apply the default filter, which excludes temporary and internal files.
    """
    return _is_not_temporary(file_name) and _is_not_internal(file_name)
def _get_mime_type(file_name):
    """
    Return the mime type of a file.
    The mime_type will be used to determine if a configuration file is text.
    """
    return Magic(mime=True).from_file(file_name)
def _diff(a, b, fromfile=None, tofile=None):
    """
    Return a diff using '---', '+++', and '@@' control lines.
    By default, uses unified_diff.
    """
    return unified_diff(a, b, fromfile=fromfile, tofile=tofile)
def _as_dict(module):
    """
    Return the publicly named values in the module's __dict__.
    """
    try:
        return {k: v for k, v in module.__dict__.iteritems() if not k[0:1] == '_'}
    except AttributeError:
        return {}
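# Illustrative example (hypothetical module contents): a module defining
# TEMPLATE_DIR = '/etc/confab' and _secret = 'x' would come back from
# _as_dict(module) as {'TEMPLATE_DIR': '/etc/confab'}, with the
# underscore-prefixed name dropped.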
def _get_base_dir():
    """
    Returns the base directory for user's template and data directories.
    """
    return env.environmentdef.directory or getcwd()
# Options that control how confab runs.
#
# These are in opposition to options likely to change
# between different runs of confab, such as directories,
# environments, roles, hosts, etc.
options = _AttributeDict({
    # Should yes be assumed for interactive prompts?
    'assume_yes': False,
    # How to compute a file's mime_type?
    'get_mime_type': _get_mime_type,
    # How to determine if a template should be rendered?
    'should_render': _should_render,
    # How to determine if a template is an empty file?
    'is_empty': _is_empty,
    # How to filter available templates within the jinja environment?
    'filter_func': _filter_func,
    # How to determine diffs?
    'diff': _diff,
    # How to get dictionary configuration from module data?
    'module_as_dict': _as_dict,
    # Base directory for template and data directories.
    'get_base_dir': _get_base_dir,
    # What is the name of the template directory?
    'get_templates_dir': lambda: 'templates',
    # What is the name of the data directory?
    'get_data_dir': lambda: 'data',
    # What is the name of the generated directory?
    'get_generated_dir': lambda: 'generated',
    # What is the name of the remotes directory?
    'get_remotes_dir': lambda: 'remotes',
})
class Options(object):
    """
    Context manager to temporarily set options.
    """
    def __init__(self, **kwargs):
        self.kwargs = kwargs
        self.previous = {}
    def __enter__(self):
        for (k, v) in self.kwargs.iteritems():
            self.previous[k] = options.get(k)
            options[k] = v
        return self
    def __exit__(self, exc_type, value, traceback):
        for k in self.kwargs.keys():
            options[k] = self.previous[k]
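# Minimal usage sketch (hypothetical call site): temporarily enable assume_yes
# for a block of confab operations; the previous value is restored on exit.
#
#   with Options(assume_yes=True):
#       ...  # interactive prompts are auto-confirmed here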
@task
def assume_yes():
    """
    Set the option to ``assume_yes`` in other tasks.
    """
    options.assume_yes = True
 | 
	apache-2.0 | -6,501,699,333,116,751,000 | 24.345455 | 96 | 0.646341 | false | 
| 
	jpartogi/django-job-board | 
	job_board/urls.py | 
	1 | 
	1230 | 
	from django.conf.urls.defaults import *
from django.contrib.sitemaps import FlatPageSitemap, GenericSitemap
from job_board.models import Job
from job_board.feeds import JobFeed
from job_board.forms import JobForm
from job_board.views import JobFormPreview, job_list, job_detail, job_search
feeds = {
    'jobs': JobFeed,
}
info_dict = {
    'queryset': Job.objects.filter(),
    'date_field': 'posted'
}
sitemaps = {
    'flatpages': FlatPageSitemap,
    'jobs': GenericSitemap(info_dict, priority=0.6),
}
urlpatterns = patterns('',
    url(r'^feed/(?P<url>.*)/$',
        'django.contrib.syndication.views.feed',
        {'feed_dict': feeds},
        name='job-feeds'),
    url(r'^sitemap.xml$',
        'django.contrib.sitemaps.views.sitemap',
        {'sitemaps': sitemaps},
        name='job-sitemap'),
    
    url(r'^new/$',
        JobFormPreview(JobForm),
        name='job-form'),
    url(r'^(?P<object_id>\d+)/(?P<slug>[\w-]+)/$',
        job_detail,
        name='job-detail'),
    url(r'^wmd/', include('wmd.urls')),
    url(r'^search/$',
        job_search,
        name='job-search'),
    url(r'^$',
        job_list,
        name='job-list'), # This must be last after everything else has been evaluated
) | 
	bsd-3-clause | 4,233,101,760,313,957,000 | 23.137255 | 86 | 0.604065 | false | 
| 
	coco-project/coco | 
	coco/core/auth/authentication_backends.py | 
	1 | 
	3945 | 
	from coco.contract.errors import AuthenticationError, ConnectionError, \
    UserNotFoundError
from coco.core.helpers import get_internal_ldap_connected, get_user_backend_connected
from coco.core.models import BackendGroup, BackendUser, \
    CollaborationGroup
from django.contrib.auth.models import User
from django.core.exceptions import PermissionDenied
import logging
logger = logging.getLogger(__name__)
class BackendProxyAuthentication(object):
    """
    Class used to authenticate with the user backends.
    More info: https://docs.djangoproject.com/en/1.8/topics/auth/default/#django.contrib.auth.authenticate
    """
    def authenticate(self, username=None, password=None):
        """
        :inherit.
        """
        # check if the user already exists in our system
        # if so, use the defined backend_pk for validating the credentials on the backend
        # if its a Django only user, disallow the login
        user = None
        if User.objects.filter(username=username).exists():
            user = User.objects.get(username=username)
            if hasattr(user, 'backend_user'):
                username = user.backend_user.backend_pk
            else:
                return None  # not allowed, Django only user
        try:
            internal_ldap = get_internal_ldap_connected()
            user_backend = get_user_backend_connected()
            user_backend.auth_user(username, password)
            if user is not None:  # existing user
                if not user.check_password(password):
                    user.set_password(password)  # XXX: not needed. should we leave it empty?
                    internal_ldap.set_user_password(username, password)
                    user.save()
            else:  # new user
                uid = BackendUser.generate_internal_uid()
                group = self.create_user_groups(username, uid)
                user = self.create_users(username, password, uid, group.backend_group)
                group.add_user(user.backend_user)
            if user.is_active:
                return user
            else:
                return None
        except AuthenticationError:
            raise PermissionDenied
        except UserNotFoundError:
            if user is not None:  # exists locally but not on backend
                user.delete()
        except ConnectionError as ex:
            logger.exception(ex)
            return None
        finally:
            try:
                internal_ldap.disconnect()
                user_backend.disconnect()
            except:
                pass
    def create_user_groups(self, name, gid):
        """
        Create the groups for the logging-in user.
        :param name: The name of the group to create.
        :param gid: The group's ID (on the backend).
        """
        collaboration_group = CollaborationGroup(
            name=name,
            is_single_user_group=True
        )
        collaboration_group.save()
        backend_group = BackendGroup(
            django_group=collaboration_group,
            backend_id=gid,
            backend_pk=name
        )
        backend_group.save()
        return collaboration_group
    def create_users(self, username, password, uid, primary_group):
        """
        Create the Django users for the logging-in user.
        :param username: The user's username.
        :param primary_group: The user's primary group.
        """
        user = User(username=username, password=password)
        user.save()
        backend_user = BackendUser(
            django_user=user,
            backend_id=uid,
            backend_pk=username,
            primary_group=primary_group
        )
        backend_user.save()
        return user
    def get_user(self, user_id):
        """
        :inherit.
        """
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None
 | 
	bsd-3-clause | -1,043,595,838,817,111,200 | 33.008621 | 106 | 0.586565 | false | 
| 
	ADL175/data-structures | 
	src/graph.py | 
	1 | 
	7286 | 
	"""Creates a Graph data structure, featuring graph traversal and two shortest path algorithms."""
import timeit
import random
class Graph(object):
    """Define the Graph class structure."""
    def __init__(self):
        """Make an empty dictionary."""
        self.graph_dict = {}
    def add_node(self, value):
        """Check if node of given value exists in dictionary.If not, add it with an empty list."""
        try:
            self.graph_dict[value]
        except KeyError:
            self.graph_dict[value] = []
    def add_edge(self, val1, val2, weight=0):
        """Ensure that nodes of val1 and val2 exist (creating them if they don't.Then make an edge connecting val1 to val2."""
        if [val1, val2, weight] not in self.edges():
            self.add_node(val1)
            self.add_node(val2)
            self.graph_dict[val1].append([val2, weight])
        else:
            raise KeyError("Edge already exists.")
    def nodes(self):
        """Return a list of all keys in dictionary."""
        return list(self.graph_dict.keys())
    def edges(self):
        """Return a list of all edges in dictionary."""
        to_return = []
        for keys, values in self.graph_dict.items():
            for i in values:
                to_return.append([keys, i[0], i[1]])
        return to_return
    def del_node(self, val):
        """Delete a node from the graph, and from all edge pointers."""
        try:
            del self.graph_dict[val]
            for keys, values in self.graph_dict.items():
                for i in values:
                    if i[0] == val:
                        values.remove(i)
        except KeyError:
            raise KeyError("No such node exists.")
    def del_edge(self, val1, val2):
        """Delete an edge from graph."""
        try:
            for node in self.graph_dict[val1]:
                if node[0] == val2:
                    self.graph_dict[val1].remove(node)
        except KeyError:
            raise KeyError("No such node exists.")
    def has_node(self, val):
        """Check if graph has a given node in it."""
        try:
            self.graph_dict[val]
            return True
        except KeyError:
            return False
    def neighbors(self, val):
        """Return all nodes connected to given node."""
        try:
            return self.graph_dict[val]
        except KeyError:
            raise KeyError("No such node exists.")
    def adjacent(self, val1, val2):
        """Return True if edge exists, else return false."""
        try:
            self.graph_dict[val2]
            return len(list(filter(lambda node: node[0] == val2, self.graph_dict[val1]))) > 0
        except KeyError:
            raise KeyError("Value given not in graph.")
    def depth_first_traversal(self, val):
        """Return a list of all nodes connected to given start pointbased on a depth first algorithm."""
        from stack import Stack
        seen = []
        next_up = Stack()
        try:
            while True:
                if val not in seen:
                    seen.append(val)
                    for i in self.graph_dict[val][::-1]:
                        next_up.push(i)
                if len(next_up) == 0:
                    break
                val = next_up.pop()[0]
            return seen
        except KeyError:
            raise KeyError('Given value does not exist.')
    def breadth_first_traversal(self, val):
        """Return a list of all nodes connected to given start pointbased on a breadth first algorithm."""
        from que_ import Queue
        seen = []
        next_up = Queue()
        try:
            while True:
                if val not in seen:
                    seen.append(val)
                    for i in self.graph_dict[val]:
                        next_up.enqueue(i)
                if next_up.size() == 0:
                    break
                val = next_up.dequeue()[0]
            return seen
        except KeyError:
            raise KeyError('Given value does not exist.')
    def dijkstra(self, val1, val2):
        """An implementation of Dijkstra's shortest path algorithm.
        Makes use of a priority queue to find the shortest path between two nodes.
        Returns the distance of the target node from the origin, as well as the
        optimal path.
        """
        from priorityq import Priority_Q
        the_list = self.nodes()
        the_queue = Priority_Q()
        to_return = {}
        for i in the_list:
            if i == val1:
                the_queue.insert(i, self.graph_dict[i], 0)
            else:
                the_queue.insert(i, self.graph_dict[i])
        while len(the_queue.heap.heap) > 0:
            current = the_queue.pop()
            for neighbor in current['neighbors']:
                alt = current['dist'] + neighbor[1]
                the_queue.decrease_priority(neighbor[0], alt, current['value'])
            to_return[current['value']] = current
        path = []
        curr = to_return[val2]
        while True:
            path.append(curr['value'])
            if curr['prev']:
                curr = to_return[curr['prev']]
            else:
                return [to_return[val2]['dist'], path[::-1]]
    def bellman_ford(self, vertex_source, target):
        """An implementation the Bellman Ford shortest path algorithm.
        Makes use of a priority queue to find the shortest path between two nodes.
        Returns the distance of the node from the original, as well as the most
        optimal path.
        """
        vertices = self.nodes()
        list_edges = self.edges()
        distance = {}
        predecessor = {}
        for vertex_v in vertices:
            distance[vertex_v] = float('inf')
            predecessor[vertex_v] = None
        distance[vertex_source] = 0
        for i in range(len(vertices)):
            for ji in list_edges:
                if distance[ji[0]] + ji[2] < distance[ji[1]]:
                    distance[ji[1]] = distance[ji[0]] + ji[2]
                    predecessor[ji[1]] = ji[0]
        for i in list_edges:
            if distance[i[0]] + i[2] < distance[i[1]]:
                raise ValueError('Graph contains a negative-weight cycle')
        path = []
        curr = target
        while True:
            path.append(curr)
            if predecessor[curr]:
                curr = predecessor[curr]
            else:
                return [distance[target], path[::-1]]
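# Illustrative usage (hypothetical weights): both shortest-path methods return
# [distance, path]. For example, on a graph built with
#   g = Graph(); g.add_edge('a', 'b', 1); g.add_edge('b', 'c', 2)
# g.dijkstra('a', 'c') and g.bellman_ford('a', 'c') should each yield
# [3, ['a', 'b', 'c']].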
def wrapper(func, *args, **kwargs):  #pragma no cover
    """Create a value for a function with a specific arguement called to it."""
    def wrapped():
        return func(*args, **kwargs)
    return wrapped
    # code found at http://pythoncentral.io/time-a-python-function/
if __name__ == '__main__':
    # This block is included for the requirement portion of the traversals assignment.
    time_test_graph = Graph()
    for i in range(100):
        time_test_graph.add_edge(i, i*2 + 1)
        time_test_graph.add_edge(i, i*2 + 2)
    wrapped1 = wrapper(time_test_graph.breadth_first_traversal, 0)
    wrapped2 = wrapper(time_test_graph.depth_first_traversal, 0)
    print("Breadth first: ", timeit.timeit(wrapped1, number=10000))
    print("Depth first: ", timeit.timeit(wrapped2, number=10000))
 | 
	mit | -7,696,677,436,234,390,000 | 34.198068 | 126 | 0.542136 | false | 
| 
	Harmon758/discord.py | 
	discord/client.py | 
	1 | 
	48861 | 
	"""
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
import asyncio
import logging
import signal
import sys
import traceback
from typing import Any, Generator, List, Optional, Sequence, TYPE_CHECKING, TypeVar, Union
import aiohttp
from .user import User
from .invite import Invite
from .template import Template
from .widget import Widget
from .guild import Guild
from .channel import _channel_factory
from .enums import ChannelType
from .mentions import AllowedMentions
from .errors import *
from .enums import Status, VoiceRegion
from .flags import ApplicationFlags
from .gateway import *
from .activity import BaseActivity, create_activity
from .voice_client import VoiceClient
from .http import HTTPClient
from .state import ConnectionState
from . import utils
from .object import Object
from .backoff import ExponentialBackoff
from .webhook import Webhook
from .iterators import GuildIterator
from .appinfo import AppInfo
from .ui.view import View
from .stage_instance import StageInstance
__all__ = (
    'Client',
)
if TYPE_CHECKING:
    from .abc import SnowflakeTime
log = logging.getLogger(__name__)
def _cancel_tasks(loop):
    tasks = {t for t in asyncio.all_tasks(loop=loop) if not t.done()}
    if not tasks:
        return
    log.info('Cleaning up after %d tasks.', len(tasks))
    for task in tasks:
        task.cancel()
    loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
    log.info('All tasks finished cancelling.')
    for task in tasks:
        if task.cancelled():
            continue
        if task.exception() is not None:
            loop.call_exception_handler({
                'message': 'Unhandled exception during Client.run shutdown.',
                'exception': task.exception(),
                'task': task
            })
def _cleanup_loop(loop):
    try:
        _cancel_tasks(loop)
        loop.run_until_complete(loop.shutdown_asyncgens())
    finally:
        log.info('Closing the event loop.')
        loop.close()
class Client:
    r"""Represents a client connection that connects to Discord.
    This class is used to interact with the Discord WebSocket and API.
    A number of options can be passed to the :class:`Client`.
    Parameters
    -----------
    max_messages: Optional[:class:`int`]
        The maximum number of messages to store in the internal message cache.
        This defaults to ``1000``. Passing in ``None`` disables the message cache.
        .. versionchanged:: 1.3
            Allow disabling the message cache and change the default size to ``1000``.
    loop: Optional[:class:`asyncio.AbstractEventLoop`]
        The :class:`asyncio.AbstractEventLoop` to use for asynchronous operations.
        Defaults to ``None``, in which case the default event loop is used via
        :func:`asyncio.get_event_loop()`.
    connector: :class:`aiohttp.BaseConnector`
        The connector to use for connection pooling.
    proxy: Optional[:class:`str`]
        Proxy URL.
    proxy_auth: Optional[:class:`aiohttp.BasicAuth`]
        An object that represents proxy HTTP Basic Authorization.
    shard_id: Optional[:class:`int`]
        Integer starting at ``0`` and less than :attr:`.shard_count`.
    shard_count: Optional[:class:`int`]
        The total number of shards.
    application_id: :class:`int`
        The client's application ID.
    intents: :class:`Intents`
        The intents that you want to enable for the session. This is a way of
        disabling and enabling certain gateway events from triggering and being sent.
        If not given, defaults to a regularly constructed :class:`Intents` class.
        .. versionadded:: 1.5
    member_cache_flags: :class:`MemberCacheFlags`
        Allows for finer control over how the library caches members.
        If not given, defaults to cache as much as possible with the
        currently selected intents.
        .. versionadded:: 1.5
    chunk_guilds_at_startup: :class:`bool`
        Indicates if :func:`.on_ready` should be delayed to chunk all guilds
        at start-up if necessary. This operation is incredibly slow for large
        amounts of guilds. The default is ``True`` if :attr:`Intents.members`
        is ``True``.
        .. versionadded:: 1.5
    status: Optional[:class:`.Status`]
        A status to start your presence with upon logging on to Discord.
    activity: Optional[:class:`.BaseActivity`]
        An activity to start your presence with upon logging on to Discord.
    allowed_mentions: Optional[:class:`AllowedMentions`]
        Control how the client handles mentions by default on every message sent.
        .. versionadded:: 1.4
    heartbeat_timeout: :class:`float`
        The maximum numbers of seconds before timing out and restarting the
        WebSocket in the case of not receiving a HEARTBEAT_ACK. Useful if
        processing the initial packets take too long to the point of disconnecting
        you. The default timeout is 60 seconds.
    guild_ready_timeout: :class:`float`
        The maximum number of seconds to wait for the GUILD_CREATE stream to end before
        preparing the member cache and firing READY. The default timeout is 2 seconds.
        .. versionadded:: 1.4
    assume_unsync_clock: :class:`bool`
        Whether to assume the system clock is unsynced. This applies to the ratelimit handling
        code. If this is set to ``True``, the default, then the library uses the time to reset
        a rate limit bucket given by Discord. If this is ``False`` then your system clock is
        used to calculate how long to sleep for. If this is set to ``False`` it is recommended to
        sync your system clock to Google's NTP server.
        .. versionadded:: 1.3
    Attributes
    -----------
    ws
        The websocket gateway the client is currently connected to. Could be ``None``.
    loop: :class:`asyncio.AbstractEventLoop`
        The event loop that the client uses for asynchronous operations.
    """
    def __init__(self, *, loop=None, **options):
        self.ws = None
        self.loop = asyncio.get_event_loop() if loop is None else loop
        self._listeners = {}
        self.shard_id = options.get('shard_id')
        self.shard_count = options.get('shard_count')
        connector = options.pop('connector', None)
        proxy = options.pop('proxy', None)
        proxy_auth = options.pop('proxy_auth', None)
        unsync_clock = options.pop('assume_unsync_clock', True)
        self.http = HTTPClient(connector, proxy=proxy, proxy_auth=proxy_auth, unsync_clock=unsync_clock, loop=self.loop)
        self._handlers = {
            'ready': self._handle_ready
        }
        self._hooks = {
            'before_identify': self._call_before_identify_hook
        }
        self._connection = self._get_state(**options)
        self._connection.shard_count = self.shard_count
        self._closed = False
        self._ready = asyncio.Event()
        self._connection._get_websocket = self._get_websocket
        self._connection._get_client = lambda: self
        if VoiceClient.warn_nacl:
            VoiceClient.warn_nacl = False
            log.warning("PyNaCl is not installed, voice will NOT be supported")
    # internals
    def _get_websocket(self, guild_id=None, *, shard_id=None):
        return self.ws
    def _get_state(self, **options):
        return ConnectionState(dispatch=self.dispatch, handlers=self._handlers,
                               hooks=self._hooks, http=self.http, loop=self.loop, **options)
    def _handle_ready(self):
        self._ready.set()
    @property
    def latency(self):
        """:class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds.
        This could be referred to as the Discord WebSocket protocol latency.
        """
        ws = self.ws
        return float('nan') if not ws else ws.latency
    def is_ws_ratelimited(self):
        """:class:`bool`: Whether the websocket is currently rate limited.
        This can be useful to know when deciding whether you should query members
        using HTTP or via the gateway.
        .. versionadded:: 1.6
        """
        if self.ws:
            return self.ws.is_ratelimited()
        return False
    @property
    def user(self):
        """Optional[:class:`.ClientUser`]: Represents the connected client. ``None`` if not logged in."""
        return self._connection.user
    @property
    def guilds(self):
        """List[:class:`.Guild`]: The guilds that the connected client is a member of."""
        return self._connection.guilds
    @property
    def emojis(self):
        """List[:class:`.Emoji`]: The emojis that the connected client has."""
        return self._connection.emojis
    @property
    def cached_messages(self):
        """Sequence[:class:`.Message`]: Read-only list of messages the connected client has cached.
        .. versionadded:: 1.1
        """
        return utils.SequenceProxy(self._connection._messages or [])
    @property
    def private_channels(self):
        """List[:class:`.abc.PrivateChannel`]: The private channels that the connected client is participating on.
        .. note::
            This returns only up to 128 most recent private channels due to an internal working
            on how Discord deals with private channels.
        """
        return self._connection.private_channels
    @property
    def voice_clients(self):
        """List[:class:`.VoiceProtocol`]: Represents a list of voice connections.
        These are usually :class:`.VoiceClient` instances.
        """
        return self._connection.voice_clients
    @property
    def application_id(self):
        """Optional[:class:`int`]: The client's application ID.
        If this is not passed via ``__init__`` then this is retrieved
        through the gateway when an event contains the data. Usually
        after :func:`~discord.on_connect` is called.
        """
        return self._connection.application_id
    @property
    def application_flags(self) -> ApplicationFlags:
        """:class:`~discord.ApplicationFlags`: The client's application flags.
        .. versionadded:: 2.0
        """
        return self._connection.application_flags  # type: ignore
    def is_ready(self):
        """:class:`bool`: Specifies if the client's internal cache is ready for use."""
        return self._ready.is_set()
    async def _run_event(self, coro, event_name, *args, **kwargs):
        try:
            await coro(*args, **kwargs)
        except asyncio.CancelledError:
            pass
        except Exception:
            try:
                await self.on_error(event_name, *args, **kwargs)
            except asyncio.CancelledError:
                pass
    def _schedule_event(self, coro, event_name, *args, **kwargs):
        wrapped = self._run_event(coro, event_name, *args, **kwargs)
        # Schedules the task
        return asyncio.create_task(wrapped, name=f'discord.py: {event_name}')
    def dispatch(self, event, *args, **kwargs):
        log.debug('Dispatching event %s', event)
        method = 'on_' + event
        listeners = self._listeners.get(event)
        if listeners:
            removed = []
            for i, (future, condition) in enumerate(listeners):
                if future.cancelled():
                    removed.append(i)
                    continue
                try:
                    result = condition(*args)
                except Exception as exc:
                    future.set_exception(exc)
                    removed.append(i)
                else:
                    if result:
                        if len(args) == 0:
                            future.set_result(None)
                        elif len(args) == 1:
                            future.set_result(args[0])
                        else:
                            future.set_result(args)
                        removed.append(i)
            if len(removed) == len(listeners):
                self._listeners.pop(event)
            else:
                for idx in reversed(removed):
                    del listeners[idx]
        try:
            coro = getattr(self, method)
        except AttributeError:
            pass
        else:
            self._schedule_event(coro, method, *args, **kwargs)
    async def on_error(self, event_method, *args, **kwargs):
        """|coro|
        The default error handler provided by the client.
        By default this prints to :data:`sys.stderr` however it could be
        overridden to have a different implementation.
        Check :func:`~discord.on_error` for more details.
        """
        print(f'Ignoring exception in {event_method}', file=sys.stderr)
        traceback.print_exc()
    # hooks
    async def _call_before_identify_hook(self, shard_id, *, initial=False):
        # This hook is an internal hook that actually calls the public one.
        # It allows the library to have its own hook without stepping on the
        # toes of those who need to override their own hook.
        await self.before_identify_hook(shard_id, initial=initial)
    async def before_identify_hook(self, shard_id, *, initial=False):
        """|coro|
        A hook that is called before IDENTIFYing a session. This is useful
        if you wish to have more control over the synchronization of multiple
        IDENTIFYing clients.
        The default implementation sleeps for 5 seconds.
        .. versionadded:: 1.4
        Parameters
        ------------
        shard_id: :class:`int`
            The shard ID that requested being IDENTIFY'd
        initial: :class:`bool`
            Whether this IDENTIFY is the first initial IDENTIFY.
        """
        if not initial:
            await asyncio.sleep(5.0)
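    # Hypothetical override sketch (not part of the library): a subclass that
    # coordinates IDENTIFYs across processes might wrap the default behaviour:
    #
    #   class MyClient(Client):
    #       async def before_identify_hook(self, shard_id, *, initial=False):
    #           async with my_shared_lock:  # assumed external synchronisation
    #               await super().before_identify_hook(shard_id, initial=initial)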
    # login state management
    async def login(self, token):
        """|coro|
        Logs in the client with the specified credentials.
        Parameters
        -----------
        token: :class:`str`
            The authentication token. Do not prefix this token with
            anything as the library will do it for you.
        Raises
        ------
        :exc:`.LoginFailure`
            The wrong credentials are passed.
        :exc:`.HTTPException`
            An unknown HTTP related error occurred,
            usually when it isn't 200 or the known incorrect credentials
            passing status code.
        """
        log.info('logging in using static token')
        await self.http.static_login(token.strip())
    async def connect(self, *, reconnect=True):
        """|coro|
        Creates a websocket connection and lets the websocket listen
        to messages from Discord. This is a loop that runs the entire
        event system and miscellaneous aspects of the library. Control
        is not resumed until the WebSocket connection is terminated.
        Parameters
        -----------
        reconnect: :class:`bool`
            If we should attempt reconnecting, either due to internet
            failure or a specific failure on Discord's part. Certain
            disconnects that lead to bad state will not be handled (such as
            invalid sharding payloads or bad tokens).
        Raises
        -------
        :exc:`.GatewayNotFound`
            If the gateway to connect to Discord is not found. Usually if this
            is thrown then there is a Discord API outage.
        :exc:`.ConnectionClosed`
            The websocket connection has been terminated.
        """
        backoff = ExponentialBackoff()
        ws_params = {
            'initial': True,
            'shard_id': self.shard_id,
        }
        while not self.is_closed():
            try:
                coro = DiscordWebSocket.from_client(self, **ws_params)
                self.ws = await asyncio.wait_for(coro, timeout=60.0)
                ws_params['initial'] = False
                while True:
                    await self.ws.poll_event()
            except ReconnectWebSocket as e:
                log.info('Got a request to %s the websocket.', e.op)
                self.dispatch('disconnect')
                ws_params.update(sequence=self.ws.sequence, resume=e.resume, session=self.ws.session_id)
                continue
            except (OSError,
                    HTTPException,
                    GatewayNotFound,
                    ConnectionClosed,
                    aiohttp.ClientError,
                    asyncio.TimeoutError) as exc:
                self.dispatch('disconnect')
                if not reconnect:
                    await self.close()
                    if isinstance(exc, ConnectionClosed) and exc.code == 1000:
                        # clean close, don't re-raise this
                        return
                    raise
                if self.is_closed():
                    return
                # If we get connection reset by peer then try to RESUME
                if isinstance(exc, OSError) and exc.errno in (54, 10054):
                    ws_params.update(sequence=self.ws.sequence, initial=False, resume=True, session=self.ws.session_id)
                    continue
                # We should only get this when an unhandled close code happens,
                # such as a clean disconnect (1000) or a bad state (bad token, no sharding, etc)
                # sometimes, discord sends us 1000 for unknown reasons so we should reconnect
                # regardless and rely on is_closed instead
                if isinstance(exc, ConnectionClosed):
                    if exc.code == 4014:
                        raise PrivilegedIntentsRequired(exc.shard_id) from None
                    if exc.code != 1000:
                        await self.close()
                        raise
                retry = backoff.delay()
                log.exception("Attempting a reconnect in %.2fs", retry)
                await asyncio.sleep(retry)
                # Always try to RESUME the connection
                # If the connection is not RESUME-able then the gateway will invalidate the session.
                # This is apparently what the official Discord client does.
                ws_params.update(sequence=self.ws.sequence, resume=True, session=self.ws.session_id)
    async def close(self):
        """|coro|
        Closes the connection to Discord.
        """
        if self._closed:
            return
        self._closed = True
        for voice in self.voice_clients:
            try:
                await voice.disconnect()
            except Exception:
                # if an error happens during disconnects, disregard it.
                pass
        if self.ws is not None and self.ws.open:
            await self.ws.close(code=1000)
        await self.http.close()
        self._ready.clear()
    def clear(self):
        """Clears the internal state of the bot.
        After this, the bot can be considered "re-opened", i.e. :meth:`is_closed`
        and :meth:`is_ready` both return ``False`` along with the bot's internal
        cache cleared.
        """
        self._closed = False
        self._ready.clear()
        self._connection.clear()
        self.http.recreate()
    async def start(self, token, *, reconnect=True):
        """|coro|
        A shorthand coroutine for :meth:`login` + :meth:`connect`.
        Raises
        -------
        TypeError
            An unexpected keyword argument was received.
        """
        await self.login(token)
        await self.connect(reconnect=reconnect)
    def run(self, *args, **kwargs):
        """A blocking call that abstracts away the event loop
        initialisation from you.
        If you want more control over the event loop then this
        function should not be used. Use :meth:`start` coroutine
        or :meth:`connect` + :meth:`login`.
        Roughly equivalent to: ::
            try:
                loop.run_until_complete(start(*args, **kwargs))
            except KeyboardInterrupt:
                loop.run_until_complete(close())
                # cancel all tasks lingering
            finally:
                loop.close()
        .. warning::
            This function must be the last function to call due to the fact that it
            is blocking. That means that registration of events or anything being
            called after this function call will not execute until it returns.
        """
        loop = self.loop
        try:
            loop.add_signal_handler(signal.SIGINT, lambda: loop.stop())
            loop.add_signal_handler(signal.SIGTERM, lambda: loop.stop())
        except NotImplementedError:
            pass
        async def runner():
            try:
                await self.start(*args, **kwargs)
            finally:
                if not self.is_closed():
                    await self.close()
        def stop_loop_on_completion(f):
            loop.stop()
        future = asyncio.ensure_future(runner(), loop=loop)
        future.add_done_callback(stop_loop_on_completion)
        try:
            loop.run_forever()
        except KeyboardInterrupt:
            log.info('Received signal to terminate bot and event loop.')
        finally:
            future.remove_done_callback(stop_loop_on_completion)
            log.info('Cleaning up tasks.')
            _cleanup_loop(loop)
        if not future.cancelled():
            try:
                return future.result()
            except KeyboardInterrupt:
                # I am unsure why this gets raised here but suppress it anyway
                return None
    # properties
    def is_closed(self):
        """:class:`bool`: Indicates if the websocket connection is closed."""
        return self._closed
    @property
    def activity(self):
        """Optional[:class:`.BaseActivity`]: The activity being used upon
        logging in.
        """
        return create_activity(self._connection._activity)
    @activity.setter
    def activity(self, value):
        if value is None:
            self._connection._activity = None
        elif isinstance(value, BaseActivity):
            self._connection._activity = value.to_dict()
        else:
            raise TypeError('activity must derive from BaseActivity.')
    @property
    def allowed_mentions(self):
        """Optional[:class:`~discord.AllowedMentions`]: The allowed mention configuration.
        .. versionadded:: 1.4
        """
        return self._connection.allowed_mentions
    @allowed_mentions.setter
    def allowed_mentions(self, value):
        if value is None or isinstance(value, AllowedMentions):
            self._connection.allowed_mentions = value
        else:
            raise TypeError(f'allowed_mentions must be AllowedMentions not {value.__class__!r}')
    @property
    def intents(self):
        """:class:`~discord.Intents`: The intents configured for this connection.
        .. versionadded:: 1.5
        """
        return self._connection.intents
    # helpers/getters
    @property
    def users(self):
        """List[:class:`~discord.User`]: Returns a list of all the users the bot can see."""
        return list(self._connection._users.values())
    def get_channel(self, id):
        """Returns a channel with the given ID.
        Parameters
        -----------
        id: :class:`int`
            The ID to search for.
        Returns
        --------
        Optional[Union[:class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`]]
            The returned channel or ``None`` if not found.
        """
        return self._connection.get_channel(id)
    def get_stage_instance(self, id) -> Optional[StageInstance]:
        """Returns a stage instance with the given stage channel ID.
        .. versionadded:: 2.0
        Parameters
        -----------
        id: :class:`int`
            The ID to search for.
        Returns
        --------
        Optional[:class:`StageInstance`]
            The stage instance or ``None`` if not found.
        """
        from .channel import StageChannel
        channel = self._connection.get_channel(id)
        if isinstance(channel, StageChannel):
            return channel.instance
    def get_guild(self, id):
        """Returns a guild with the given ID.
        Parameters
        -----------
        id: :class:`int`
            The ID to search for.
        Returns
        --------
        Optional[:class:`.Guild`]
            The guild or ``None`` if not found.
        """
        return self._connection._get_guild(id)
    def get_user(self, id):
        """Returns a user with the given ID.
        Parameters
        -----------
        id: :class:`int`
            The ID to search for.
        Returns
        --------
        Optional[:class:`~discord.User`]
            The user or ``None`` if not found.
        """
        return self._connection.get_user(id)
    def get_emoji(self, id):
        """Returns an emoji with the given ID.
        Parameters
        -----------
        id: :class:`int`
            The ID to search for.
        Returns
        --------
        Optional[:class:`.Emoji`]
            The custom emoji or ``None`` if not found.
        """
        return self._connection.get_emoji(id)
    def get_all_channels(self):
        """A generator that retrieves every :class:`.abc.GuildChannel` the client can 'access'.
        This is equivalent to: ::
            for guild in client.guilds:
                for channel in guild.channels:
                    yield channel
        .. note::
            Just because you receive a :class:`.abc.GuildChannel` does not mean that
            you can communicate in said channel. :meth:`.abc.GuildChannel.permissions_for` should
            be used for that.
        Yields
        ------
        :class:`.abc.GuildChannel`
            A channel the client can 'access'.
        """
        for guild in self.guilds:
            yield from guild.channels
    def get_all_members(self):
        """Returns a generator with every :class:`.Member` the client can see.
        This is equivalent to: ::
            for guild in client.guilds:
                for member in guild.members:
                    yield member
        Yields
        ------
        :class:`.Member`
            A member the client can see.
        """
        for guild in self.guilds:
            yield from guild.members
    # listeners/waiters
    async def wait_until_ready(self):
        """|coro|
        Waits until the client's internal cache is all ready.
        """
        await self._ready.wait()
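    # A minimal sketch of a background task that waits for the cache to fill
    # before using it (``client`` and ``CHANNEL_ID`` are hypothetical names):
    #
    #     async def ticker():
    #         await client.wait_until_ready()
    #         channel = client.get_channel(CHANNEL_ID)
    #         while not client.is_closed():
    #             await channel.send('tick')
    #             await asyncio.sleep(60)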
    def wait_for(self, event, *, check=None, timeout=None):
        """|coro|
        Waits for a WebSocket event to be dispatched.
        This could be used to wait for a user to reply to a message,
        or to react to a message, or to edit a message in a self-contained
        way.
        The ``timeout`` parameter is passed onto :func:`asyncio.wait_for`. By default,
        it does not timeout. Note that this does propagate the
        :exc:`asyncio.TimeoutError` for you in case of timeout and is provided for
        ease of use.
        In case the event returns multiple arguments, a :class:`tuple` containing those
        arguments is returned instead. Please check the
        :ref:`documentation <discord-api-events>` for a list of events and their
        parameters.
        This function returns the **first event that meets the requirements**.
        Examples
        ---------
        Waiting for a user reply: ::
            @client.event
            async def on_message(message):
                if message.content.startswith('$greet'):
                    channel = message.channel
                    await channel.send('Say hello!')
                    def check(m):
                        return m.content == 'hello' and m.channel == channel
                    msg = await client.wait_for('message', check=check)
                    await channel.send(f'Hello {msg.author}!')
        Waiting for a thumbs up reaction from the message author: ::
            @client.event
            async def on_message(message):
                if message.content.startswith('$thumb'):
                    channel = message.channel
                    await channel.send('Send me that \N{THUMBS UP SIGN} reaction, mate')
                    def check(reaction, user):
                        return user == message.author and str(reaction.emoji) == '\N{THUMBS UP SIGN}'
                    try:
                        reaction, user = await client.wait_for('reaction_add', timeout=60.0, check=check)
                    except asyncio.TimeoutError:
                        await channel.send('\N{THUMBS DOWN SIGN}')
                    else:
                        await channel.send('\N{THUMBS UP SIGN}')
        Parameters
        ------------
        event: :class:`str`
            The event name, similar to the :ref:`event reference <discord-api-events>`,
            but without the ``on_`` prefix, to wait for.
        check: Optional[Callable[..., :class:`bool`]]
            A predicate to check what to wait for. The arguments must meet the
            parameters of the event being waited for.
        timeout: Optional[:class:`float`]
            The number of seconds to wait before timing out and raising
            :exc:`asyncio.TimeoutError`.
        Raises
        -------
        asyncio.TimeoutError
            If a timeout is provided and it was reached.
        Returns
        --------
        Any
            Returns no arguments, a single argument, or a :class:`tuple` of multiple
            arguments that mirrors the parameters passed in the
            :ref:`event reference <discord-api-events>`.
        """
        future = self.loop.create_future()
        if check is None:
            def _check(*args):
                return True
            check = _check
        ev = event.lower()
        try:
            listeners = self._listeners[ev]
        except KeyError:
            listeners = []
            self._listeners[ev] = listeners
        listeners.append((future, check))
        return asyncio.wait_for(future, timeout)
    # event registration
    def event(self, coro):
        """A decorator that registers an event to listen to.
        You can find more info about the events on the :ref:`documentation below <discord-api-events>`.
        The event must be a :ref:`coroutine <coroutine>`; if it is not,
        :exc:`TypeError` is raised.
        Example
        ---------
        .. code-block:: python3
            @client.event
            async def on_ready():
                print('Ready!')
        Raises
        --------
        TypeError
            The coroutine passed is not actually a coroutine.
        """
        if not asyncio.iscoroutinefunction(coro):
            raise TypeError('event registered must be a coroutine function')
        setattr(self, coro.__name__, coro)
        log.debug('%s has successfully been registered as an event', coro.__name__)
        return coro
    async def change_presence(self, *, activity=None, status=None, afk=False):
        """|coro|
        Changes the client's presence.
        Example
        ---------
        .. code-block:: python3
            game = discord.Game("with the API")
            await client.change_presence(status=discord.Status.idle, activity=game)
        Parameters
        ----------
        activity: Optional[:class:`.BaseActivity`]
            The activity being done, or ``None`` if no activity should be set.
        status: Optional[:class:`.Status`]
            Indicates what status to change to. If ``None``, then
            :attr:`.Status.online` is used.
        afk: Optional[:class:`bool`]
            Indicates if you are going AFK. This allows the discord
            client to know how to handle push notifications better
            for you in case you are actually idle and not lying.
        Raises
        ------
        :exc:`.InvalidArgument`
            If the ``activity`` parameter is not the proper type.
        """
        if status is None:
            status = 'online'
            status_enum = Status.online
        elif status is Status.offline:
            status = 'invisible'
            status_enum = Status.offline
        else:
            status_enum = status
            status = str(status)
        await self.ws.change_presence(activity=activity, status=status, afk=afk)
        for guild in self._connection.guilds:
            me = guild.me
            if me is None:
                continue
            if activity is not None:
                me.activities = (activity,)
            else:
                me.activities = ()
            me.status = status_enum
    # Guild stuff
    def fetch_guilds(self, *, limit: int = 100, before: SnowflakeTime = None, after: SnowflakeTime = None) -> List[Guild]:
        """Retrieves an :class:`.AsyncIterator` that enables receiving your guilds.
        .. note::
            Using this, you will only receive :attr:`.Guild.owner`, :attr:`.Guild.icon`,
            :attr:`.Guild.id`, and :attr:`.Guild.name` per :class:`.Guild`.
        .. note::
            This method is an API call. For general usage, consider :attr:`guilds` instead.
        Examples
        ---------
        Usage ::
            async for guild in client.fetch_guilds(limit=150):
                print(guild.name)
        Flattening into a list ::
            guilds = await client.fetch_guilds(limit=150).flatten()
            # guilds is now a list of Guild...
        All parameters are optional.
        Parameters
        -----------
        limit: Optional[:class:`int`]
            The number of guilds to retrieve.
            If ``None``, it retrieves every guild you have access to. Note, however,
            that this would make it a slow operation.
            Defaults to ``100``.
        before: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`]
            Retrieves guilds before this date or object.
            If a datetime is provided, it is recommended to use a UTC aware datetime.
            If the datetime is naive, it is assumed to be local time.
        after: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`]
            Retrieve guilds after this date or object.
            If a datetime is provided, it is recommended to use a UTC aware datetime.
            If the datetime is naive, it is assumed to be local time.
        Raises
        ------
        :exc:`.HTTPException`
            Getting the guilds failed.
        Yields
        --------
        :class:`.Guild`
            The guild with the guild data parsed.
        """
        return GuildIterator(self, limit=limit, before=before, after=after)
    async def fetch_template(self, code):
        """|coro|
        Gets a :class:`.Template` from a discord.new URL or code.
        Parameters
        -----------
        code: Union[:class:`.Template`, :class:`str`]
            The Discord Template Code or URL (must be a discord.new URL).
        Raises
        -------
        :exc:`.NotFound`
            The template is invalid.
        :exc:`.HTTPException`
            Getting the template failed.
        Returns
        --------
        :class:`.Template`
            The template from the URL/code.
        """
        code = utils.resolve_template(code)
        data = await self.http.get_template(code)
        return Template(data=data, state=self._connection) # type: ignore
    async def fetch_guild(self, guild_id):
        """|coro|
        Retrieves a :class:`.Guild` from an ID.
        .. note::
            Using this, you will **not** receive :attr:`.Guild.channels`, :attr:`.Guild.members`,
            :attr:`.Member.activity` and :attr:`.Member.voice` per :class:`.Member`.
        .. note::
            This method is an API call. For general usage, consider :meth:`get_guild` instead.
        Parameters
        -----------
        guild_id: :class:`int`
            The guild's ID to fetch from.
        Raises
        ------
        :exc:`.Forbidden`
            You do not have access to the guild.
        :exc:`.HTTPException`
            Getting the guild failed.
        Returns
        --------
        :class:`.Guild`
            The guild from the ID.
        """
        data = await self.http.get_guild(guild_id)
        return Guild(data=data, state=self._connection)
    async def create_guild(self, name: str, region: Optional[VoiceRegion] = None, icon: Any = None, *, code: str = None):
        """|coro|
        Creates a :class:`.Guild`.
        Bot accounts in more than 10 guilds are not allowed to create guilds.
        Parameters
        ----------
        name: :class:`str`
            The name of the guild.
        region: :class:`.VoiceRegion`
            The region for the voice communication server.
            Defaults to :attr:`.VoiceRegion.us_west`.
        icon: :class:`bytes`
            The :term:`py:bytes-like object` representing the icon. See :meth:`.ClientUser.edit`
            for more details on what is expected.
        code: Optional[:class:`str`]
            The code for a template to create the guild with.
            .. versionadded:: 1.4
        Raises
        ------
        :exc:`.HTTPException`
            Guild creation failed.
        :exc:`.InvalidArgument`
            Invalid icon image format given. Must be PNG or JPG.
        Returns
        -------
        :class:`.Guild`
            The guild created. This is not the same guild that is
            added to cache.
        """
        if icon is not None:
            icon = utils._bytes_to_base64_data(icon)
        region = region or VoiceRegion.us_west
        region_value = region.value
        if code:
            data = await self.http.create_from_template(code, name, region_value, icon)
        else:
            data = await self.http.create_guild(name, region_value, icon)
        return Guild(data=data, state=self._connection)
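    # A hedged usage sketch (the guild name is a placeholder; the bot must be
    # in fewer than 10 guilds for this call to succeed):
    #
    #     guild = await client.create_guild(name='My Test Guild')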
    async def fetch_stage_instance(self, channel_id: int) -> StageInstance:
        """|coro|
        Gets a :class:`StageInstance` for a stage channel ID.
        .. versionadded:: 2.0
        Parameters
        -----------
        channel_id: :class:`int`
            The stage channel ID.
        Raises
        -------
        :exc:`.NotFound`
            The stage instance or channel could not be found.
        :exc:`.HTTPException`
            Getting the stage instance failed.
        Returns
        --------
        :class:`StageInstance`
            The stage instance from the stage channel ID.
        """
        data = await self.http.get_stage_instance(channel_id)
        guild = self.get_guild(int(data['guild_id']))
        return StageInstance(guild=guild, state=self._connection, data=data)  # type: ignore
    # Invite management
    async def fetch_invite(self, url: Union[Invite, str], *, with_counts: bool = True, with_expiration: bool = True) -> Invite:
        """|coro|
        Gets an :class:`.Invite` from a discord.gg URL or ID.
        .. note::
            If the invite is for a guild you have not joined, the guild and channel
            attributes of the returned :class:`.Invite` will be :class:`.PartialInviteGuild` and
            :class:`.PartialInviteChannel` respectively.
        Parameters
        -----------
        url: Union[:class:`.Invite`, :class:`str`]
            The Discord invite ID or URL (must be a discord.gg URL).
        with_counts: :class:`bool`
            Whether to include count information in the invite. This fills the
            :attr:`.Invite.approximate_member_count` and :attr:`.Invite.approximate_presence_count`
            fields.
        with_expiration: :class:`bool`
            Whether to include the expiration date of the invite. This fills the
            :attr:`.Invite.expires_at` field.
            .. versionadded:: 2.0
        Raises
        -------
        :exc:`.NotFound`
            The invite has expired or is invalid.
        :exc:`.HTTPException`
            Getting the invite failed.
        Returns
        --------
        :class:`.Invite`
            The invite from the URL/ID.
        """
        invite_id = utils.resolve_invite(url)
        data = await self.http.get_invite(invite_id, with_counts=with_counts, with_expiration=with_expiration)
        return Invite.from_incomplete(state=self._connection, data=data)
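    # A short usage sketch (the invite URL is a placeholder):
    #
    #     invite = await client.fetch_invite('https://discord.gg/abcdef')
    #     print(invite.approximate_member_count)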
    async def delete_invite(self, invite: Union[Invite, str]) -> None:
        """|coro|
        Revokes an :class:`.Invite`, or the URL or ID of an invite.
        You must have the :attr:`~.Permissions.manage_channels` permission in
        the associated guild to do this.
        Parameters
        ----------
        invite: Union[:class:`.Invite`, :class:`str`]
            The invite to revoke.
        Raises
        -------
        :exc:`.Forbidden`
            You do not have permissions to revoke invites.
        :exc:`.NotFound`
            The invite is invalid or expired.
        :exc:`.HTTPException`
            Revoking the invite failed.
        """
        invite_id = utils.resolve_invite(invite)
        await self.http.delete_invite(invite_id)
    # Miscellaneous stuff
    async def fetch_widget(self, guild_id):
        """|coro|
        Gets a :class:`.Widget` from a guild ID.
        .. note::
            The guild must have the widget enabled to get this information.
        Parameters
        -----------
        guild_id: :class:`int`
            The ID of the guild.
        Raises
        -------
        :exc:`.Forbidden`
            The widget for this guild is disabled.
        :exc:`.HTTPException`
            Retrieving the widget failed.
        Returns
        --------
        :class:`.Widget`
            The guild's widget.
        """
        data = await self.http.get_widget(guild_id)
        return Widget(state=self._connection, data=data)
    async def application_info(self):
        """|coro|
        Retrieves the bot's application information.
        Raises
        -------
        :exc:`.HTTPException`
            Retrieving the information failed somehow.
        Returns
        --------
        :class:`.AppInfo`
            The bot's application information.
        """
        data = await self.http.application_info()
        if 'rpc_origins' not in data:
            data['rpc_origins'] = None
        return AppInfo(self._connection, data)
    async def fetch_user(self, user_id):
        """|coro|
        Retrieves a :class:`~discord.User` based on their ID.
        You do not have to share any guilds with the user to get this
        information; however, many operations do require that you do.
        .. note::
            This method is an API call. If you have :attr:`discord.Intents.members` and member cache enabled, consider :meth:`get_user` instead.
        Parameters
        -----------
        user_id: :class:`int`
            The user's ID to fetch from.
        Raises
        -------
        :exc:`.NotFound`
            A user with this ID does not exist.
        :exc:`.HTTPException`
            Fetching the user failed.
        Returns
        --------
        :class:`~discord.User`
            The user you requested.
        """
        data = await self.http.get_user(user_id)
        return User(state=self._connection, data=data)
    async def fetch_channel(self, channel_id):
        """|coro|
        Retrieves a :class:`.abc.GuildChannel` or :class:`.abc.PrivateChannel` with the specified ID.
        .. note::
            This method is an API call. For general usage, consider :meth:`get_channel` instead.
        .. versionadded:: 1.2
        Raises
        -------
        :exc:`.InvalidData`
            An unknown channel type was received from Discord.
        :exc:`.HTTPException`
            Retrieving the channel failed.
        :exc:`.NotFound`
            Invalid Channel ID.
        :exc:`.Forbidden`
            You do not have permission to fetch this channel.
        Returns
        --------
        Union[:class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`]
            The channel from the ID.
        """
        data = await self.http.get_channel(channel_id)
        factory, ch_type = _channel_factory(data['type'])
        if factory is None:
            raise InvalidData('Unknown channel type {type} for channel ID {id}.'.format_map(data))
        if ch_type in (ChannelType.group, ChannelType.private):
            channel = factory(me=self.user, data=data, state=self._connection)
        else:
            guild_id = int(data['guild_id'])
            guild = self.get_guild(guild_id) or Object(id=guild_id)
            channel = factory(guild=guild, state=self._connection, data=data)
        return channel
    async def fetch_webhook(self, webhook_id):
        """|coro|
        Retrieves a :class:`.Webhook` with the specified ID.
        Raises
        --------
        :exc:`.HTTPException`
            Retrieving the webhook failed.
        :exc:`.NotFound`
            Invalid webhook ID.
        :exc:`.Forbidden`
            You do not have permission to fetch this webhook.
        Returns
        ---------
        :class:`.Webhook`
            The webhook you requested.
        """
        data = await self.http.get_webhook(webhook_id)
        return Webhook.from_state(data, state=self._connection)
    async def create_dm(self, user):
        """|coro|
        Creates a :class:`.DMChannel` with this user.
        This should rarely be called, as it is done transparently for most
        people.
        .. versionadded:: 2.0
        Parameters
        -----------
        user: :class:`~discord.abc.Snowflake`
            The user to create a DM with.
        Returns
        -------
        :class:`.DMChannel`
            The channel that was created.
        """
        state = self._connection
        found = state._get_private_channel_by_user(user.id)
        if found:
            return found
        data = await state.http.start_private_message(user.id)
        return state.add_dm_channel(data)
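    # A minimal sketch (``user`` is any :class:`~discord.abc.Snowflake`, e.g.
    # a ``User`` or ``Member`` obtained elsewhere):
    #
    #     channel = await client.create_dm(user)
    #     await channel.send('Hello!')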
    def add_view(self, view: View, *, message_id: Optional[int] = None) -> None:
        """Registers a :class:`~discord.ui.View` for persistent listening.
        This method should be used when a view is composed of components
        that must outlive the lifecycle of the program.
        Parameters
        ------------
        view: :class:`discord.ui.View`
            The view to register for dispatching.
        message_id: Optional[:class:`int`]
            The message ID that the view is attached to. This is currently used to
            refresh the view's state during message update events. If not given
            then message update events are not propagated for the view.
        Raises
        -------
        TypeError
            A view was not passed.
        ValueError
            The view is not persistent. A persistent view has no timeout
            and all of its components have an explicitly provided ``custom_id``.
        """
        if not isinstance(view, View):
            raise TypeError(f'expected an instance of View not {view.__class__!r}')
        if not view.is_persistent():
            raise ValueError('View is not persistent. Items need to have a custom_id set and View must have no timeout')
        self._connection.store_view(view, message_id)
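    # A hedged sketch of persistent-view registration (``MyView`` is a
    # hypothetical ``discord.ui.View`` subclass with ``timeout=None`` and a
    # ``custom_id`` on every item):
    #
    #     client.add_view(MyView())                   # dispatch for any message
    #     client.add_view(MyView(), message_id=1234)  # also refresh on edits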
    @property
    def persistent_views(self) -> Sequence[View]:
        """Sequence[:class:`View`]: A sequence of persistent views added to the client."""
        return self._connection.persistent_views
 | 
	mit | -2,710,249,609,055,593,000 | 32.238776 | 144 | 0.579951 | false | 
| 
	twoodford/audiovisualizer | 
	build-movie.py | 
	1 | 
	1391 | 
	import os
import os.path
import subprocess
import sys
from PIL import Image
LISTF = "_list.txt"
def get_dimensions(fpath):
    #print(fpath)
    return Image.open(fpath).size
def run(folder, outfile, framerate=30, outres=(1920,1080)):
    jpglist = [os.path.join(folder, f) for f in os.listdir(folder) if f.startswith("frame_")]
    dimen = get_dimensions(jpglist[0])
    ratio = float(outres[1])/outres[0]
    # Crop the frames to the output aspect ratio (anchored at the top-left
    # corner), keeping as much of the source image as possible.
    if dimen[0]*ratio < dimen[1]:
        crop = (dimen[0], int(dimen[0]*ratio))
    else:
        crop = (int(dimen[1]/ratio), dimen[1])
    with open(LISTF, "w") as ltxt:
        for f in jpglist:
            ltxt.write("file '"+f+"'\n")
    fsel_args = ["-f", "concat", "-i", LISTF]
    rs_str = "".join(("crop=", str(crop[0]), ":", str(crop[1]),":0:0,scale=",str(outres[0]),":",str(outres[1])))
    enc_flags = ["-pix_fmt", "yuv420p", "-preset", "veryslow", "-crf", "18"]
    args_final = ["ffmpeg", "-r", str(framerate)] + fsel_args + ["-vf", rs_str] + enc_flags + [outfile]
    print(" ".join(args_final))
    subprocess.call(args_final)
    os.remove(LISTF)
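# For reference, the assembled command looks roughly like this (paths and
# numbers are illustrative):
#
#   ffmpeg -r 30 -f concat -i _list.txt \
#       -vf crop=1920:1080:0:0,scale=1920:1080 \
#       -pix_fmt yuv420p -preset veryslow -crf 18 out.mp4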
if __name__=="__main__":
    jpglist = [os.path.join(sys.argv[1], f) for f in os.listdir(sys.argv[1]) if f.startswith("frame_")]
    dimen = get_dimensions(jpglist[0])
    # yuv420p requires even frame dimensions, so round down to the nearest
    # even numbers.
    dimen = (dimen[0] if dimen[0]%2==0 else dimen[0]-1, dimen[1] if dimen[1]%2==0 else dimen[1]-1)
    run(sys.argv[1], sys.argv[2], outres=dimen)
 | 
	apache-2.0 | -3,590,627,063,179,411,500 | 37.638889 | 112 | 0.594536 | false | 
| 
	dsweet04/rekall | 
	rekall-lib/rekall_lib/registry.py | 
	1 | 
	5206 | 
	# Rekall Memory Forensics
# Copyright (C) 2011
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Michael Cohen <[email protected]>
#
# ******************************************************
#
# * This program is free software; you can redistribute it and/or
# * modify it under the terms of the GNU General Public License
# * as published by the Free Software Foundation; either version 2
# * of the License, or (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
# *****************************************************
""" This module implements a class registry.
We scan the memory_plugins directory for all python files and add those classes
which should be registered into their own lookup tables. These are then ordered
as required. The rest of Rekall Memory Forensics will then call onto the
registered classes when needed.
The MetaclassRegistry automatically adds any derived class to the base
class. This means that we do not need to go through a special initialization
step: as soon as a module is imported, the plugin is registered.
"""
__author__ = "Michael Cohen <[email protected]>"
class classproperty(property):
    """A property that can be called on classes."""
    def __get__(self, cls, owner):
        return self.fget(owner)
def memoize(f):
    cache = {}
    def helper(*args):
        cached = cache.get(args, memoize)
        if cached is not memoize:
            return cached
        cached = f(*args)
        cache[args] = cached
        return cached
    return helper
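# A small usage sketch for ``memoize`` (``fib`` is illustrative). The cache is
# keyed on the positional-argument tuple, so arguments must be hashable and
# keyword arguments are not supported:
#
#     @memoize
#     def fib(n):
#         return n if n < 2 else fib(n - 1) + fib(n - 2)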
class UniqueObjectIdMetaclass(type):
    """Give each object a unique ID.
    Unlike id(), this number will not be reused when the objects are destroyed,
    hence it can be used to identify identical objects without keeping them
    around.
    """
    ID = 0
    def __call__(cls, *args, **kwargs):
        res = super(UniqueObjectIdMetaclass, cls).__call__(*args, **kwargs)
        res._object_id = UniqueObjectIdMetaclass.ID  # pylint: disable=protected-access
        UniqueObjectIdMetaclass.ID += 1
        return res
class UniqueObjectIdMixin(object):
    __metaclass__ = UniqueObjectIdMetaclass
class MetaclassRegistry(UniqueObjectIdMetaclass):
    """Automatic Plugin Registration through metaclasses."""
    def __init__(cls, name, bases, env_dict):
        super(MetaclassRegistry, cls).__init__(name, bases, env_dict)
        cls._install_constructors(cls)
        # Attach the classes dict to the baseclass and have all derived classes
        # use the same one:
        for base in bases:
            try:
                cls.classes = base.classes
                cls.classes_by_name = base.classes_by_name
                cls.plugin_feature = base.plugin_feature
                cls.top_level_class = base.top_level_class
                break
            except AttributeError:
                cls.classes = {}
                cls.classes_by_name = {}
                cls.plugin_feature = cls.__name__
                # Keep a reference to the top level class
                cls.top_level_class = cls
        # The following should not be registered as they are abstract. Classes
        # are abstract if they have the __abstract attribute (note this is not
        # inheritable so each abstract class must be explicitly marked).
        abstract_attribute = "_%s__abstract" % name
        if getattr(cls, abstract_attribute, None):
            return
        if not cls.__name__.startswith("Abstract"):
            if cls.__name__ in cls.classes:
                raise RuntimeError(
                    "Multiple definitions for class %s (%s)" % (
                        cls, cls.classes[cls.__name__]))
            cls.classes[cls.__name__] = cls
            name = getattr(cls, "name", None)
            # We expect that classes by name will collide, which is why each
            # value is a list of classes with that name.
            cls.classes_by_name.setdefault(name, []).append(cls)
            try:
                if cls.top_level_class.include_plugins_as_attributes:
                    setattr(cls.top_level_class, cls.__name__, cls)
            except AttributeError:
                pass
        # Allow the class itself to initialize itself.
        cls_initializer = getattr(cls, "_class_init", None)
        if cls_initializer:
            cls_initializer()
    @classmethod
    def _install_constructors(mcs, cls):
        def ByName(self, name):
            for impl in self.classes.values():
                if getattr(impl, "name", None) == name:
                    return impl
        cls.ImplementationByName = classmethod(ByName)
        def ByClass(self, name):
            return self.classes.get(name)
        cls.ImplementationByClass = classmethod(ByClass)
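# A hedged usage sketch (class names are illustrative): any subclass of a base
# that uses this metaclass is recorded at import time, with no explicit
# registration step.
#
#     class Plugin(object):
#         __metaclass__ = MetaclassRegistry
#
#     class PsList(Plugin):          # lands in Plugin.classes['PsList']
#         name = "pslist"
#
#     Plugin.ImplementationByName("pslist")   # -> PsList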
 | 
	gpl-2.0 | -1,171,829,265,467,452,700 | 33.939597 | 87 | 0.619478 | false | 
| 
	AutorestCI/azure-sdk-for-python | 
	azure-mgmt-network/azure/mgmt/network/v2017_11_01/models/available_providers_list_country.py | 
	1 | 
	1381 | 
	# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AvailableProvidersListCountry(Model):
    """Country details.
    :param country_name: The country name.
    :type country_name: str
    :param providers: A list of Internet service providers.
    :type providers: list[str]
    :param states: List of available states in the country.
    :type states:
     list[~azure.mgmt.network.v2017_11_01.models.AvailableProvidersListState]
    """
    _attribute_map = {
        'country_name': {'key': 'countryName', 'type': 'str'},
        'providers': {'key': 'providers', 'type': '[str]'},
        'states': {'key': 'states', 'type': '[AvailableProvidersListState]'},
    }
    def __init__(self, country_name=None, providers=None, states=None):
        super(AvailableProvidersListCountry, self).__init__()
        self.country_name = country_name
        self.providers = providers
        self.states = states
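# A minimal construction sketch (values are placeholders, not real provider
# data):
#
#     country = AvailableProvidersListCountry(
#         country_name='United States',
#         providers=['ProviderA', 'ProviderB'],
#     )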
 | 
	mit | -3,422,688,781,120,634,400 | 36.324324 | 77 | 0.604634 | false | 
| 
	droark/bitcoin | 
	test/functional/rpc_rawtransaction.py | 
	1 | 
	24591 | 
	#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the rawtransaction RPCs.
Test the following RPCs:
   - createrawtransaction
   - signrawtransactionwithwallet
   - sendrawtransaction
   - decoderawtransaction
   - getrawtransaction
"""
from collections import OrderedDict
from decimal import Decimal
from io import BytesIO
from test_framework.messages import CTransaction, ToHex
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, connect_nodes_bi, hex_str_to_bytes
class multidict(dict):
    """Dictionary that allows duplicate keys.
    Constructed with a list of (key, value) tuples. When dumped by the json module,
    will output invalid json with repeated keys, eg:
    >>> json.dumps(multidict([(1,2),(1,2)]))
    '{"1": 2, "1": 2}'
    Used to test calls to rpc methods with repeated keys in the json object."""
    def __init__(self, x):
        dict.__init__(self, x)
        self.x = x
    def items(self):
        return self.x
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(BitcoinTestFramework):
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 3
        self.extra_args = [["-addresstype=legacy", "-txindex"], ["-addresstype=legacy", "-txindex"], ["-addresstype=legacy", "-txindex"]]
    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()
    def setup_network(self):
        super().setup_network()
        connect_nodes_bi(self.nodes, 0, 2)
    def run_test(self):
        self.log.info('prepare some coins for multiple *rawtransaction commands')
        self.nodes[2].generate(1)
        self.sync_all()
        self.nodes[0].generate(101)
        self.sync_all()
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0)
        self.sync_all()
        self.nodes[0].generate(5)
        self.sync_all()
        self.log.info('Test getrawtransaction on genesis block coinbase returns an error')
        block = self.nodes[0].getblock(self.nodes[0].getblockhash(0))
        assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot'])
        self.log.info('Check parameter types and required parameters of createrawtransaction')
        # Test `createrawtransaction` required parameters
        assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction)
        assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [])
        # Test `createrawtransaction` invalid extra parameters
        assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [], {}, 0, False, 'foo')
        # Test `createrawtransaction` invalid `inputs`
        txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000'
        assert_raises_rpc_error(-3, "Expected type array", self.nodes[0].createrawtransaction, 'foo', {})
        assert_raises_rpc_error(-1, "JSON value is not an object as expected", self.nodes[0].createrawtransaction, ['foo'], {})
        assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].createrawtransaction, [{}], {})
        assert_raises_rpc_error(-8, "txid must be of length 64 (not 3, for 'foo')", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {})
        assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", self.nodes[0].createrawtransaction, [{'txid': 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844'}], {})
        assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid}], {})
        assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {})
        assert_raises_rpc_error(-8, "Invalid parameter, vout must be positive", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {})
        assert_raises_rpc_error(-8, "Invalid parameter, sequence number is out of range", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {})
        # Test `createrawtransaction` invalid `outputs`
        address = self.nodes[0].getnewaddress()
        address2 = self.nodes[0].getnewaddress()
        assert_raises_rpc_error(-1, "JSON value is not an array as expected", self.nodes[0].createrawtransaction, [], 'foo')
        self.nodes[0].createrawtransaction(inputs=[], outputs={})  # Should not throw for backwards compatibility
        self.nodes[0].createrawtransaction(inputs=[], outputs=[])
        assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'})
        assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].createrawtransaction, [], {'foo': 0})
        assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'})
        assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1})
        assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)]))
        assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}])
        assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], [{"data": 'aa'}, {"data": "bb"}])
        assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], multidict([("data", 'aa'), ("data", "bb")]))
        assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}])
        assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']])
        # Test `createrawtransaction` invalid `locktime`
        assert_raises_rpc_error(-3, "Expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo')
        assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, -1)
        assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, 4294967296)
        # Test `createrawtransaction` invalid `replaceable`
        assert_raises_rpc_error(-3, "Expected type bool", self.nodes[0].createrawtransaction, [], {}, 0, 'foo')
        self.log.info('Check that createrawtransaction accepts an array and object as outputs')
        tx = CTransaction()
        # One output
        tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99}))))
        assert_equal(len(tx.vout), 1)
        assert_equal(
            tx.serialize().hex(),
            self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]),
        )
        # Two outputs
        tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)])))))
        assert_equal(len(tx.vout), 2)
        assert_equal(
            tx.serialize().hex(),
            self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]),
        )
        # Multiple mixed outputs
        tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')])))))
        assert_equal(len(tx.vout), 3)
        assert_equal(
            tx.serialize().hex(),
            self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}, {'data': '99'}]),
        )
        for type in ["bech32", "p2sh-segwit", "legacy"]:
            addr = self.nodes[0].getnewaddress("", type)
            addrinfo = self.nodes[0].getaddressinfo(addr)
            pubkey = addrinfo["scriptPubKey"]
            self.log.info('sendrawtransaction with missing prevtx info (%s)' %(type))
            # Test `signrawtransactionwithwallet` invalid `prevtxs`
            inputs  = [ {'txid' : txid, 'vout' : 3, 'sequence' : 1000}]
            outputs = { self.nodes[0].getnewaddress() : 1 }
            rawtx   = self.nodes[0].createrawtransaction(inputs, outputs)
            prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1)
            succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
            assert succ["complete"]
            if type == "legacy":
                del prevtx["amount"]
                succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
                assert succ["complete"]
            if type != "legacy":
                assert_raises_rpc_error(-3, "Missing amount", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                    {
                        "txid": txid,
                        "scriptPubKey": pubkey,
                        "vout": 3,
                    }
                ])
            assert_raises_rpc_error(-3, "Missing vout", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                {
                    "txid": txid,
                    "scriptPubKey": pubkey,
                    "amount": 1,
                }
            ])
            assert_raises_rpc_error(-3, "Missing txid", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                {
                    "scriptPubKey": pubkey,
                    "vout": 3,
                    "amount": 1,
                }
            ])
            assert_raises_rpc_error(-3, "Missing scriptPubKey", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                {
                    "txid": txid,
                    "vout": 3,
                    "amount": 1
                }
            ])
        #########################################
        # sendrawtransaction with missing input #
        #########################################
        self.log.info('sendrawtransaction with missing input')
        inputs  = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1}] # won't exist
        outputs = { self.nodes[0].getnewaddress() : 4.998 }
        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
        rawtx   = self.nodes[2].signrawtransactionwithwallet(rawtx)
        # This will raise an exception since there are missing inputs
        assert_raises_rpc_error(-25, "Missing inputs", self.nodes[2].sendrawtransaction, rawtx['hex'])
        #####################################
        # getrawtransaction with block hash #
        #####################################
        # make a tx by sending then generate 2 blocks; block1 has the tx in it
        tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1)
        block1, block2 = self.nodes[2].generate(2)
        self.sync_all()
        # We should be able to get the raw transaction by providing the correct block
        gottx = self.nodes[0].getrawtransaction(tx, True, block1)
        assert_equal(gottx['txid'], tx)
        assert_equal(gottx['in_active_chain'], True)
        # We should not have the 'in_active_chain' flag when we don't provide a block
        gottx = self.nodes[0].getrawtransaction(tx, True)
        assert_equal(gottx['txid'], tx)
        assert 'in_active_chain' not in gottx
        # We should not get the tx if we provide an unrelated block
        assert_raises_rpc_error(-5, "No such transaction found", self.nodes[0].getrawtransaction, tx, True, block2)
        # An invalid block hash should raise the correct errors
        assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getrawtransaction, tx, True, True)
        assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 6, for 'foobar')", self.nodes[0].getrawtransaction, tx, True, "foobar")
        assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 8, for 'abcd1234')", self.nodes[0].getrawtransaction, tx, True, "abcd1234")
        assert_raises_rpc_error(-8, "parameter 3 must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getrawtransaction, tx, True, "ZZZ0000000000000000000000000000000000000000000000000000000000000")
        assert_raises_rpc_error(-5, "Block hash not found", self.nodes[0].getrawtransaction, tx, True, "0000000000000000000000000000000000000000000000000000000000000000")
        # Undo the blocks and check in_active_chain
        self.nodes[0].invalidateblock(block1)
        gottx = self.nodes[0].getrawtransaction(txid=tx, verbose=True, blockhash=block1)
        assert_equal(gottx['in_active_chain'], False)
        self.nodes[0].reconsiderblock(block1)
        assert_equal(self.nodes[0].getbestblockhash(), block2)
        #########################
        # RAW TX MULTISIG TESTS #
        #########################
        # 2of2 test
        addr1 = self.nodes[2].getnewaddress()
        addr2 = self.nodes[2].getnewaddress()
        addr1Obj = self.nodes[2].getaddressinfo(addr1)
        addr2Obj = self.nodes[2].getaddressinfo(addr2)
        # Tests for createmultisig and addmultisigaddress
        assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, ["01020304"])
        self.nodes[0].createmultisig(2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) # createmultisig can only take public keys
        assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1]) # addmultisigaddress can take both pubkeys and addresses so long as they are in the wallet, which is tested here.
        mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr1])['address']
        #use balance deltas instead of absolute values
        bal = self.nodes[2].getbalance()
        # send 1.2 BTC to msig adr
        txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance
        # 2of3 test from different nodes
        bal = self.nodes[2].getbalance()
        addr1 = self.nodes[1].getnewaddress()
        addr2 = self.nodes[2].getnewaddress()
        addr3 = self.nodes[2].getnewaddress()
        addr1Obj = self.nodes[1].getaddressinfo(addr1)
        addr2Obj = self.nodes[2].getaddressinfo(addr2)
        addr3Obj = self.nodes[2].getaddressinfo(addr3)
        mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address']
        txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
        decTx = self.nodes[0].gettransaction(txId)
        rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        #THIS IS AN INCOMPLETE FEATURE
        #NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION
        assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable
        txDetails = self.nodes[0].gettransaction(txId, True)
        rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
        vout = False
        for outpoint in rawTx['vout']:
            if outpoint['value'] == Decimal('2.20000000'):
                vout = outpoint
                break
        bal = self.nodes[0].getbalance()
        inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "amount" : vout['value']}]
        outputs = { self.nodes[0].getnewaddress() : 2.19 }
        rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
        rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs)
        assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
        rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs)
        assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys
        self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
        rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
        # 2of2 test for combining transactions
        bal = self.nodes[2].getbalance()
        addr1 = self.nodes[1].getnewaddress()
        addr2 = self.nodes[2].getnewaddress()
        addr1Obj = self.nodes[1].getaddressinfo(addr1)
        addr2Obj = self.nodes[2].getaddressinfo(addr2)
        self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
        mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
        mSigObjValid = self.nodes[2].getaddressinfo(mSigObj)
        txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
        decTx = self.nodes[0].gettransaction(txId)
        rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex'])
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable
        txDetails = self.nodes[0].gettransaction(txId, True)
        rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex'])
        vout = False
        for outpoint in rawTx2['vout']:
            if outpoint['value'] == Decimal('2.20000000'):
                vout = outpoint
                break
        bal = self.nodes[0].getbalance()
        inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "redeemScript" : mSigObjValid['hex'], "amount" : vout['value']}]
        outputs = { self.nodes[0].getnewaddress() : 2.19 }
        rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)
        rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs)
        self.log.debug(rawTxPartialSigned1)
        assert_equal(rawTxPartialSigned1['complete'], False) #node1 only has one key, can't comp. sign the tx
        rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs)
        self.log.debug(rawTxPartialSigned2)
        assert_equal(rawTxPartialSigned2['complete'], False) #node2 only has one key, can't comp. sign the tx
        rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
        self.log.debug(rawTxComb)
        self.nodes[2].sendrawtransaction(rawTxComb)
        rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
        # decoderawtransaction tests
        # witness transaction
        encrawtx = "010000000001010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f50500000000000102616100000000"
        decrawtx = self.nodes[0].decoderawtransaction(encrawtx, True) # decode as witness transaction
        assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
        assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # force decode as non-witness transaction
        # non-witness transaction
        encrawtx = "01000000010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f505000000000000000000"
        decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction
        assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
        # getrawtransaction tests
        # 1. valid parameters - only supply txid
        txHash = rawTx["hash"]
        assert_equal(self.nodes[0].getrawtransaction(txHash), rawTxSigned['hex'])
        # 2. valid parameters - supply txid and 0 for non-verbose
        assert_equal(self.nodes[0].getrawtransaction(txHash, 0), rawTxSigned['hex'])
        # 3. valid parameters - supply txid and False for non-verbose
        assert_equal(self.nodes[0].getrawtransaction(txHash, False), rawTxSigned['hex'])
        # 4. valid parameters - supply txid and 1 for verbose.
        # We only check the "hex" field of the output so we don't need to update this test every time the output format changes.
        assert_equal(self.nodes[0].getrawtransaction(txHash, 1)["hex"], rawTxSigned['hex'])
        # 5. valid parameters - supply txid and True for non-verbose
        assert_equal(self.nodes[0].getrawtransaction(txHash, True)["hex"], rawTxSigned['hex'])
        # 6. invalid parameters - supply txid and string "Flase"
        assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, "Flase")
        # 7. invalid parameters - supply txid and empty array
        assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, [])
        # 8. invalid parameters - supply txid and empty dict
        assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, {})
        inputs  = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 1000}]
        outputs = { self.nodes[0].getnewaddress() : 1 }
        rawtx   = self.nodes[0].createrawtransaction(inputs, outputs)
        decrawtx= self.nodes[0].decoderawtransaction(rawtx)
        assert_equal(decrawtx['vin'][0]['sequence'], 1000)
        # 9. invalid parameters - sequence number out of range
        inputs  = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : -1}]
        outputs = { self.nodes[0].getnewaddress() : 1 }
        assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
        # 10. invalid parameters - sequence number out of range
        inputs  = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967296}]
        outputs = { self.nodes[0].getnewaddress() : 1 }
        assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
        inputs  = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967294}]
        outputs = { self.nodes[0].getnewaddress() : 1 }
        rawtx   = self.nodes[0].createrawtransaction(inputs, outputs)
        decrawtx= self.nodes[0].decoderawtransaction(rawtx)
        assert_equal(decrawtx['vin'][0]['sequence'], 4294967294)
        ####################################
        # TRANSACTION VERSION NUMBER TESTS #
        ####################################
        # Test the minimum transaction version number that fits in a signed 32-bit integer.
        tx = CTransaction()
        tx.nVersion = -0x80000000
        rawtx = ToHex(tx)
        decrawtx = self.nodes[0].decoderawtransaction(rawtx)
        assert_equal(decrawtx['version'], -0x80000000)
        # Test the maximum transaction version number that fits in a signed 32-bit integer.
        tx = CTransaction()
        tx.nVersion = 0x7fffffff
        rawtx = ToHex(tx)
        decrawtx = self.nodes[0].decoderawtransaction(rawtx)
        assert_equal(decrawtx['version'], 0x7fffffff)
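        # Sanity note: -0x80000000 == -2147483648 and 0x7fffffff == 2147483647,
        # the INT32_MIN/INT32_MAX bounds of the serialized version field.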
if __name__ == '__main__':
    RawTransactionsTest().main()
 | 
	mit | -347,579,343,606,388,000 | 55.143836 | 263 | 0.641373 | false | 
| 
	utam0k/c3os | 
	c3os/client.py | 
	1 | 
	1344 | 
	import socket
import json
import time
import multiprocessing as mp
from c3os import utils
from c3os import conf
from c3os import db
from c3os.api.type import APITYPE
CONF = conf.CONF
def start():
    """ Start client service """
    mp.Process(target=client).start()
def client():
    """ client main routine """
    db_pool = db.generate_pool()
    while True:
        send_db(db_pool)
        time.sleep(3.0)
def send_db(db_pool):
    """Information on its DB is sent to other c3os.
    Args:
        db_pool (DBPool): DBPool class.
    Returns:
        None:
    """
    all_instance_info = utils.to_dict(db_pool)
    all_region_names = [name for name in all_instance_info.keys()]
    for dest_region_name, dest in CONF['dests'].items():
        host, port = dest.split(',')
        for region_name in all_region_names:
            if dest_region_name.lower() == region_name.lower():
                all_instance_info.pop(region_name)
                break
        db_info = json.dumps(all_instance_info)
        data = json.dumps({'type': APITYPE.ADD_DB, 'body': db_info})
        try:
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
                sock.connect((host, int(port)))
                sock.sendall(bytes(data, "utf-8"))
        except OSError:
            print("Error: could not connect to", host, port)
 | 
	mit | -5,227,906,240,231,535,000 | 23.888889 | 75 | 0.594494 | false | 
| 
	plotly/python-api | 
	packages/python/plotly/plotly/figure_factory/_annotated_heatmap.py | 
	1 | 
	10195 | 
	from __future__ import absolute_import, division
from plotly import exceptions, optional_imports
import plotly.colors as clrs
from plotly.figure_factory import utils
from plotly.graph_objs import graph_objs
from plotly.validators.heatmap import ColorscaleValidator
# Optional imports, may be None for users that only use our core functionality.
np = optional_imports.get_module("numpy")
def validate_annotated_heatmap(z, x, y, annotation_text):
    """
    Annotated-heatmap-specific validations
    Check that if a text matrix is supplied, it has the same
    dimensions as the z matrix.
    See FigureFactory.create_annotated_heatmap() for params
    :raises: (PlotlyError) If z and text matrices do not  have the same
        dimensions.
    """
    if annotation_text is not None and isinstance(annotation_text, list):
        utils.validate_equal_length(z, annotation_text)
        for lst in range(len(z)):
            if len(z[lst]) != len(annotation_text[lst]):
                raise exceptions.PlotlyError(
                    "z and text should have the " "same dimensions"
                )
    if x:
        if len(x) != len(z[0]):
            raise exceptions.PlotlyError(
                "oops, the x list that you "
                "provided does not match the "
                "width of your z matrix "
            )
    if y:
        if len(y) != len(z):
            raise exceptions.PlotlyError(
                "oops, the y list that you "
                "provided does not match the "
                "length of your z matrix "
            )
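# Illustrative sketch (names from this module; the call itself is hypothetical):
# mismatched row lengths trigger the PlotlyError raised above, e.g.
#
#     validate_annotated_heatmap([[1, 2], [3, 4]], None, None,
#                                [["a", "b", "c"], ["d", "e", "f"]])
#     # -> PlotlyError: z and text should have the same dimensions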
def create_annotated_heatmap(
    z,
    x=None,
    y=None,
    annotation_text=None,
    colorscale="Plasma",
    font_colors=None,
    showscale=False,
    reversescale=False,
    **kwargs
):
    """
    Function that creates annotated heatmaps
    This function adds annotations to each cell of the heatmap.
    :param (list[list]|ndarray) z: z matrix to create heatmap.
    :param (list) x: x axis labels.
    :param (list) y: y axis labels.
    :param (list[list]|ndarray) annotation_text: Text strings for
        annotations. Should have the same dimensions as the z matrix. If no
        text is added, the values of the z matrix are annotated. Default =
        z matrix values.
    :param (list|str) colorscale: heatmap colorscale.
    :param (list) font_colors: List of two color strings: [min_text_color,
        max_text_color] where min_text_color is applied to annotations for
        heatmap values < (max_value + min_value)/2. If font_colors is not
        defined, the colors are defined logically as black or white
        depending on the heatmap's colorscale.
    :param (bool) showscale: Display colorscale. Default = False
    :param (bool) reversescale: Reverse colorscale. Default = False
    :param kwargs: kwargs passed through plotly.graph_objs.Heatmap.
        These kwargs describe other attributes about the annotated Heatmap
        trace such as the colorscale. For more information on valid kwargs
        call help(plotly.graph_objs.Heatmap)
    Example 1: Simple annotated heatmap with default configuration
    >>> import plotly.figure_factory as ff
    >>> z = [[0.300000, 0.00000, 0.65, 0.300000],
    ...      [1, 0.100005, 0.45, 0.4300],
    ...      [0.300000, 0.00000, 0.65, 0.300000],
    ...      [1, 0.100005, 0.45, 0.00000]]
    >>> fig = ff.create_annotated_heatmap(z)
    >>> fig.show()
    """
    # Avoiding mutables in the call signature
    font_colors = font_colors if font_colors is not None else []
    validate_annotated_heatmap(z, x, y, annotation_text)
    # validate colorscale
    colorscale_validator = ColorscaleValidator()
    colorscale = colorscale_validator.validate_coerce(colorscale)
    annotations = _AnnotatedHeatmap(
        z, x, y, annotation_text, colorscale, font_colors, reversescale, **kwargs
    ).make_annotations()
    if x or y:
        trace = dict(
            type="heatmap",
            z=z,
            x=x,
            y=y,
            colorscale=colorscale,
            showscale=showscale,
            reversescale=reversescale,
            **kwargs
        )
        layout = dict(
            annotations=annotations,
            xaxis=dict(ticks="", dtick=1, side="top", gridcolor="rgb(0, 0, 0)"),
            yaxis=dict(ticks="", dtick=1, ticksuffix="  "),
        )
    else:
        trace = dict(
            type="heatmap",
            z=z,
            colorscale=colorscale,
            showscale=showscale,
            reversescale=reversescale,
            **kwargs
        )
        layout = dict(
            annotations=annotations,
            xaxis=dict(
                ticks="", side="top", gridcolor="rgb(0, 0, 0)", showticklabels=False
            ),
            yaxis=dict(ticks="", ticksuffix="  ", showticklabels=False),
        )
    data = [trace]
    return graph_objs.Figure(data=data, layout=layout)
def to_rgb_color_list(color_str, default):
    if "rgb" in color_str:
        return [int(v) for v in color_str.strip("rgb()").split(",")]
    elif "#" in color_str:
        return clrs.hex_to_rgb(color_str)
    else:
        return default
def should_use_black_text(background_color):
    return (
        background_color[0] * 0.299
        + background_color[1] * 0.587
        + background_color[2] * 0.114
    ) > 186
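# Worked sketch of the rule above (the 0.299/0.587/0.114 weights are the
# standard luma coefficients; the sample colors are illustrative):
#
#     should_use_black_text([255, 255, 0])  # yellow: ~225.9 > 186 -> True
#     should_use_black_text([0, 0, 139])    # dark blue: ~15.8 -> False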
class _AnnotatedHeatmap(object):
    """
    Refer to TraceFactory.create_annotated_heatmap() for docstring
    """
    def __init__(
        self, z, x, y, annotation_text, colorscale, font_colors, reversescale, **kwargs
    ):
        self.z = z
        if x:
            self.x = x
        else:
            self.x = range(len(z[0]))
        if y:
            self.y = y
        else:
            self.y = range(len(z))
        if annotation_text is not None:
            self.annotation_text = annotation_text
        else:
            self.annotation_text = self.z
        self.colorscale = colorscale
        self.reversescale = reversescale
        self.font_colors = font_colors
    def get_text_color(self):
        """
        Get font color for annotations.
        The annotated heatmap can feature two text colors: min_text_color and
        max_text_color. The min_text_color is applied to annotations for
        heatmap values < (max_value + min_value)/2. The user can define these
        two colors. Otherwise the colors are defined logically as black or
        white depending on the heatmap's colorscale.
        :rtype (string, string) min_text_color, max_text_color: text
            color for annotations for heatmap values <
            (max_value + min_value)/2 and text color for annotations for
            heatmap values >= (max_value + min_value)/2
        """
        # Plotly colorscales ranging from a lighter shade to a darker shade
        colorscales = [
            "Greys",
            "Greens",
            "Blues",
            "YIGnBu",
            "YIOrRd",
            "RdBu",
            "Picnic",
            "Jet",
            "Hot",
            "Blackbody",
            "Earth",
            "Electric",
            "Viridis",
            "Cividis",
        ]
        # Plotly colorscales ranging from a darker shade to a lighter shade
        colorscales_reverse = ["Reds"]
        white = "#FFFFFF"
        black = "#000000"
        if self.font_colors:
            min_text_color = self.font_colors[0]
            max_text_color = self.font_colors[-1]
        elif self.colorscale in colorscales and self.reversescale:
            min_text_color = black
            max_text_color = white
        elif self.colorscale in colorscales:
            min_text_color = white
            max_text_color = black
        elif self.colorscale in colorscales_reverse and self.reversescale:
            min_text_color = white
            max_text_color = black
        elif self.colorscale in colorscales_reverse:
            min_text_color = black
            max_text_color = white
        elif isinstance(self.colorscale, list):
            min_col = to_rgb_color_list(self.colorscale[0][1], [255, 255, 255])
            max_col = to_rgb_color_list(self.colorscale[-1][1], [255, 255, 255])
            # swap min/max colors if reverse scale
            if self.reversescale:
                min_col, max_col = max_col, min_col
            if should_use_black_text(min_col):
                min_text_color = black
            else:
                min_text_color = white
            if should_use_black_text(max_col):
                max_text_color = black
            else:
                max_text_color = white
        else:
            min_text_color = black
            max_text_color = black
        return min_text_color, max_text_color
    def get_z_mid(self):
        """
        Get the midpoint value of the z matrix
        :rtype (float) z_mid: midpoint of the z matrix values
        """
        if np and isinstance(self.z, np.ndarray):
            z_min = np.amin(self.z)
            z_max = np.amax(self.z)
        else:
            z_min = min([v for row in self.z for v in row])
            z_max = max([v for row in self.z for v in row])
        z_mid = (z_max + z_min) / 2
        return z_mid
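    # Hedged example of the midpoint above: for z = [[0, 2], [4, 10]],
    # z_min = 0 and z_max = 10, so get_z_mid() returns 5.0 (the midpoint,
    # not the mean, which would be 4.0).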
    def make_annotations(self):
        """
        Get annotations for each cell of the heatmap with graph_objs.Annotation
        :rtype (list[dict]) annotations: list of annotations for each cell of
            the heatmap
        """
        min_text_color, max_text_color = self.get_text_color()
        z_mid = self.get_z_mid()
        annotations = []
        for n, row in enumerate(self.z):
            for m, val in enumerate(row):
                font_color = min_text_color if val < z_mid else max_text_color
                annotations.append(
                    graph_objs.layout.Annotation(
                        text=str(self.annotation_text[n][m]),
                        x=self.x[m],
                        y=self.y[n],
                        xref="x1",
                        yref="y1",
                        font=dict(color=font_color),
                        showarrow=False,
                    )
                )
        return annotations
 | 
	mit | -5,518,445,393,110,075,000 | 32.316993 | 87 | 0.567043 | false | 
| 
	gangadhar-kadam/sapphire_app | 
	accounts/report/budget_variance_report/budget_variance_report.py | 
	1 | 
	4626 | 
	# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd.
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes import _, msgprint
from webnotes.utils import flt
import time
from accounts.utils import get_fiscal_year
from controllers.trends import get_period_date_ranges, get_period_month_ranges
def execute(filters=None):
	if not filters: filters = {}
	
	columns = get_columns(filters)
	period_month_ranges = get_period_month_ranges(filters["period"], filters["fiscal_year"])
	cam_map = get_costcenter_account_month_map(filters)
	data = []
	for cost_center, cost_center_items in cam_map.items():
		for account, monthwise_data in cost_center_items.items():
			row = [cost_center, account]
			totals = [0, 0, 0]
			for relevant_months in period_month_ranges:
				period_data = [0, 0, 0]
				for month in relevant_months:
					month_data = monthwise_data.get(month, {})
					for i, fieldname in enumerate(["target", "actual", "variance"]):
						value = flt(month_data.get(fieldname))
						period_data[i] += value
						totals[i] += value
				period_data[2] = period_data[0] - period_data[1]
				row += period_data
			totals[2] = totals[0] - totals[1]
			row += totals
			data.append(row)
	return columns, sorted(data, key=lambda x: (x[0], x[1]))
	
def get_columns(filters):
	for fieldname in ["fiscal_year", "period", "company"]:
		if not filters.get(fieldname):
			label = (" ".join(fieldname.split("_"))).title()
			msgprint(_("Please specify") + ": " + label,
				raise_exception=True)
	columns = ["Cost Center:Link/Cost Center:120", "Account:Link/Account:120"]
	group_months = False if filters["period"] == "Monthly" else True
	for from_date, to_date in get_period_date_ranges(filters["period"], filters["fiscal_year"]):
		for label in ["Target (%s)", "Actual (%s)", "Variance (%s)"]:
			if group_months:
				label = label % (from_date.strftime("%b") + " - " + to_date.strftime("%b"))
			else:
				label = label % from_date.strftime("%b")
				
			columns.append(label+":Float:120")
	return columns + ["Total Target:Float:120", "Total Actual:Float:120", 
		"Total Variance:Float:120"]
#Get cost center & target details
def get_costcenter_target_details(filters):
	return webnotes.conn.sql("""select cc.name, cc.distribution_id, 
		cc.parent_cost_center, bd.account, bd.budget_allocated 
		from `tabCost Center` cc, `tabBudget Detail` bd 
		where bd.parent=cc.name and bd.fiscal_year=%s and 
		cc.company=%s order by cc.name""" % ('%s', '%s'), 
		(filters.get("fiscal_year"), filters.get("company")), as_dict=1)
#Get target distribution details of accounts of cost center
def get_target_distribution_details(filters):
	target_details = {}
	for d in webnotes.conn.sql("""select bd.name, bdd.month, bdd.percentage_allocation  
		from `tabBudget Distribution Detail` bdd, `tabBudget Distribution` bd
		where bdd.parent=bd.name and bd.fiscal_year=%s""", (filters["fiscal_year"]), as_dict=1):
			target_details.setdefault(d.name, {}).setdefault(d.month, flt(d.percentage_allocation))
	return target_details
#Get actual details from gl entry
def get_actual_details(filters):
	ac_details = webnotes.conn.sql("""select gl.account, gl.debit, gl.credit, 
		gl.cost_center, MONTHNAME(gl.posting_date) as month_name 
		from `tabGL Entry` gl, `tabBudget Detail` bd 
		where gl.fiscal_year=%s and company=%s
		and bd.account=gl.account and bd.parent=gl.cost_center""" % ('%s', '%s'), 
		(filters.get("fiscal_year"), filters.get("company")), as_dict=1)
		
	cc_actual_details = {}
	for d in ac_details:
		cc_actual_details.setdefault(d.cost_center, {}).setdefault(d.account, []).append(d)
		
	return cc_actual_details
def get_costcenter_account_month_map(filters):
	import datetime
	costcenter_target_details = get_costcenter_target_details(filters)
	tdd = get_target_distribution_details(filters)
	actual_details = get_actual_details(filters)
	cam_map = {}
	for ccd in costcenter_target_details:
		for month_id in range(1, 13):
			month = datetime.date(2013, month_id, 1).strftime('%B')
			
			cam_map.setdefault(ccd.name, {}).setdefault(ccd.account, {})\
				.setdefault(month, webnotes._dict({
					"target": 0.0, "actual": 0.0
				}))
			tav_dict = cam_map[ccd.name][ccd.account][month]
			month_percentage = tdd.get(ccd.distribution_id, {}).get(month, 0) \
				if ccd.distribution_id else 100.0/12
				
			tav_dict.target = flt(ccd.budget_allocated) * month_percentage / 100
			
			for ad in actual_details.get(ccd.name, {}).get(ccd.account, []):
				if ad.month_name == month:
						tav_dict.actual += ad.debit - ad.credit
						
	return cam_map | 
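# Worked sketch (illustrative numbers, not taken from any ledger): with
# budget_allocated = 120000 and no distribution_id, month_percentage falls
# back to 100.0/12, so each month's target is 120000 * (100.0/12) / 100
# = 10000.0; actuals then accumulate debit - credit per GL entry month.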
	agpl-3.0 | 6,781,928,573,172,004,000 | 35.722222 | 93 | 0.68396 | false | 
| 
	kdeldycke/meta-package-manager | 
	meta_package_manager/tests/test_platform.py | 
	1 | 
	4103 | 
	# Copyright Kevin Deldycke <[email protected]> and contributors.
# All Rights Reserved.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
from types import FunctionType
import pytest
from ..managers import pool
from ..platform import (
    ALL_OS_LABELS,
    CURRENT_OS_ID,
    CURRENT_OS_LABEL,
    LINUX,
    MACOS,
    OS_DEFINITIONS,
    WINDOWS,
    current_os,
    is_linux,
    is_macos,
    is_windows,
    os_label,
)
from .conftest import MANAGER_IDS, unless_linux, unless_macos, unless_windows
def test_mutual_exclusion():
    if is_linux():
        assert not is_macos()
        assert not is_windows()
        assert CURRENT_OS_ID == LINUX
        assert CURRENT_OS_LABEL == os_label(LINUX)
    if is_macos():
        assert not is_linux()
        assert not is_windows()
        assert CURRENT_OS_ID == MACOS
        assert CURRENT_OS_LABEL == os_label(MACOS)
    if is_windows():
        assert not is_linux()
        assert not is_macos()
        assert CURRENT_OS_ID == WINDOWS
        assert CURRENT_OS_LABEL == os_label(WINDOWS)
def test_os_definitions():
    assert isinstance(OS_DEFINITIONS, dict)
    # Each OS definition must be unique.
    assert isinstance(ALL_OS_LABELS, frozenset)
    assert len(OS_DEFINITIONS) == len(ALL_OS_LABELS)
    for os_id, data in OS_DEFINITIONS.items():
        # OS ID.
        assert isinstance(os_id, str)
        assert os_id
        assert os_id.isascii()
        assert os_id.isalpha()
        assert os_id.islower()
        # Metadata.
        assert isinstance(data, tuple)
        assert len(data) == 2
        label, os_flag = data
        # OS label.
        assert label
        assert isinstance(label, str)
        assert label.isascii()
        assert label.isalpha()
        assert label in ALL_OS_LABELS
        # OS identification function.
        assert isinstance(os_flag, bool)
        os_id_func_name = f"is_{os_id}"
        assert os_id_func_name in globals()
        os_id_func = globals()[os_id_func_name]
        assert isinstance(os_id_func, FunctionType)
        assert isinstance(os_id_func(), bool)
        assert os_id_func() == os_flag
def test_current_os_func():
    # Function.
    os_id, label = current_os()
    assert os_id in OS_DEFINITIONS
    assert label in [os[0] for os in OS_DEFINITIONS.values()]
    # Constants.
    assert os_id == CURRENT_OS_ID
    assert label == CURRENT_OS_LABEL
def test_os_label():
    os_id, os_name = current_os()
    assert os_label(os_id) == os_name
def test_blacklisted_manager():
    """Check all managers are accounted for on each platforms."""
    # TODO: Use that blacklist to generate readme.rst's support table?
    blacklists = {
        LINUX: {"cask", "mas"},
        MACOS: {"apt", "flatpak", "opkg", "snap"},
        WINDOWS: {"apt", "brew", "cask", "flatpak", "mas", "opkg", "snap"},
    }
    blacklist = blacklists[current_os()[0]]
    # List of supported managers on the current platform.
    supported = {m.id for m in pool().values() if m.supported}
    assert supported == MANAGER_IDS - blacklist
# Test unittest decorator helpers.
@unless_linux
def test_unless_linux():
    assert is_linux()
    assert not is_macos()
    assert not is_windows()
@unless_macos
def test_unless_macos():
    assert not is_linux()
    assert is_macos()
    assert not is_windows()
@unless_windows
def test_unless_windows():
    assert not is_linux()
    assert not is_macos()
    assert is_windows()
 | 
	gpl-2.0 | 920,887,995,347,523,500 | 28.307143 | 77 | 0.645625 | false | 
| 
	sunqm/pyscf | 
	pyscf/mcscf/test/test_newton_casscf.py | 
	1 | 
	4618 | 
	#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from functools import reduce
import numpy
from pyscf import gto, scf, lib, fci
from pyscf.mcscf import newton_casscf, CASSCF, addons
mol = gto.Mole()
mol.verbose = lib.logger.DEBUG
mol.output = '/dev/null'
mol.atom = [
    ['H', ( 5.,-1.    , 1.   )],
    ['H', ( 0.,-5.    ,-2.   )],
    ['H', ( 4.,-0.5   ,-3.   )],
    ['H', ( 0.,-4.5   ,-1.   )],
    ['H', ( 3.,-0.5   ,-0.   )],
    ['H', ( 0.,-3.    ,-1.   )],
    ['H', ( 2.,-2.5   , 0.   )],
    ['H', ( 1., 1.    , 3.   )],
]
mol.basis = 'sto-3g'
mol.build()
b = 1.4
mol_N2 = gto.Mole()
mol_N2.build(
verbose = lib.logger.DEBUG,
output = '/dev/null',
atom = [
    ['N',(  0.000000,  0.000000, -b/2)],
    ['N',(  0.000000,  0.000000,  b/2)], ],
basis = {'N': 'ccpvdz', },
symmetry = 1
)
mf_N2 = scf.RHF (mol_N2).run ()
solver1 = fci.FCI(mol_N2)
solver1.spin = 0
solver1.nroots = 2
solver2 = fci.FCI(mol_N2, singlet=False)
solver2.spin = 2
mc_N2 = CASSCF(mf_N2, 4, 4)
mc_N2 = addons.state_average_mix_(mc_N2, [solver1, solver2],
                                     (0.25,0.25,0.5)).newton ()
mc_N2.kernel()
mf = scf.RHF(mol)
mf.max_cycle = 3
mf.kernel()
mc = newton_casscf.CASSCF(mf, 4, 4)
mc.fcisolver = fci.direct_spin1.FCI(mol)
mc.kernel()
sa = CASSCF(mf, 4, 4)
sa.fcisolver = fci.direct_spin1.FCI (mol)
sa = sa.state_average ([0.5,0.5]).newton ()
sa.kernel()
def tearDownModule():
    global mol, mf, mc, sa, mol_N2, mf_N2, mc_N2
    del mol, mf, mc, sa, mol_N2, mf_N2, mc_N2
class KnownValues(unittest.TestCase):
    def test_gen_g_hop(self):
        numpy.random.seed(1)
        mo = numpy.random.random(mf.mo_coeff.shape)
        ci0 = numpy.random.random((6,6))
        ci0/= numpy.linalg.norm(ci0)
        gall, gop, hop, hdiag = newton_casscf.gen_g_hop(mc, mo, ci0, mc.ao2mo(mo))
        self.assertAlmostEqual(lib.finger(gall), 21.288022525148595, 8)
        self.assertAlmostEqual(lib.finger(hdiag), -4.6864640132374618, 8)
        x = numpy.random.random(gall.size)
        u, ci1 = newton_casscf.extract_rotation(mc, x, 1, ci0)
        self.assertAlmostEqual(lib.finger(gop(u, ci1)), -412.9441873541524, 8)
        self.assertAlmostEqual(lib.finger(hop(x)), 73.358310983341198, 8)
    def test_get_grad(self):
        self.assertAlmostEqual(mc.e_tot, -3.6268060853430573, 8)
        self.assertAlmostEqual(abs(mc.get_grad()).max(), 0, 5)
    def test_sa_gen_g_hop(self):
        numpy.random.seed(1)
        mo = numpy.random.random(mf.mo_coeff.shape)
        ci0 = numpy.random.random((2,36))
        ci0/= numpy.linalg.norm(ci0, axis=1)[:,None]
        ci0 = list (ci0.reshape ((2,6,6)))
        gall, gop, hop, hdiag = newton_casscf.gen_g_hop(sa, mo, ci0, sa.ao2mo(mo))
        self.assertAlmostEqual(lib.finger(gall), 32.46973284682045, 8)
        self.assertAlmostEqual(lib.finger(hdiag), -63.6527761153809, 8)
        x = numpy.random.random(gall.size)
        u, ci1 = newton_casscf.extract_rotation(sa, x, 1, ci0)
        self.assertAlmostEqual(lib.finger(gop(u, ci1)), -49.017079186126, 8)
        self.assertAlmostEqual(lib.finger(hop(x)), 169.47893548740288, 8)
    def test_sa_get_grad(self):
        self.assertAlmostEqual(sa.e_tot, -3.62638372957158, 7)
        # MRH 06/24/2020: convergence thresh of scf may not have consistent
        # meaning in SA problems
        self.assertAlmostEqual(abs(sa.get_grad()).max(), 0, 5)
    def test_sa_mix(self):
        e = mc_N2.e_states
        self.assertAlmostEqual(mc_N2.e_tot, -108.80340952016508, 7)
        self.assertAlmostEqual(mc_N2.e_average, -108.80340952016508, 7)
        self.assertAlmostEqual(numpy.dot(e,[.25,.25,.5]), -108.80340952016508, 7)
        dm1 = mc_N2.analyze()
        self.assertAlmostEqual(lib.fp(dm1[0]), 0.52172669549357464, 4)
        self.assertAlmostEqual(lib.fp(dm1[1]), 0.53366776017869022, 4)
        self.assertAlmostEqual(lib.fp(dm1[0]+dm1[1]), 1.0553944556722636, 4)
        mc_N2.cas_natorb()
if __name__ == "__main__":
    print("Full Tests for mcscf.addons")
    unittest.main()
 | 
	apache-2.0 | 7,542,723,317,859,126,000 | 34.251908 | 82 | 0.621914 | false | 
| 
	Hammer2900/SunflowerX | 
	application/tools/find_files.py | 
	1 | 
	13799 | 
	import os
import gtk
import user
import pango
import gobject
from threading import Thread, Event
class Column:
    ICON = 0
    NAME = 1
    DIRECTORY = 2
class FindFiles(gobject.GObject):
    """Find files tool"""
    __gtype_name__ = 'Sunflower_FindFiles'
    __gsignals__ = {
        'notify-start': (gobject.SIGNAL_RUN_LAST, None, ()),
        'notify-stop': (gobject.SIGNAL_RUN_LAST, None, ())
    }
    def __init__(self, parent, application):
        gobject.GObject.__init__(self)
        # store parameters
        self._parent = parent
        self._application = application
        self._extensions = []
        self._path = self._parent.path
        self._provider = None
        self._running = False
        # thread control object
        self._abort = Event()
        if hasattr(self._parent, 'get_provider'):
            self._provider = self._parent.get_provider()
        # configure window
        self.window = gtk.Window(type=gtk.WINDOW_TOPLEVEL)
        self.window.set_title(_('Find files'))
        self.window.set_default_size(550, 500)
        self.window.set_position(gtk.WIN_POS_CENTER_ON_PARENT)
        self.window.set_transient_for(application)
        self.window.set_border_width(7)
        self.window.set_type_hint(gtk.gdk.WINDOW_TYPE_HINT_DIALOG)
        self.window.set_wmclass('Sunflower', 'Sunflower')
        self.window.connect('key-press-event', self._handle_key_press)
        # create interface
        vbox = gtk.VBox(False, 7)
        # create path and basic options
        self._table_basic = gtk.Table(3, 2, False)
        self._table_basic.set_col_spacings(5)
        self._table_basic.set_row_spacings(2)
        label_path = gtk.Label(_('Search in:'))
        label_path.set_alignment(0, 0.5)
        self._entry_path = gtk.Entry()
        self._entry_path.connect('activate', self.find_files)
        if hasattr(self._parent, 'path'):
            # get path from the parent
            self._entry_path.set_text(self._parent.path)
        else:
            # parent has no path, set user home directory
            self._entry_path.set_text(os.path.expanduser(user.home))
        button_browse = gtk.Button(label=_('Browse'))
        button_browse.connect('clicked', self._choose_directory)
        self._checkbox_recursive = gtk.CheckButton(label=_('Search recursively'))
        self._checkbox_recursive.set_active(True)
        # create extensions notebook
        self._extension_list = gtk.Notebook()
        # create list
        self._list = gtk.ListStore(str, str, str)
        self._names = gtk.TreeView(model=self._list)
        cell_icon = gtk.CellRendererPixbuf()
        cell_name = gtk.CellRendererText()
        cell_directory = gtk.CellRendererText()
        col_name = gtk.TreeViewColumn(_('Name'))
        col_name.set_expand(True)
        col_directory = gtk.TreeViewColumn(_('Location'))
        col_directory.set_expand(True)
        # pack renderer
        col_name.pack_start(cell_icon, False)
        col_name.pack_start(cell_name, True)
        col_directory.pack_start(cell_directory, True)
        # connect renderer attributes
        col_name.add_attribute(cell_icon, 'icon-name', Column.ICON)
        col_name.add_attribute(cell_name, 'text', Column.NAME)
        col_directory.add_attribute(cell_directory, 'text', Column.DIRECTORY)
        self._names.append_column(col_name)
        self._names.append_column(col_directory)
        self._names.connect('row-activated', self.__handle_row_activated)
        container = gtk.ScrolledWindow()
        container.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_ALWAYS)
        container.set_shadow_type(gtk.SHADOW_IN)
        # create status label
        self._status = gtk.Label()
        self._status.set_alignment(0, 0.5)
        self._status.set_ellipsize(pango.ELLIPSIZE_MIDDLE)
        self._status.set_property('no-show-all', True)
        # create controls
        hbox_controls = gtk.HBox(False, 5)
        self._image_find = gtk.Image()
        self._image_find.set_from_stock(gtk.STOCK_MEDIA_PLAY, gtk.ICON_SIZE_BUTTON)
        self._button_find = gtk.Button()
        self._button_find.set_label(_('Start'))
        self._button_find.set_image(self._image_find)
        self._button_find.connect('clicked', self.find_files)
        button_close = gtk.Button(stock=gtk.STOCK_CLOSE)
        button_close.connect('clicked', self._close_window)
        # pack interface
        self._table_basic.attach(label_path, 0, 1, 0, 1, xoptions=gtk.SHRINK | gtk.FILL)
        self._table_basic.attach(self._entry_path, 1, 2, 0, 1, xoptions=gtk.EXPAND | gtk.FILL)
        self._table_basic.attach(button_browse, 2, 3, 0, 1, xoptions=gtk.SHRINK | gtk.FILL)
        self._table_basic.attach(self._checkbox_recursive, 1, 2, 1, 2)
        container.add(self._names)
        hbox_controls.pack_end(self._button_find, False, False, 0)
        hbox_controls.pack_end(button_close, False, False, 0)
        vbox.pack_start(self._table_basic, False, False, 0)
        vbox.pack_start(self._extension_list, False, False, 0)
        vbox.pack_end(hbox_controls, False, False, 0)
        vbox.pack_end(self._status, False, False, 0)
        vbox.pack_end(container, True, True, 0)
        self.window.add(vbox)
        # create extensions
        self.__create_extensions()
        # show all widgets
        self.window.show_all()
    def __handle_row_activated(self, treeview, path, view_column, data=None):
        """Handle actions on list"""
        # get list selection
        selection = treeview.get_selection()
        list_, iter_ = selection.get_selected()
        # we need selection for this
        if iter_ is None: return
        name = list_.get_value(iter_, Column.NAME)
        path = list_.get_value(iter_, Column.DIRECTORY)
        # get active object
        active_object = self._application.get_active_object()
        if hasattr(active_object, 'change_path'):
            # change path
            active_object.change_path(path, name)
            # close window
            self._close_window()
        else:
            # notify user about active object
            dialog = gtk.MessageDialog(
                self.window,
                gtk.DIALOG_DESTROY_WITH_PARENT,
                gtk.MESSAGE_INFO,
                gtk.BUTTONS_OK,
                _(
                    'Active object doesn\'t support changing '
                    'path. Set focus on a different object, '
                    'preferably file list, and try again.'
                )
            )
            dialog.run()
            dialog.destroy()
    def __create_extensions(self):
        """Create rename extensions"""
        for ExtensionClass in self._application.find_extension_classes.values():
            extension = ExtensionClass(self)
            title = extension.get_title()
            # add tab
            self._extension_list.append_page(extension.get_container(), gtk.Label(title))
            # store extension for later use
            self._extensions.append(extension)
    def __update_status_label(self, path):
        """Update status label with current scanning path"""
        self._status.set_text(path)
    def __update_status(self, running=True):
        """Update button status"""
        self._running = running
        if running:
            # disable interface to prevent changes during search
            self._table_basic.set_sensitive(False)
            self._extension_list.set_sensitive(False)
            # show status bar
            self._status.show()
            # update find button
            self._image_find.set_from_stock(gtk.STOCK_MEDIA_STOP, gtk.ICON_SIZE_BUTTON)
            self._button_find.set_label(_('Stop'))
        else:
            # enable interface to prevent changes during search
            self._table_basic.set_sensitive(True)
            self._extension_list.set_sensitive(True)
            # hide status bar
            self._status.hide()
            # update find button
            self._image_find.set_from_stock(gtk.STOCK_MEDIA_PLAY, gtk.ICON_SIZE_BUTTON)
            self._button_find.set_label(_('Start'))
    def __find_files(self, path, children, scan_recursively):
        """Threaded find files method"""
        scan_queue = []
        extension_list = []
        # prepare extension objects for operation
        for child in children:
            extension_list.append(child.get_data('extension'))
        # tell extensions search is starting
        self.emit('notify-start')
        # update thread status
        gobject.idle_add(self.__update_status, True)
        gobject.idle_add(self.__update_status_label, path)
        # add current path to scan queue
        try:
            item_list = self._provider.list_dir(path)
            item_list = map(lambda new_item: os.path.join(path, new_item), item_list)
            scan_queue.extend(item_list)
        except:
            pass
        # traverse through directories
        while not self._abort.is_set() and len(scan_queue) > 0:
            # get next item in queue
            item = scan_queue.pop(0)
            if self._provider.is_dir(item) and scan_recursively:
                # extend scan queue with directory content
                gobject.idle_add(self.__update_status_label, item)
                try:
                    item_list = self._provider.list_dir(item)
                    item_list = map(lambda new_item: os.path.join(item, new_item), item_list)
                    scan_queue.extend(item_list)
                except:
                    pass
            # check if item fits criteria
            match = True
            for extension in extension_list:
                if not extension.is_path_ok(item):
                    match = False
                    break
            # add item if score is right
            if match:
                name = os.path.basename(item)
                path = os.path.dirname(item)
                icon = self._application.icon_manager.get_icon_for_file(item)
                self._list.append((icon, name, path))
        # update thread status
        gobject.idle_add(self.__update_status, False)
        # tell extensions search has been stopped
        self.emit('notify-stop')
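    # Note: __find_files above is an iterative breadth-first walk; directories
    # discovered during the scan are appended to scan_queue and popped FIFO,
    # so deeply nested trees never hit Python's recursion limit.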
    def _close_window(self, widget=None, data=None):
        """Close window"""
        self._abort.set()  # notify search thread we are terminating
        self.window.destroy()
    def _choose_directory(self, widget=None, data=None):
        """Show 'FileChooser' dialog"""
        dialog = gtk.FileChooserDialog(
            title=_('Find files'),
            parent=self._application,
            action=gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER,
            buttons=(
                gtk.STOCK_CANCEL,
                gtk.RESPONSE_REJECT,
                gtk.STOCK_OK,
                gtk.RESPONSE_ACCEPT
            )
        )
        dialog.set_filename(self._entry_path.get_text())
        response = dialog.run()
        if response == gtk.RESPONSE_ACCEPT:
            self._entry_path.set_text(dialog.get_filename())
        dialog.destroy()
    def _handle_key_press(self, widget, event, data=None):
        """Handle pressing keys"""
        if event.keyval == gtk.keysyms.Escape:
            self._close_window()
    def stop_search(self, widget=None, data=None):
        """Stop searching for files"""
        self._abort.set()  # same abort event the search thread polls
    def find_files(self, widget=None, data=None):
        """Start searching for files"""
        if not self._running:
            # thread is not running, start it
            path = self._entry_path.get_text()
            # make sure we have a valid provider
            if self._provider is None:
                ProviderClass = self._application.get_provider_by_protocol('file')
                self._provider = ProviderClass(self._parent)
            # check if specified path exists
            if not self._provider.is_dir(path):
                dialog = gtk.MessageDialog(
                    self.window,
                    gtk.DIALOG_DESTROY_WITH_PARENT,
                    gtk.MESSAGE_ERROR,
                    gtk.BUTTONS_OK,
                    _(
                        'Specified path is not valid or doesn\'t '
                        'exist anymore. Please check your selection '
                        'and try again.'
                    )
                )
                dialog.run()
                dialog.destroy()
                return
            # get list of active extensions
            active_children = filter(
                lambda child: child.get_data('extension').is_active(),
                self._extension_list.get_children()
            )
            if len(active_children) == 0:
                dialog = gtk.MessageDialog(
                    self.window,
                    gtk.DIALOG_DESTROY_WITH_PARENT,
                    gtk.MESSAGE_WARNING,
                    gtk.BUTTONS_OK,
                    _(
                        'You need to enable at least one extension '
                        'in order to find files and directories!'
                    )
                )
                dialog.run()
                dialog.destroy()
                return
            # set thread control objects
            self._abort.clear()
            # clear existing list
            self._list.clear()
            # start the thread
            params = {
                'path': path,
                'children': active_children,
                'scan_recursively': self._checkbox_recursive.get_active()
            }
            thread = Thread(target=self.__find_files, kwargs=params)
            thread.start()
        else:
            # thread is running, set abort event
            self._abort.set()
 | 
	gpl-3.0 | -399,886,151,447,045,440 | 32.250602 | 94 | 0.560838 | false | 
| 
	bdang2012/taiga-back-casting | 
	tests/unit/test_timeline.py | 
	1 | 
	3510 | 
	# Copyright (C) 2014-2015 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2015 Jesús Espino <[email protected]>
# Copyright (C) 2014-2015 David Barragán <[email protected]>
# Copyright (C) 2014-2015 Anler Hernández <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
from unittest.mock import patch, call
from django.core.exceptions import ValidationError
from taiga.timeline import service
from taiga.timeline.models import Timeline
from taiga.projects.models import Project
from taiga.users.models import User
import pytest
def test_push_to_timeline_many_objects():
    with patch("taiga.timeline.service._add_to_object_timeline") as mock:
        users = [User(), User(), User()]
        project = Project()
        service.push_to_timeline(users, project, "test", project.created_date)
        assert mock.call_count == 3
        assert mock.mock_calls == [
            call(users[0], project, "test", project.created_date, "default", {}),
            call(users[1], project, "test", project.created_date, "default", {}),
            call(users[2], project, "test", project.created_date, "default", {}),
        ]
        with pytest.raises(Exception):
            service.push_to_timeline(None, project, "test")
def test_add_to_objects_timeline():
    with patch("taiga.timeline.service._add_to_object_timeline") as mock:
        users = [User(), User(), User()]
        project = Project()
        service._add_to_objects_timeline(users, project, "test", project.created_date)
        assert mock.call_count == 3
        assert mock.mock_calls == [
            call(users[0], project, "test", project.created_date, "default", {}),
            call(users[1], project, "test", project.created_date, "default", {}),
            call(users[2], project, "test", project.created_date, "default", {}),
        ]
        with pytest.raises(Exception):
            service.push_to_timeline(None, project, "test")
def test_get_impl_key_from_model():
    assert service._get_impl_key_from_model(Timeline, "test") == "timeline.timeline.test"
    with pytest.raises(Exception):
        service._get_impl_key(None)
def test_get_impl_key_from_typename():
    assert service._get_impl_key_from_typename("timeline.timeline", "test") == "timeline.timeline.test"
    with pytest.raises(Exception):
        service._get_impl_key(None)
def test_register_timeline_implementation():
    test_func = lambda x: "test-func-result"
    service.register_timeline_implementation("timeline.timeline", "test", test_func)
    assert service._timeline_impl_map["timeline.timeline.test"](None) == "test-func-result"
    @service.register_timeline_implementation("timeline.timeline", "test-decorator")
    def decorated_test_function(x):
        return "test-decorated-func-result"
    assert service._timeline_impl_map["timeline.timeline.test-decorator"](None) == "test-decorated-func-result"
 | 
	agpl-3.0 | -5,399,271,881,389,798,000 | 42.296296 | 111 | 0.687482 | false | 
| 
	trnewman/VT-USRP-daughterboard-drivers_python | 
	gr-wxgui/src/python/numbersink.py | 
	1 | 
	25273 | 
	#!/usr/bin/env python
#
# Copyright 2003,2004,2005,2006,2007 Free Software Foundation, Inc.
# 
# This file is part of GNU Radio
# 
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
# 
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
# 
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING.  If not, write to
# the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
# 
from gnuradio import gr, gru, window
from gnuradio.wxgui import stdgui
import wx
#from wx import StaticText
import gnuradio.wxgui.plot as plot
import numpy
import threading
import math    
default_numbersink_size = (640,240)
default_number_rate = gr.prefs().get_long('wxgui', 'number_rate', 15)
class number_sink_base(object):
    def __init__(self, input_is_real=False, unit='',base_value=0, minval=-100.0,maxval=100.0,factor=1.0,decimal_places=10, ref_level=50,
                 sample_rate=1, 
                 number_rate=default_number_rate,
                 average=False, avg_alpha=None, label='', peak_hold=False):
        # initialize common attributes
        self.unit=unit
        self.base_value = base_value
        self.minval=minval
        self.maxval=maxval
        self.factor=factor
        self.y_divs = 8
        self.decimal_places=decimal_places
        self.ref_level = ref_level
        self.sample_rate = sample_rate
        number_size=1
        self.number_size = number_size
        self.number_rate = number_rate
        self.average = average
        if avg_alpha is None:
            self.avg_alpha = 2.0 / number_rate
        else:
            self.avg_alpha = avg_alpha
        self.label = label
        self.peak_hold = peak_hold
        self.show_gauge = True
        self.input_is_real = input_is_real
        self.msgq = gr.msg_queue(2)         # queue that holds a maximum of 2 messages
    def set_decimal_places(self, decimal_places):
        self.decimal_places = decimal_places
    def set_ref_level(self, ref_level):
        self.ref_level = ref_level
    def print_current_value(self, comment):
        print comment, self.win.current_value
    def set_average(self, average):
        self.average = average
        if average:
            self.avg.set_taps(self.avg_alpha)
            self.set_peak_hold(False)
        else:
            self.avg.set_taps(1.0)
    def set_peak_hold(self, enable):
        self.peak_hold = enable
        if enable:
            self.set_average(False)
        self.win.set_peak_hold(enable)
    def set_show_gauge(self, enable):
        self.show_gauge = enable
        self.win.set_show_gauge(enable)
    def set_avg_alpha(self, avg_alpha):
        self.avg_alpha = avg_alpha
    def set_base_value(self, base_value):
        self.base_value = base_value
        
class number_sink_f(gr.hier_block, number_sink_base):
    def __init__(self, fg, parent, unit='',base_value=0,minval=-100.0,maxval=100.0,factor=1.0,
                 decimal_places=10, ref_level=50, sample_rate=1, #number_size=512,
                 number_rate=default_number_rate, average=False, avg_alpha=None,
                 label='', size=default_numbersink_size, peak_hold=False):
        number_sink_base.__init__(self, unit=unit, input_is_real=True, base_value=base_value,
                               minval=minval,maxval=maxval,factor=factor,
                               decimal_places=decimal_places, ref_level=ref_level,
                               sample_rate=sample_rate, #number_size=number_size,
                               number_rate=number_rate,
                               average=average, avg_alpha=avg_alpha, label=label,
                               peak_hold=peak_hold)
         
        number_size=1                      
        #s2p = gr.stream_to_vector(gr.sizeof_float, number_size)
        one_in_n = gr.keep_one_in_n(gr.sizeof_float,
                                    max(1, int(sample_rate/number_rate)))
            
        #c2mag = gr.complex_to_mag(number_size)
        self.avg = gr.single_pole_iir_filter_ff(1.0, number_size)
        # FIXME  We need to add 3dB to all bins but the DC bin
        #log = gr.nlog10_ff(20, number_size,
        #                   -20*math.log10(number_size)-10*math.log10(power/number_size))
        sink = gr.message_sink(gr.sizeof_float , self.msgq, True)
        #fg.connect (s2p, one_in_n, fft, c2mag, self.avg, log, sink)
        fg.connect(self.avg,one_in_n,sink)
        gr.hier_block.__init__(self, fg, self.avg, sink)
        self.win = number_window(self, parent, size=size,label=label)
        self.set_average(self.average)
class number_sink_c(gr.hier_block, number_sink_base):
    def __init__(self, fg, parent, unit='',base_value=0,minval=-100.0,maxval=100.0,factor=1.0,
                 decimal_places=10, ref_level=50, sample_rate=1, #number_size=512,
                 number_rate=default_number_rate, average=False, avg_alpha=None,
                 label='', size=default_numbersink_size, peak_hold=False):
        number_sink_base.__init__(self, unit=unit, input_is_real=False, base_value=base_value,factor=factor,
                               minval=minval,maxval=maxval,decimal_places=decimal_places, ref_level=ref_level,
                               sample_rate=sample_rate, #number_size=number_size,
                               number_rate=number_rate,
                               average=average, avg_alpha=avg_alpha, label=label,
                               peak_hold=peak_hold)
        number_size=1                      
        one_in_n = gr.keep_one_in_n(gr.sizeof_gr_complex,
                                    max(1, int(sample_rate/number_rate)))
            
        #c2mag = gr.complex_to_mag(number_size)
        self.avg = gr.single_pole_iir_filter_cc(1.0, number_size)
        # FIXME  We need to add 3dB to all bins but the DC bin
        #log = gr.nlog10_ff(20, number_size,
        #                   -20*math.log10(number_size)-10*math.log10(power/number_size))
        sink = gr.message_sink(gr.sizeof_gr_complex , self.msgq, True)
        #fg.connect (s2p, one_in_n, fft, c2mag, self.avg, log, sink)
        fg.connect(self.avg,one_in_n,sink)
        gr.hier_block.__init__(self, fg, self.avg, sink)
        self.win = number_window(self, parent, size=size,label=label)
        self.set_average(self.average)
# ------------------------------------------------------------------------
myDATA_EVENT = wx.NewEventType()
EVT_DATA_EVENT = wx.PyEventBinder (myDATA_EVENT, 0)
class DataEvent(wx.PyEvent):
    def __init__(self, data):
        wx.PyEvent.__init__(self)
        self.SetEventType (myDATA_EVENT)
        self.data = data
    def Clone (self): 
        self.__class__ (self.GetId())
class input_watcher (threading.Thread):
    def __init__ (self, msgq, number_size, event_receiver, **kwds):
        threading.Thread.__init__ (self, **kwds)
        self.setDaemon (1)
        self.msgq = msgq
        self.number_size = number_size
        self.event_receiver = event_receiver
        self.keep_running = True
        self.start ()
    def run (self):
        while (self.keep_running):
            msg = self.msgq.delete_head()  # blocking read of message queue
            itemsize = int(msg.arg1())
            nitems = int(msg.arg2())
            s = msg.to_string()            # get the body of the msg as a string
            # There may be more than one number in the message.
            # If so, we take only the last one
            if nitems > 1:
                start = itemsize * (nitems - 1)
                s = s[start:start+itemsize]
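            # e.g. with float32 samples (itemsize == 4) and nitems == 3 the
            # body is 12 bytes; start = 4 * 2 = 8, so the slice above keeps
            # only bytes 8:12, the most recent value.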
            complex_data = numpy.fromstring (s, numpy.float32)
            de = DataEvent (complex_data)
            wx.PostEvent (self.event_receiver, de)
            del de
    
#========================================================================================
class static_text_window (wx.StaticText): #plot.PlotCanvas):
    def __init__ (self, parent, numbersink,id = -1,label="number",
                  pos = wx.DefaultPosition, size = wx.DefaultSize,
                  style = wx.DEFAULT_FRAME_STYLE, name = ""):
        #plot.PlotCanvas.__init__ (self, parent, id, pos, size, style, name)
        wx.StaticText.__init__(self, parent, id, label, pos, size, style, name)
        #self.static_text=wx.StaticText( parent, id, label, pos, (size[0]/2,size[1]/2), style, name)
        #gauge_style = wx.GA_HORIZONTAL
        #self.gauge=wx.Gauge( parent, id, range=1000, pos=(pos[0],pos[1]+size[1]/2),size=(size[0]/2,size[1]/2), style=gauge_style,  name = "gauge")
        #wx.BoxSizer.__init__ (self,wx.VERTICAL)
        #self.Add (self.static_text, 0, wx.EXPAND)
        #self.Add (self.gauge, 1, wx.EXPAND)
        self.parent=parent
        self.label=label
        #self.y_range = None
        self.numbersink = numbersink
        self.peak_hold = False
        self.peak_vals = None
        #self.SetEnableGrid (True)
        # self.SetEnableZoom (True)
        # self.SetBackgroundColour ('black')
        
        self.build_popup_menu()
        
        #EVT_DATA_EVENT (self, self.set_data)
        #wx.EVT_CLOSE (self, self.on_close_window)
        #self.Bind(wx.EVT_RIGHT_UP, self.on_right_click)
        self.Bind(wx.EVT_RIGHT_UP, self.on_right_click)
        #self.input_watcher = input_watcher(numbersink.msgq, numbersink.number_size, self)
    def on_close_window (self, event):
        print "number_window:on_close_window"
        self.keep_running = False
    def set_peak_hold(self, enable):
        self.peak_hold = enable
        self.peak_vals = None
    def update_y_range (self):
        ymax = self.numbersink.ref_level
        ymin = self.numbersink.ref_level - self.numbersink.decimal_places * self.numbersink.y_divs
        self.y_range = self._axisInterval ('min', ymin, ymax)
    def on_average(self, evt):
        # print "on_average"
        self.numbersink.set_average(evt.IsChecked())
    def on_peak_hold(self, evt):
        # print "on_peak_hold"
        self.numbersink.set_peak_hold(evt.IsChecked())
    def on_show_gauge(self, evt):
        # print "on_show_gauge"
        #if evt.IsChecked():
        self.numbersink.set_show_gauge(evt.IsChecked())
        print evt.IsChecked()
        #  print "show gauge"
        #else:
        #  self.parent.gauge.Hide()
        #  print "hide gauge"
    def on_incr_ref_level(self, evt):
        # print "on_incr_ref_level"
        self.numbersink.set_ref_level(self.numbersink.ref_level
                                   + self.numbersink.decimal_places)
    def on_decr_ref_level(self, evt):
        # print "on_decr_ref_level"
        self.numbersink.set_ref_level(self.numbersink.ref_level
                                   - self.numbersink.decimal_places)
    def on_incr_decimal_places(self, evt):
        # print "on_incr_decimal_places"
        self.numbersink.set_decimal_places(self.numbersink.decimal_places+1) #next_up(self.numbersink.decimal_places, (1,2,5,10,20)))
    def on_decr_decimal_places(self, evt):
        # print "on_decr_decimal_places"
        self.numbersink.set_decimal_places(max(self.numbersink.decimal_places-1,0)) #next_down(self.numbersink.decimal_places, (1,2,5,10,20)))
    def on_decimal_places(self, evt):
        # print "on_decimal_places"
        Id = evt.GetId()
        if Id == self.id_decimal_places_0:
            self.numbersink.set_decimal_places(0)
        elif Id == self.id_decimal_places_1:
            self.numbersink.set_decimal_places(1)
        elif Id == self.id_decimal_places_2:
            self.numbersink.set_decimal_places(2)
        elif Id == self.id_decimal_places_3:
            self.numbersink.set_decimal_places(3)
        elif Id == self.id_decimal_places_6:
            self.numbersink.set_decimal_places(6)
        elif Id == self.id_decimal_places_9:
            self.numbersink.set_decimal_places(9)
        
    def on_right_click(self, event):
        menu = self.popup_menu
        for id, pred in self.checkmarks.items():
            item = menu.FindItemById(id)
            item.Check(pred())
        self.PopupMenu(menu, event.GetPosition())
    def build_popup_menu(self):
        #self.id_hide_gauge = wx.NewId()
        self.id_show_gauge = wx.NewId()
        self.id_incr_ref_level = wx.NewId()
        self.id_decr_ref_level = wx.NewId()
        self.id_incr_decimal_places = wx.NewId()
        self.id_decr_decimal_places = wx.NewId()
        self.id_decimal_places_0 = wx.NewId()
        self.id_decimal_places_1 = wx.NewId()
        self.id_decimal_places_2 = wx.NewId()
        self.id_decimal_places_3 = wx.NewId()
        self.id_decimal_places_6 = wx.NewId()
        self.id_decimal_places_9 = wx.NewId()
        self.id_average = wx.NewId()
        self.id_peak_hold = wx.NewId()
        self.Bind(wx.EVT_MENU, self.on_average, id=self.id_average)
        self.Bind(wx.EVT_MENU, self.on_peak_hold, id=self.id_peak_hold)
        #self.Bind(wx.EVT_MENU, self.on_hide_gauge, id=self.id_hide_gauge)
        self.Bind(wx.EVT_MENU, self.on_show_gauge, id=self.id_show_gauge)
        self.Bind(wx.EVT_MENU, self.on_incr_ref_level, id=self.id_incr_ref_level)
        self.Bind(wx.EVT_MENU, self.on_decr_ref_level, id=self.id_decr_ref_level)
        self.Bind(wx.EVT_MENU, self.on_incr_decimal_places, id=self.id_incr_decimal_places)
        self.Bind(wx.EVT_MENU, self.on_decr_decimal_places, id=self.id_decr_decimal_places)
        self.Bind(wx.EVT_MENU, self.on_decimal_places, id=self.id_decimal_places_0)
        self.Bind(wx.EVT_MENU, self.on_decimal_places, id=self.id_decimal_places_1)
        self.Bind(wx.EVT_MENU, self.on_decimal_places, id=self.id_decimal_places_2)
        self.Bind(wx.EVT_MENU, self.on_decimal_places, id=self.id_decimal_places_3)
        self.Bind(wx.EVT_MENU, self.on_decimal_places, id=self.id_decimal_places_6)
        self.Bind(wx.EVT_MENU, self.on_decimal_places, id=self.id_decimal_places_9)
        # make a menu
        menu = wx.Menu()
        self.popup_menu = menu
        menu.AppendCheckItem(self.id_average, "Average")
        menu.AppendCheckItem(self.id_peak_hold, "Peak Hold")
        #menu.Append(self.id_hide_gauge, "Hide gauge")
        menu.AppendCheckItem(self.id_show_gauge, "Show gauge")
        menu.Append(self.id_incr_ref_level, "Incr Ref Level")
        menu.Append(self.id_decr_ref_level, "Decr Ref Level")
        menu.Append(self.id_incr_decimal_places, "Incr decimal places")
        menu.Append(self.id_decr_decimal_places, "Decr decimal places")
        menu.AppendSeparator()
        # we'd use RadioItems for these, but they're not supported on Mac
        menu.AppendCheckItem(self.id_decimal_places_0, "0 decimal places")
        menu.AppendCheckItem(self.id_decimal_places_1, "1 decimal place")
        menu.AppendCheckItem(self.id_decimal_places_2, "2 decimal places")
        menu.AppendCheckItem(self.id_decimal_places_3, "3 decimal places")
        menu.AppendCheckItem(self.id_decimal_places_6, "6 decimal places")
        menu.AppendCheckItem(self.id_decimal_places_9, "9 decimal places")
        self.checkmarks = {
            self.id_average : lambda : self.numbersink.average,
            self.id_peak_hold : lambda : self.numbersink.peak_hold,
            #self.id_hide_gauge : lambda : self.numbersink.hide_gauge,
            self.id_show_gauge : lambda : self.numbersink.show_gauge,
            self.id_decimal_places_0 : lambda : self.numbersink.decimal_places == 0,
            self.id_decimal_places_1 : lambda : self.numbersink.decimal_places == 1,
            self.id_decimal_places_2 : lambda : self.numbersink.decimal_places == 2,
            self.id_decimal_places_3 : lambda : self.numbersink.decimal_places == 3,
            self.id_decimal_places_6 : lambda : self.numbersink.decimal_places == 6,
            self.id_decimal_places_9 : lambda : self.numbersink.decimal_places == 9,
            }
def next_up(v, seq):
    """
    Return the first item in seq that is > v.
    """
    for s in seq:
        if s > v:
            return s
    return v
def next_down(v, seq):
    """
    Return the last item in seq that is < v.
    """
    rseq = list(seq[:])
    rseq.reverse()
    for s in rseq:
        if s < v:
            return s
    return v
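# Example: with seq = (1, 2, 5, 10, 20), next_up(2, seq) returns 5 and
# next_down(2, seq) returns 1; both return v unchanged when seq contains
# no larger (resp. smaller) item.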
#========================================================================================
class number_window (plot.PlotCanvas):
    def __init__ (self, numbersink, parent, id = -1,label="number",
                  pos = wx.DefaultPosition, size = wx.DefaultSize,
                  style = wx.DEFAULT_FRAME_STYLE, name = ""):
        plot.PlotCanvas.__init__ (self, parent, id, pos, size, style, name)
        #wx.StaticText.__init__(self, parent, id, label, pos, (size[0]/2,size[1]/2), style, name)
        #print 'parent',parent
        self.static_text = static_text_window(self, numbersink, id, label, pos, (size[0]/2, size[1]/2), style, name)
        gauge_style = wx.GA_HORIZONTAL
        vbox=wx.BoxSizer(wx.VERTICAL)
        vbox.Add (self.static_text, 0, wx.EXPAND)
        self.current_value=None
        if numbersink.input_is_real:
          self.gauge=wx.Gauge( self, id, range=1000, pos=(pos[0],pos[1]+size[1]/2),size=(size[0]/2,size[1]/2), style=gauge_style,  name = "gauge")
          vbox.Add (self.gauge, 1, wx.EXPAND)
        else:
          self.gauge=wx.Gauge( self, id, range=1000, pos=(pos[0],pos[1]+size[1]/3),size=(size[0]/2,size[1]/3), style=gauge_style,  name = "gauge")
          #hbox=wx.BoxSizer(wx.HORIZONTAL)
          self.gauge_imag=wx.Gauge( self, id, range=1000, pos=(pos[0],pos[1]+size[1]*2/3),size=(size[0]/2,size[1]/3), style=gauge_style,  name = "gauge_imag")
          vbox.Add (self.gauge, 1, wx.EXPAND)
          vbox.Add (self.gauge_imag, 1, wx.EXPAND)
          #vbox.Add (hbox, 1, wx.EXPAND)
        self.sizer = vbox
        self.SetSizer (self.sizer)
        self.SetAutoLayout (True)
        self.sizer.Fit (self)
        self.label=label
        #self.y_range = None
        self.numbersink = numbersink
        self.peak_hold = False
        self.peak_vals = None
        #self.SetEnableGrid (True)
        # self.SetEnableZoom (True)
        # self.SetBackgroundColour ('black')
        
        #self.build_popup_menu()
        
        EVT_DATA_EVENT (self, self.set_data)
        wx.EVT_CLOSE (self, self.on_close_window)
        #self.Bind(wx.EVT_RIGHT_UP, self.on_right_click)
        #self.Bind(wx.EVT_RIGHT_UP, self.on_right_click)
        self.input_watcher = input_watcher(numbersink.msgq, numbersink.number_size, self)
    def on_close_window (self, event):
        print "number_window:on_close_window"
        self.keep_running = False
    def set_show_gauge(self, enable):
        self.show_gauge = enable
        if enable:
          self.gauge.Show()
          if not self.numbersink.input_is_real:
            self.gauge_imag.Show()
          #print 'show'
        else:
          self.gauge.Hide()
          if not self.numbersink.input_is_real:
            self.gauge_imag.Hide()
          #print 'hide'
    def set_data (self, evt):
        numbers = evt.data
        L = len (numbers)
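        # With peak hold enabled, display the element-wise maximum seen so far.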
        if self.peak_hold:
            if self.peak_vals is None:
                self.peak_vals = numbers
            else:
                self.peak_vals = numpy.maximum(numbers, self.peak_vals)
                numbers = self.peak_vals
        if self.numbersink.input_is_real:
            real_value=numbers[0]*self.numbersink.factor + self.numbersink.base_value
            imag_value=0.0
            self.current_value=real_value
        else:
            real_value=numbers[0]*self.numbersink.factor + self.numbersink.base_value
            imag_value=numbers[1]*self.numbersink.factor + self.numbersink.base_value
            self.current_value=complex(real_value,imag_value)
        #x = max(abs(self.numbersink.sample_rate), abs(self.numbersink.base_value))
        x = max(real_value, imag_value)
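        # Choose an SI unit prefix (G/M/k) and scale factor based on the
        # larger of the two displayed values.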
        if x >= 1e9:
            sf = 1e-9
            unit_prefix = "G"
        elif x >= 1e6:
            sf = 1e-6
            unit_prefix = "M"
        elif x>= 1e3:
            sf = 1e-3
            unit_prefix = "k"
        else :
            sf = 1
            unit_prefix = ""
        #self.update_y_range ()
        if self.numbersink.input_is_real:
          showtext = "%s: %.*f %s%s" % (self.label, self.numbersink.decimal_places,real_value*sf,unit_prefix,self.numbersink.unit)
        else:
          showtext = "%s: %.*f,%.*f %s%s" % (self.label, self.numbersink.decimal_places,real_value*sf,
                                                       self.numbersink.decimal_places,imag_value*sf,unit_prefix,self.numbersink.unit)
        self.static_text.SetLabel(showtext)
        #print (int(float((real_value-self.numbersink.base_value)*1000.0/(self.numbersink.maxval-self.numbersink.minval)))+500)
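        # Map the value linearly into the gauge's 0..1000 range, centred at
        # 500, with (minval, maxval) spanning the full scale.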
        self.gauge.SetValue(int(float((real_value-self.numbersink.base_value)*1000.0/(self.numbersink.maxval-self.numbersink.minval)))+500)
        if not self.numbersink.input_is_real:
          self.gauge_imag.SetValue(int(float((imag_value-self.numbersink.base_value)*1000.0/(self.numbersink.maxval-self.numbersink.minval)))+500)
    def set_peak_hold(self, enable):
        self.peak_hold = enable
        self.peak_vals = None
    def update_y_range (self):
        ymax = self.numbersink.ref_level
        ymin = self.numbersink.ref_level - self.numbersink.decimal_places * self.numbersink.y_divs
        self.y_range = self._axisInterval ('min', ymin, ymax)
    def on_average(self, evt):
        # print "on_average"
        self.numbersink.set_average(evt.IsChecked())
    def on_peak_hold(self, evt):
        # print "on_peak_hold"
        self.numbersink.set_peak_hold(evt.IsChecked())
# ----------------------------------------------------------------
#                     Deprecated interfaces
# ----------------------------------------------------------------
# returns (block, win).
#   block requires a single input stream of float
#   win is a subclass of wxWindow
def make_number_sink_f(fg, parent, label, number_size, input_rate, ymin = 0, ymax=50):
    
    block = number_sink_f(fg, parent, label=label, number_size=number_size, sample_rate=input_rate,
                       decimal_places=(ymax - ymin)/8, ref_level=ymax)
    return (block, block.win)
# returns (block, win).
#   block requires a single input stream of gr_complex
#   win is a subclass of wxWindow
def make_number_sink_c(fg, parent, label, number_size, input_rate, ymin=0, ymax=50):
    block = number_sink_c(fg, parent, label=label, number_size=number_size, sample_rate=input_rate,
                       decimal_places=(ymax - ymin)/8, ref_level=ymax)
    return (block, block.win)
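# Hypothetical usage sketch for the deprecated factories (assumes an existing
# stdgui flow graph `fg`, a wx `parent` and a `vbox` sizer; `number_size` is
# the per-message payload size expected by the sink):
#   block, win = make_number_sink_f(fg, parent, "level", number_size, 20.48e3)
#   vbox.Add (win, 1, wx.EXPAND)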
# ----------------------------------------------------------------
# Standalone test app
# ----------------------------------------------------------------
class test_app_flow_graph (stdgui.gui_flow_graph):
    def __init__(self, frame, panel, vbox, argv):
        stdgui.gui_flow_graph.__init__ (self, frame, panel, vbox, argv)
        #number_size = 256
        # build our flow graph
        input_rate = 20.48e3
        # Generate a complex sinusoid
        src1 = gr.sig_source_c (input_rate, gr.GR_SIN_WAVE, 2e3, 1)
        #src1 = gr.sig_source_c (input_rate, gr.GR_CONST_WAVE, 5.75e3, 1)
        # We add these throttle blocks so that this demo doesn't
        # suck down all the CPU available.  Normally you wouldn't use these.
        thr1 = gr.throttle(gr.sizeof_gr_complex, input_rate)
        #sink1 = number_sink_c (self, panel, label="Complex Data", number_size=number_size,
        #                    sample_rate=input_rate, base_value=100e3,
        #                    ref_level=0, decimal_places=3)
        #vbox.Add (sink1.win, 1, wx.EXPAND)
        #self.connect (src1, thr1, sink1)
        src2 = gr.sig_source_f (input_rate, gr.GR_SIN_WAVE, 2e3, 1)
        #src2 = gr.sig_source_f (input_rate, gr.GR_CONST_WAVE, 5.75e3, 1)
        thr2 = gr.throttle(gr.sizeof_float, input_rate)
        sink2 = number_sink_f (self, panel, unit='Hz',label="Real Data", avg_alpha=0.001,#number_size=number_size*2,
                            sample_rate=input_rate, base_value=100e3,
                            ref_level=0, decimal_places=3)
        vbox.Add (sink2.win, 1, wx.EXPAND)
        sink3 = number_sink_c (self, panel, unit='V',label="Complex Data", avg_alpha=0.001,#number_size=number_size*2,
                            sample_rate=input_rate, base_value=0,
                            ref_level=0, decimal_places=3)
        vbox.Add (sink3.win, 1, wx.EXPAND)
        self.connect (src2, thr2, sink2)
        self.connect (src1, thr1, sink3)
def main ():
    app = stdgui.stdapp (test_app_flow_graph,
                         "Number Sink Test App")
    app.MainLoop ()
if __name__ == '__main__':
    main ()
 | 
	gpl-3.0 | 1,420,374,950,807,376,000 | 40.161238 | 158 | 0.585328 | false | 
| 
	icebreaker/pyGLox | 
	demo/particlesystem.py | 
	1 | 
	1664 | 
	"""
	Copyright (c) 2011, Mihail Szabolcs
	All rights reserved.
	See LICENSE for more information.
"""
import random
import math
from pyglet.gl import *
class Particle(object):
	def __init__(self):
		self.p = [0,0,0]
		self.a = 1
		self.dx = (random.random() - 0.5)
		self.dy = (random.random() - 0.5)
		
	def update(self, dt):
		self.p[0] += self.dx * dt
		self.p[1] += math.fabs(self.dy / 3 * dt)
		
		self.a -= math.fabs(self.dx * 4) * dt
		self.a -= math.fabs(self.dy / 2) * dt
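		# Fully faded out: respawn the particle at the origin with full opacity.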
		if self.a <= 0:
			self.p = [0,0,0]
			self.a = 1
			self.dx = (random.random() - 0.5)
			self.dy = (random.random() - 0.5)
	def draw(self):
		#glColor4f(1, 0.6, 0.0, self.a)
		glColor4f(0.65, 0.0, 0.15, self.a)
		glVertex3f(self.p[0], self.p[1], self.p[2])
class ParticleSystem(object):
	def __init__(self, texture, n=512, p=Particle):
		self.texture = texture		
		self.n = n
		self.particles = []
		for i in range(n):
			self.particles.append(p())
	def update(self, dt):
		for i in range(self.n):
			self.particles[i].update(dt)
	def draw(self):
		self.texture.bind()
		glEnable(GL_BLEND)
		glBlendFunc(GL_SRC_ALPHA, GL_ONE)
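		# Additive blending (GL_SRC_ALPHA, GL_ONE) makes overlapping particles glow.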
		glEnable(GL_POINT_SPRITE)
		glPointSize(60.0)
		glTexEnvi(GL_POINT_SPRITE, GL_COORD_REPLACE, GL_TRUE)
		#glPointParameterf(GL_POINT_FADE_THRESHOLD_SIZE, 60.0);
		#glPointParameterf(GL_POINT_SIZE_MIN, 30.0);
		#glPointParameterf(GL_POINT_SIZE_MAX, 40.0);
		
		glDisable(GL_DEPTH_TEST)
		glDepthMask(GL_FALSE)
		for i in range(self.n):
			glBegin(GL_POINTS)
			self.particles[i].draw()
			glEnd()
		glDepthMask(GL_TRUE)
		glEnable(GL_DEPTH_TEST)
		glDisable(GL_POINT_SPRITE)
		glDisable(GL_BLEND)
		self.texture.release()
		glColor4f(1,1,1,1)
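# Hypothetical usage sketch (assumes `texture` provides bind()/release(), e.g.
# a pyGLox texture, and `dt` is the frame delta time in seconds):
#   ps = ParticleSystem(texture, n=256)
#   ps.update(dt)
#   ps.draw()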
 | 
	mit | 7,980,364,552,582,119,000 | 21.794521 | 57 | 0.640625 | false | 
| 
	dnaextrim/django_adminlte_x | 
	adminlte/static/plugins/datatables/extensions/Responsive/examples/initialisation/index.html.py | 
	1 | 
	2778 | 
[content masked in source dump: all characters replaced with 'X']
	mit | -3,221,245,733,640,435,000 | 41.753846 | 160 | 0.861411 | false | 
| 
	mne-tools/mne-python | 
	mne/tests/test_annotations.py | 
	1 | 
	53208 | 
	# Authors: Jaakko Leppakangas <[email protected]>
#          Robert Luke <[email protected]>
#
# License: BSD 3 clause
from collections import OrderedDict
from datetime import datetime, timezone
from itertools import repeat
import sys
import os.path as op
import pytest
from pytest import approx
from numpy.testing import (assert_equal, assert_array_equal,
                           assert_array_almost_equal, assert_allclose)
import numpy as np
import mne
from mne import (create_info, read_annotations, annotations_from_events,
                 events_from_annotations)
from mne import Epochs, Annotations
from mne.utils import (requires_version,
                       catch_logging, requires_pandas)
from mne.utils import (assert_and_remove_boundary_annot, _raw_annot,
                       _dt_to_stamp, _stamp_to_dt, check_version)
from mne.io import read_raw_fif, RawArray, concatenate_raws
from mne.annotations import (_sync_onset, _handle_meas_date,
                             _read_annotations_txt_parse_header)
from mne.datasets import testing
data_dir = op.join(testing.data_path(download=False), 'MEG', 'sample')
fif_fname = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data',
                    'test_raw.fif')
first_samps = pytest.mark.parametrize('first_samp', (0, 10000))
needs_pandas = pytest.mark.skipif(
    not check_version('pandas'), reason='Needs pandas')
# On Windows, datetime.fromtimestamp throws an error for negative times.
# We mimic this behavior on non-Windows platforms for ease of testing.
class _windows_datetime(datetime):
    @classmethod
    def fromtimestamp(cls, timestamp, tzinfo=None):
        if timestamp < 0:
            raise OSError('[Errno 22] Invalid argument')
        return datetime.fromtimestamp(timestamp, tzinfo)
@pytest.fixture(scope='function')
def windows_like_datetime(monkeypatch):
    """Ensure datetime.fromtimestamp is Windows-like."""
    if not sys.platform.startswith('win'):
        monkeypatch.setattr('mne.annotations.datetime', _windows_datetime)
    yield
def test_basics():
    """Test annotation class."""
    raw = read_raw_fif(fif_fname)
    assert raw.annotations is not None
    assert len(raw.annotations.onset) == 0
    pytest.raises(IOError, read_annotations, fif_fname)
    onset = np.array(range(10))
    duration = np.ones(10)
    description = np.repeat('test', 10)
    dt = raw.info['meas_date']
    assert isinstance(dt, datetime)
    stamp = _dt_to_stamp(dt)
    # Test time shifts.
    for orig_time in [None, dt, stamp[0], stamp]:
        annot = Annotations(onset, duration, description, orig_time)
        if orig_time is None:
            assert annot.orig_time is None
        else:
            assert isinstance(annot.orig_time, datetime)
            assert annot.orig_time.tzinfo is timezone.utc
    pytest.raises(ValueError, Annotations, onset, duration, description[:9])
    pytest.raises(ValueError, Annotations, [onset, 1], duration, description)
    pytest.raises(ValueError, Annotations, onset, [duration, 1], description)
    # Test combining annotations with concatenate_raws
    raw2 = raw.copy()
    delta = raw.times[-1] + 1. / raw.info['sfreq']
    orig_time = (stamp[0] + stamp[1] * 1e-6 + raw2._first_time)
    offset = _dt_to_stamp(_handle_meas_date(raw2.info['meas_date']))
    offset = offset[0] + offset[1] * 1e-6
    offset = orig_time - offset
    assert_allclose(offset, raw._first_time)
    annot = Annotations(onset, duration, description, orig_time)
    assert annot.orig_time is not None
    assert ' segments' in repr(annot)
    raw2.set_annotations(annot)
    assert_allclose(raw2.annotations.onset, onset + offset)
    assert raw2.annotations is not annot
    assert raw2.annotations.orig_time is not None
    concatenate_raws([raw, raw2])
    assert_and_remove_boundary_annot(raw)
    assert_allclose(onset + offset + delta, raw.annotations.onset, rtol=1e-5)
    assert_array_equal(annot.duration, raw.annotations.duration)
    assert_array_equal(raw.annotations.description, np.repeat('test', 10))
def test_annot_sanitizing(tmpdir):
    """Test description sanitizing."""
    annot = Annotations([0], [1], ['a;:b'])
    fname = str(tmpdir.join('custom-annot.fif'))
    annot.save(fname)
    annot_read = read_annotations(fname)
    _assert_annotations_equal(annot, annot_read)
    # make sure pytest raises error on char-sequence that is not allowed
    with pytest.raises(ValueError, match='in description not supported'):
        Annotations([0], [1], ['a{COLON}b'])
def test_raw_array_orig_times():
    """Test combining with RawArray and orig_times."""
    data = np.random.randn(2, 1000) * 10e-12
    sfreq = 100.
    info = create_info(ch_names=['MEG1', 'MEG2'], ch_types=['grad'] * 2,
                       sfreq=sfreq)
    meas_date = _handle_meas_date(np.pi)
    info['meas_date'] = meas_date
    raws = []
    for first_samp in [12300, 100, 12]:
        raw = RawArray(data.copy(), info, first_samp=first_samp)
        ants = Annotations([1., 2.], [.5, .5], 'x', np.pi + first_samp / sfreq)
        raw.set_annotations(ants)
        raws.append(raw)
    assert_allclose(raws[0].annotations.onset, [124, 125])
    raw = RawArray(data.copy(), info)
    assert not len(raw.annotations)
    raw.set_annotations(Annotations([1.], [.5], 'x', None))
    assert_allclose(raw.annotations.onset, [1.])
    raws.append(raw)
    raw = concatenate_raws(raws, verbose='debug')
    assert raw.info['meas_date'] == raw.annotations.orig_time == meas_date
    assert_and_remove_boundary_annot(raw, 3)
    assert_array_equal(raw.annotations.onset, [124., 125., 134., 135.,
                                               144., 145., 154.])
    raw.annotations.delete(2)
    assert_array_equal(raw.annotations.onset, [124., 125., 135., 144.,
                                               145., 154.])
    raw.annotations.append(5, 1.5, 'y')
    assert_array_equal(raw.annotations.onset,
                       [5., 124., 125., 135., 144., 145., 154.])
    assert_array_equal(raw.annotations.duration,
                       [1.5, .5, .5, .5, .5, .5, .5])
    assert_array_equal(raw.annotations.description,
                       ['y', 'x', 'x', 'x', 'x', 'x', 'x'])
    # These three things should be equivalent
    stamp = _dt_to_stamp(raw.info['meas_date'])
    orig_time = _handle_meas_date(stamp)
    for empty_annot in (
            Annotations([], [], [], stamp),
            Annotations([], [], [], orig_time),
            Annotations([], [], [], None),
            None):
        raw.set_annotations(empty_annot)
        assert isinstance(raw.annotations, Annotations)
        assert len(raw.annotations) == 0
        assert raw.annotations.orig_time == orig_time
def test_crop(tmpdir):
    """Test cropping with annotations."""
    raw = read_raw_fif(fif_fname)
    events = mne.find_events(raw)
    onset = events[events[:, 2] == 1, 0] / raw.info['sfreq']
    duration = np.full_like(onset, 0.5)
    description = ['bad %d' % k for k in range(len(onset))]
    annot = mne.Annotations(onset, duration, description,
                            orig_time=raw.info['meas_date'])
    raw.set_annotations(annot)
    split_time = raw.times[-1] / 2. + 2.
    split_idx = len(onset) // 2 + 1
    raw_cropped_left = raw.copy().crop(0., split_time - 1. / raw.info['sfreq'])
    assert_array_equal(raw_cropped_left.annotations.description,
                       raw.annotations.description[:split_idx])
    assert_allclose(raw_cropped_left.annotations.duration,
                    raw.annotations.duration[:split_idx])
    assert_allclose(raw_cropped_left.annotations.onset,
                    raw.annotations.onset[:split_idx])
    raw_cropped_right = raw.copy().crop(split_time, None)
    assert_array_equal(raw_cropped_right.annotations.description,
                       raw.annotations.description[split_idx:])
    assert_allclose(raw_cropped_right.annotations.duration,
                    raw.annotations.duration[split_idx:])
    assert_allclose(raw_cropped_right.annotations.onset,
                    raw.annotations.onset[split_idx:])
    raw_concat = mne.concatenate_raws([raw_cropped_left, raw_cropped_right],
                                      verbose='debug')
    assert_allclose(raw_concat.times, raw.times)
    assert_allclose(raw_concat[:][0], raw[:][0], atol=1e-20)
    assert_and_remove_boundary_annot(raw_concat)
    # Ensure annotations survive the crop->concat round-trip
    assert_array_equal(raw_concat.annotations.description,
                       raw.annotations.description)
    for attr in ('onset', 'duration'):
        assert_allclose(getattr(raw_concat.annotations, attr),
                        getattr(raw.annotations, attr),
                        err_msg='Failed for %s:' % (attr,))
    raw.set_annotations(None)  # undo
    # Test concatenating annotations with and without orig_time.
    raw2 = raw.copy()
    raw.set_annotations(Annotations([45.], [3], 'test', raw.info['meas_date']))
    raw2.set_annotations(Annotations([2.], [3], 'BAD', None))
    expected_onset = [45., 2. + raw._last_time]
    raw = concatenate_raws([raw, raw2])
    assert_and_remove_boundary_annot(raw)
    assert_array_almost_equal(raw.annotations.onset, expected_onset, decimal=2)
    # Test IO
    tempdir = str(tmpdir)
    fname = op.join(tempdir, 'test-annot.fif')
    raw.annotations.save(fname)
    annot_read = read_annotations(fname)
    for attr in ('onset', 'duration'):
        assert_allclose(getattr(annot_read, attr),
                        getattr(raw.annotations, attr))
    assert annot_read.orig_time == raw.annotations.orig_time
    assert_array_equal(annot_read.description, raw.annotations.description)
    annot = Annotations((), (), ())
    annot.save(fname, overwrite=True)
    pytest.raises(IOError, read_annotations, fif_fname)  # none in old raw
    annot = read_annotations(fname)
    assert isinstance(annot, Annotations)
    assert len(annot) == 0
    annot.crop()  # test if cropping empty annotations doesn't raise an error
    # Test that empty annotations can be saved with an object
    fname = op.join(tempdir, 'test_raw.fif')
    raw.set_annotations(annot)
    raw.save(fname)
    raw_read = read_raw_fif(fname)
    assert isinstance(raw_read.annotations, Annotations)
    assert len(raw_read.annotations) == 0
    raw.set_annotations(None)
    raw.save(fname, overwrite=True)
    raw_read = read_raw_fif(fname)
    assert raw_read.annotations is not None
    assert len(raw_read.annotations.onset) == 0
@first_samps
def test_chunk_duration(first_samp):
    """Test chunk_duration."""
    # create dummy raw
    raw = RawArray(data=np.empty([10, 10], dtype=np.float64),
                   info=create_info(ch_names=10, sfreq=1.),
                   first_samp=first_samp)
    raw.info['meas_date'] = _handle_meas_date(0)
    raw.set_annotations(Annotations(description='foo', onset=[0],
                                    duration=[10], orig_time=None))
    assert raw.annotations.orig_time == raw.info['meas_date']
    assert_allclose(raw.annotations.onset, [first_samp])
    # expected_events = [[0, 0, 1], [0, 0, 1], [1, 0, 1], [1, 0, 1], ..
    #                    [9, 0, 1], [9, 0, 1]]
    expected_events = np.atleast_2d(np.repeat(range(10), repeats=2)).T
    expected_events = np.insert(expected_events, 1, 0, axis=1)
    expected_events = np.insert(expected_events, 2, 1, axis=1)
    expected_events[:, 0] += first_samp
    events, events_id = events_from_annotations(raw, chunk_duration=.5,
                                                use_rounding=False)
    assert_array_equal(events, expected_events)
    # test chunk durations that do not fit equally in annotation duration
    expected_events = np.zeros((3, 3))
    expected_events[:, -1] = 1
    expected_events[:, 0] = np.arange(0, 9, step=3) + first_samp
    events, events_id = events_from_annotations(raw, chunk_duration=3.)
    assert_array_equal(events, expected_events)
def test_events_from_annotation_orig_time_none():
    """Tests events_from_annotation with orig_time None and first_sampe > 0."""
    # Create fake data
    sfreq, duration_s = 100, 10
    data = np.random.RandomState(42).randn(1, sfreq * duration_s)
    info = mne.create_info(ch_names=['EEG1'], ch_types=['eeg'], sfreq=sfreq)
    raw = mne.io.RawArray(data, info)
    # Add annotation toward the end
    onset = [8]
    duration = [1]
    description = ['0']
    annots = mne.Annotations(onset, duration, description)
    raw = raw.set_annotations(annots)
    # Crop start of raw
    raw.crop(tmin=7)
    # Extract epochs
    events, event_ids = mne.events_from_annotations(raw)
    epochs = mne.Epochs(
        raw, events, tmin=0, tmax=1, baseline=None, on_missing='warning')
    # the epoch data matches the original raw data
    assert_array_equal(epochs.get_data()[0], data[:, 800:901])
def test_crop_more():
    """Test more cropping."""
    raw = mne.io.read_raw_fif(fif_fname).crop(0, 11).load_data()
    raw._data[:] = np.random.RandomState(0).randn(*raw._data.shape)
    onset = np.array([0.47058824, 2.49773765, 6.67873287, 9.15837097])
    duration = np.array([0.89592767, 1.13574672, 1.09954739, 0.48868752])
    annotations = mne.Annotations(onset, duration, 'BAD')
    raw.set_annotations(annotations)
    assert len(raw.annotations) == 4
    delta = 1. / raw.info['sfreq']
    offset = raw.first_samp * delta
    raw_concat = mne.concatenate_raws(
        [raw.copy().crop(0, 4 - delta),
         raw.copy().crop(4, 8 - delta),
         raw.copy().crop(8, None)])
    assert_allclose(raw_concat.times, raw.times)
    assert_allclose(raw_concat[:][0], raw[:][0])
    assert raw_concat.first_samp == raw.first_samp
    assert_and_remove_boundary_annot(raw_concat, 2)
    assert len(raw_concat.annotations) == 4
    assert_array_equal(raw_concat.annotations.description,
                       raw.annotations.description)
    assert_allclose(raw.annotations.duration, duration)
    assert_allclose(raw_concat.annotations.duration, duration)
    assert_allclose(raw.annotations.onset, onset + offset)
    assert_allclose(raw_concat.annotations.onset, onset + offset,
                    atol=1. / raw.info['sfreq'])
@testing.requires_testing_data
def test_read_brainstorm_annotations():
    """Test reading for Brainstorm events file."""
    fname = op.join(data_dir, 'events_sample_audvis_raw_bst.mat')
    annot = read_annotations(fname)
    assert len(annot) == 238
    assert annot.onset.min() > 40  # takes into account first_samp
    assert np.unique(annot.description).size == 5
@first_samps
def test_raw_reject(first_samp):
    """Test raw data getter with annotation reject."""
    sfreq = 100.
    info = create_info(['a', 'b', 'c', 'd', 'e'], sfreq, ch_types='eeg')
    raw = RawArray(np.ones((5, 15000)), info, first_samp=first_samp)
    with pytest.warns(RuntimeWarning, match='outside the data range'):
        raw.set_annotations(Annotations([2, 100, 105, 148],
                                        [2, 8, 5, 8], 'BAD'))
    data, times = raw.get_data([0, 1, 3, 4], 100, 11200,  # 1-112 sec
                               'omit', return_times=True)
    bad_times = np.concatenate([np.arange(200, 400),
                                np.arange(10000, 10800),
                                np.arange(10500, 11000)])
    expected_times = np.setdiff1d(np.arange(100, 11200), bad_times) / sfreq
    assert_allclose(times, expected_times)
    # with orig_time and complete overlap
    raw = read_raw_fif(fif_fname)
    raw.set_annotations(Annotations(onset=[1, 4, 5] + raw._first_time,
                                    duration=[1, 3, 1],
                                    description='BAD',
                                    orig_time=raw.info['meas_date']))
    t_stop = 18.
    assert raw.times[-1] > t_stop
    n_stop = int(round(t_stop * raw.info['sfreq']))
    n_drop = int(round(4 * raw.info['sfreq']))
    assert len(raw.times) >= n_stop
    data, times = raw.get_data(range(10), 0, n_stop, 'omit', True)
    assert data.shape == (10, n_stop - n_drop)
    assert times[-1] == raw.times[n_stop - 1]
    assert_array_equal(data[:, -100:], raw[:10, n_stop - 100:n_stop][0])
    data, times = raw.get_data(range(10), 0, n_stop, 'NaN', True)
    assert_array_equal(data.shape, (10, n_stop))
    assert times[-1] == raw.times[n_stop - 1]
    t_1, t_2 = raw.time_as_index([1, 2], use_rounding=True)
    assert np.isnan(data[:, t_1:t_2]).all()  # 1s -2s
    assert not np.isnan(data[:, :t_1].any())
    assert not np.isnan(data[:, t_2:].any())
    assert_array_equal(data[:, -100:], raw[:10, n_stop - 100:n_stop][0])
    assert_array_equal(raw.get_data(), raw[:][0])
    # Test _sync_onset
    times = [10, -88, 190]
    onsets = _sync_onset(raw, times)
    assert_array_almost_equal(onsets, times - raw.first_samp /
                              raw.info['sfreq'])
    assert_array_almost_equal(times, _sync_onset(raw, onsets, True))
@first_samps
def test_annotation_filtering(first_samp):
    """Test that annotations work properly with filtering."""
    # Create data with just a DC component
    data = np.ones((1, 1000))
    info = create_info(1, 1000., 'eeg')
    raws = [RawArray(data * (ii + 1), info, first_samp=first_samp)
            for ii in range(4)]
    kwargs_pass = dict(l_freq=None, h_freq=50., fir_design='firwin')
    kwargs_stop = dict(l_freq=50., h_freq=None, fir_design='firwin')
    # lowpass filter, which should not modify the data
    raws_pass = [raw.copy().filter(**kwargs_pass) for raw in raws]
    # highpass filter, which should zero it out
    raws_stop = [raw.copy().filter(**kwargs_stop) for raw in raws]
    # concat the original and the filtered segments
    raws_concat = concatenate_raws([raw.copy() for raw in raws])
    raws_zero = raws_concat.copy().apply_function(lambda x: x * 0)
    raws_pass_concat = concatenate_raws(raws_pass)
    raws_stop_concat = concatenate_raws(raws_stop)
    # make sure we did something reasonable with our individual-file filtering
    assert_allclose(raws_concat[0][0], raws_pass_concat[0][0], atol=1e-14)
    assert_allclose(raws_zero[0][0], raws_stop_concat[0][0], atol=1e-14)
    # ensure that our Annotations cut up the filtering properly
    raws_concat_pass = raws_concat.copy().filter(skip_by_annotation='edge',
                                                 **kwargs_pass)
    assert_allclose(raws_concat[0][0], raws_concat_pass[0][0], atol=1e-14)
    raws_concat_stop = raws_concat.copy().filter(skip_by_annotation='edge',
                                                 **kwargs_stop)
    assert_allclose(raws_zero[0][0], raws_concat_stop[0][0], atol=1e-14)
    # one last test: let's cut out a section entirely:
    # here the 1-3 second window should be skipped
    raw = raws_concat.copy()
    raw.annotations.append(1. + raw._first_time, 2., 'foo')
    with catch_logging() as log:
        raw.filter(l_freq=50., h_freq=None, fir_design='firwin',
                   skip_by_annotation='foo', verbose='info')
    log = log.getvalue()
    assert '2 contiguous segments' in log
    raw.annotations.append(2. + raw._first_time, 1., 'foo')  # shouldn't change
    with catch_logging() as log:
        raw.filter(l_freq=50., h_freq=None, fir_design='firwin',
                   skip_by_annotation='foo', verbose='info')
    log = log.getvalue()
    assert '2 contiguous segments' in log
    # our filter will zero out anything not skipped:
    mask = np.concatenate((np.zeros(1000), np.ones(2000), np.zeros(1000)))
    expected_data = raws_concat[0][0][0] * mask
    assert_allclose(raw[0][0][0], expected_data, atol=1e-14)
    # Let's try another one
    raw = raws[0].copy()
    raw.set_annotations(Annotations([0.], [0.5], ['BAD_ACQ_SKIP']))
    my_data, times = raw.get_data(reject_by_annotation='omit',
                                  return_times=True)
    assert_allclose(times, raw.times[500:])
    assert my_data.shape == (1, 500)
    raw_filt = raw.copy().filter(skip_by_annotation='bad_acq_skip',
                                 **kwargs_stop)
    expected = data.copy()
    expected[:, 500:] = 0
    assert_allclose(raw_filt[:][0], expected, atol=1e-14)
    raw = raws[0].copy()
    raw.set_annotations(Annotations([0.5], [0.5], ['BAD_ACQ_SKIP']))
    my_data, times = raw.get_data(reject_by_annotation='omit',
                                  return_times=True)
    assert_allclose(times, raw.times[:500])
    assert my_data.shape == (1, 500)
    raw_filt = raw.copy().filter(skip_by_annotation='bad_acq_skip',
                                 **kwargs_stop)
    expected = data.copy()
    expected[:, :500] = 0
    assert_allclose(raw_filt[:][0], expected, atol=1e-14)
@first_samps
def test_annotation_omit(first_samp):
    """Test raw.get_data with annotations."""
    data = np.concatenate([np.ones((1, 1000)), 2 * np.ones((1, 1000))], -1)
    info = create_info(1, 1000., 'eeg')
    raw = RawArray(data, info, first_samp=first_samp)
    raw.set_annotations(Annotations([0.5], [1], ['bad']))
    expected = raw[0][0]
    assert_allclose(raw.get_data(reject_by_annotation=None), expected)
    # nan
    expected[0, 500:1500] = np.nan
    assert_allclose(raw.get_data(reject_by_annotation='nan'), expected)
    got = np.concatenate([raw.get_data(start=start, stop=stop,
                                       reject_by_annotation='nan')
                          for start, stop in ((0, 1000), (1000, 2000))], -1)
    assert_allclose(got, expected)
    # omit
    expected = expected[:, np.isfinite(expected[0])]
    assert_allclose(raw.get_data(reject_by_annotation='omit'), expected)
    got = np.concatenate([raw.get_data(start=start, stop=stop,
                                       reject_by_annotation='omit')
                          for start, stop in ((0, 1000), (1000, 2000))], -1)
    assert_allclose(got, expected)
    pytest.raises(ValueError, raw.get_data, reject_by_annotation='foo')
def test_annotation_epoching():
    """Test that annotations work properly with concatenated edges."""
    # Create data with just a DC component
    data = np.ones((1, 1000))
    info = create_info(1, 1000., 'eeg')
    raw = concatenate_raws([RawArray(data, info) for ii in range(3)])
    assert raw.annotations is not None
    assert len(raw.annotations) == 4
    assert np.in1d(raw.annotations.description, ['BAD boundary']).sum() == 2
    assert np.in1d(raw.annotations.description, ['EDGE boundary']).sum() == 2
    assert_array_equal(raw.annotations.duration, 0.)
    events = np.array([[a, 0, 1] for a in [0, 500, 1000, 1500, 2000]])
    epochs = Epochs(raw, events, tmin=0, tmax=0.999, baseline=None,
                    preload=True)  # 1000 samples long
    assert_equal(len(epochs.drop_log), len(events))
    assert_equal(len(epochs), 3)
    assert_equal([0, 2, 4], epochs.selection)
def test_annotation_concat():
    """Test if two Annotations objects can be concatenated."""
    a = Annotations([1, 2, 3], [5, 5, 8], ["a", "b", "c"])
    b = Annotations([11, 12, 13], [1, 2, 2], ["x", "y", "z"])
    # test + operator (does not modify a or b)
    c = a + b
    assert_array_equal(c.onset, [1, 2, 3, 11, 12, 13])
    assert_array_equal(c.duration, [5, 5, 8, 1, 2, 2])
    assert_array_equal(c.description, ["a", "b", "c", "x", "y", "z"])
    assert_equal(len(a), 3)
    assert_equal(len(b), 3)
    assert_equal(len(c), 6)
    # test += operator (modifies a in place)
    a += b
    assert_array_equal(a.onset, [1, 2, 3, 11, 12, 13])
    assert_array_equal(a.duration, [5, 5, 8, 1, 2, 2])
    assert_array_equal(a.description, ["a", "b", "c", "x", "y", "z"])
    assert_equal(len(a), 6)
    assert_equal(len(b), 3)
    # test += operator (modifies a in place)
    b._orig_time = _handle_meas_date(1038942070.7201)
    with pytest.raises(ValueError, match='orig_time should be the same'):
        a += b
def test_annotations_crop():
    """Test basic functionality of annotation crop."""
    onset = np.arange(1, 10)
    duration = np.full_like(onset, 10)
    description = ["yy"] * onset.shape[0]
    a = Annotations(onset=onset,
                    duration=duration,
                    description=description,
                    orig_time=0)
    # cropping window larger than annotations --> do not modify
    a_ = a.copy().crop(tmin=-10, tmax=42)
    assert_array_equal(a_.onset, a.onset)
    assert_array_equal(a_.duration, a.duration)
    # cropping with left shifted window
    with pytest.warns(None) as w:
        a_ = a.copy().crop(tmin=0, tmax=4.2)
    assert_array_equal(a_.onset, [1., 2., 3., 4.])
    assert_allclose(a_.duration, [3.2, 2.2, 1.2, 0.2])
    assert len(w) == 0
    # cropping with right shifted window
    with pytest.warns(None) as w:
        a_ = a.copy().crop(tmin=17.8, tmax=22)
    assert_array_equal(a_.onset, [17.8, 17.8])
    assert_allclose(a_.duration, [0.2, 1.2])
    assert len(w) == 0
    # cropping with centered small window
    a_ = a.copy().crop(tmin=11, tmax=12)
    assert_array_equal(a_.onset, [11, 11, 11, 11, 11, 11, 11, 11, 11])
    assert_array_equal(a_.duration, [0, 1, 1, 1, 1, 1, 1, 1, 1])
    # cropping with out-of-bounds window
    with pytest.warns(None) as w:
        a_ = a.copy().crop(tmin=42, tmax=100)
    assert_array_equal(a_.onset, [])
    assert_array_equal(a_.duration, [])
    assert len(w) == 0
    # test error raising
    with pytest.raises(ValueError, match='tmax should be greater than.*tmin'):
        a.copy().crop(tmin=42, tmax=0)
    # test warnings
    with pytest.warns(RuntimeWarning, match='Omitted .* were outside'):
        a.copy().crop(tmin=42, tmax=100, emit_warning=True)
    with pytest.warns(RuntimeWarning, match='Limited .* expanding outside'):
        a.copy().crop(tmin=0, tmax=12, emit_warning=True)
@testing.requires_testing_data
def test_events_from_annot_in_raw_objects():
    """Test basic functionality of events_fron_annot for raw objects."""
    raw = read_raw_fif(fif_fname)
    events = mne.find_events(raw)
    event_id = {
        'Auditory/Left': 1,
        'Auditory/Right': 2,
        'Visual/Left': 3,
        'Visual/Right': 4,
        'Visual/Smiley': 32,
        'Motor/Button': 5
    }
    event_map = {v: k for k, v in event_id.items()}
    annot = Annotations(onset=raw.times[events[:, 0] - raw.first_samp],
                        duration=np.zeros(len(events)),
                        description=[event_map[vv] for vv in events[:, 2]],
                        orig_time=None)
    raw.set_annotations(annot)
    events2, event_id2 = \
        events_from_annotations(raw, event_id=event_id, regexp=None)
    assert_array_equal(events, events2)
    assert_equal(event_id, event_id2)
    events3, event_id3 = \
        events_from_annotations(raw, event_id=None, regexp=None)
    assert_array_equal(events[:, 0], events3[:, 0])
    assert set(event_id.keys()) == set(event_id3.keys())
    # ensure that these actually got sorted properly
    expected_event_id = {
        desc: idx + 1 for idx, desc in enumerate(sorted(event_id.keys()))}
    assert event_id3 == expected_event_id
    first = np.unique(events3[:, 2])
    second = np.arange(1, len(event_id) + 1, 1).astype(first.dtype)
    assert_array_equal(first, second)
    first = np.unique(list(event_id3.values()))
    second = np.arange(1, len(event_id) + 1, 1).astype(first.dtype)
    assert_array_equal(first, second)
    events4, event_id4 =\
        events_from_annotations(raw, event_id=None, regexp='.*Left')
    expected_event_id4 = {k: v for k, v in event_id.items() if 'Left' in k}
    assert_equal(event_id4.keys(), expected_event_id4.keys())
    expected_events4 = events[(events[:, 2] == 1) | (events[:, 2] == 3)]
    assert_array_equal(expected_events4[:, 0], events4[:, 0])
    events5, event_id5 = \
        events_from_annotations(raw, event_id=event_id, regexp='.*Left')
    expected_event_id5 = {k: v for k, v in event_id.items() if 'Left' in k}
    assert_equal(event_id5, expected_event_id5)
    expected_events5 = events[(events[:, 2] == 1) | (events[:, 2] == 3)]
    assert_array_equal(expected_events5, events5)
    with pytest.raises(ValueError, match='not find any of the events'):
        events_from_annotations(raw, regexp='not_there')
    with pytest.raises(ValueError, match='Invalid type for event_id'):
        events_from_annotations(raw, event_id='wrong')
    # concat does not introduce BAD or EDGE
    raw_concat = concatenate_raws([raw.copy(), raw.copy()])
    _, event_id = events_from_annotations(raw_concat)
    assert isinstance(event_id, dict)
    assert len(event_id) > 0
    for kind in ('BAD', 'EDGE'):
        assert '%s boundary' % kind in raw_concat.annotations.description
        for key in event_id.keys():
            assert kind not in key
    # remove all events
    raw.set_annotations(None)
    events7, _ = events_from_annotations(raw)
    assert_array_equal(events7, np.empty((0, 3), dtype=int))
def test_events_from_annot_onset_alignment():
    """Test that event and annotation onsets are the same."""
    raw = _raw_annot(meas_date=1, orig_time=1.5)
    #       sec  0        1        2        3
    #       raw  .        |--------xxxxxxxxx
    #     annot  .             |---xx
    # raw.annot  .        |--------xx
    #   latency  .        0        1        2
    #            .                 0        0
    assert raw.annotations.orig_time == _handle_meas_date(1)
    assert raw.annotations.onset[0] == 1
    assert raw.first_samp == 10
    event_latencies, event_id = events_from_annotations(raw)
    assert event_latencies[0, 0] == 10
    assert raw.first_samp == event_latencies[0, 0]
def _create_annotation_based_on_descr(description, annotation_start_sampl=0,
                                      duration=0, orig_time=0):
    """Create a raw object with annotations from descriptions.
    The returned raw object contains as many annotations as descriptions
    given, all starting at `annotation_start_sampl`.
    """
    # create dummy raw
    raw = RawArray(data=np.empty([10, 10], dtype=np.float64),
                   info=create_info(ch_names=10, sfreq=1000.),
                   first_samp=0)
    raw.set_meas_date(0)
    # create dummy annotations based on the descriptions
    onset = raw.times[annotation_start_sampl]
    onset_matching_desc = np.full_like(description, onset, dtype=type(onset))
    duration_matching_desc = np.full_like(description, duration,
                                          dtype=type(duration))
    annot = Annotations(description=description,
                        onset=onset_matching_desc,
                        duration=duration_matching_desc,
                        orig_time=orig_time)
    if duration != 0:
        with pytest.warns(RuntimeWarning, match='Limited.*expanding outside'):
            # duration 0.1s is larger than the raw data expand
            raw.set_annotations(annot)
    else:
        raw.set_annotations(annot)
    # Make sure that set_annotations(annot) works
    assert all(raw.annotations.onset == onset)
    if duration != 0:
        expected_duration = (len(raw.times) / raw.info['sfreq']) - onset
    else:
        expected_duration = 0
    _duration = raw.annotations.duration[0]
    assert _duration == approx(expected_duration)
    assert all(raw.annotations.duration == _duration)
    assert all(raw.annotations.description == description)
    return raw
def test_event_id_function_default():
    """Test[unit_test] for event_id_function default in event_from_annotations.
    The expected behavior is give numeric label for all those annotations not
    present in event_id, starting at 1.
    """
    # No event_id given
    description = ['a', 'b', 'c', 'd', 'e', 'f', 'g']
    expected_event_id = dict(zip(description, range(1, 100)))
    expected_events = np.array([[3, 3, 3, 3, 3, 3, 3],
                                [0, 0, 0, 0, 0, 0, 0],
                                [1, 2, 3, 4, 5, 6, 7]]).T
    raw = _create_annotation_based_on_descr(description,
                                            annotation_start_sampl=3,
                                            duration=100)
    events, event_id = events_from_annotations(raw, event_id=None)
    assert_array_equal(events, expected_events)
    assert event_id == expected_event_id
def test_event_id_function_using_custom_function():
    """Test [unit_test] arbitrary function to create the ids."""
    def _constant_id(*args, **kwargs):
        return 42
    description = ['a', 'b', 'c', 'd', 'e', 'f', 'g']
    expected_event_id = dict(zip(description, repeat(42)))
    expected_events = np.repeat([[0, 0, 42]], len(description), axis=0)
    raw = _create_annotation_based_on_descr(description)
    events, event_id = events_from_annotations(raw, event_id=_constant_id)
    assert_array_equal(events, expected_events)
    assert event_id == expected_event_id
# Test for IO with .csv files
def _assert_annotations_equal(a, b, tol=0):
    __tracebackhide__ = True
    assert_allclose(a.onset, b.onset, rtol=0, atol=tol)
    assert_allclose(a.duration, b.duration, rtol=0, atol=tol)
    assert_array_equal(a.description, b.description)
    assert_array_equal(a.ch_names, b.ch_names)
    a_orig_time = a.orig_time
    b_orig_time = b.orig_time
    assert a_orig_time == b_orig_time
_ORIG_TIME = datetime.fromtimestamp(1038942071.7201, timezone.utc)
@pytest.fixture(scope='function', params=('ch_names', 'fmt'))
def dummy_annotation_file(tmpdir_factory, ch_names, fmt):
    """Create csv file for testing."""
    if fmt == 'csv':
        content = ("onset,duration,description\n"
                   "2002-12-03 19:01:11.720100,1.0,AA\n"
                   "2002-12-03 19:01:20.720100,2.425,BB")
    elif fmt == 'txt':
        content = ("# MNE-Annotations\n"
                   "# orig_time : 2002-12-03 19:01:11.720100\n"
                   "# onset, duration, description\n"
                   "0, 1, AA \n"
                   "9, 2.425, BB")
    else:
        assert fmt == 'fif'
        content = Annotations(
            [0, 9], [1, 2.425], ['AA', 'BB'], orig_time=_ORIG_TIME)
    if ch_names:
        if isinstance(content, Annotations):
            # this is a bit of a hack but it works
            content.ch_names[:] = ((), ('MEG0111', 'MEG2563'))
        else:
            content = content.splitlines()
            content[-3] += ',ch_names'
            content[-2] += ','
            content[-1] += ',MEG0111:MEG2563'
            content = '\n'.join(content)
    fname = tmpdir_factory.mktemp('data').join(f'annotations-annot.{fmt}')
    if isinstance(content, str):
        fname.write(content)
    else:
        content.save(fname)
    return fname
@pytest.mark.parametrize('ch_names', (False, True))
@pytest.mark.parametrize('fmt', [
    pytest.param('csv', marks=needs_pandas),
    'txt',
    'fif'
])
def test_io_annotation(dummy_annotation_file, tmpdir, fmt, ch_names):
    """Test CSV, TXT, and FIF input/output (which support ch_names)."""
    annot = read_annotations(dummy_annotation_file)
    assert annot.orig_time == _ORIG_TIME
    kwargs = dict(orig_time=_ORIG_TIME)
    if ch_names:
        kwargs['ch_names'] = ((), ('MEG0111', 'MEG2563'))
    _assert_annotations_equal(
        annot, Annotations([0., 9.], [1., 2.425], ['AA', 'BB'], **kwargs),
        tol=1e-6)
    # Now test writing
    fname = tmpdir.join(f'annotations-annot.{fmt}')
    annot.save(fname)
    annot2 = read_annotations(fname)
    _assert_annotations_equal(annot, annot2)
    # Now without an orig_time
    annot._orig_time = None
    annot.save(fname, overwrite=True)
    annot2 = read_annotations(fname)
    _assert_annotations_equal(annot, annot2)
@requires_version('pandas')
def test_broken_csv(tmpdir):
    """Test broken .csv that does not use timestamps."""
    content = ("onset,duration,description\n"
               "1.,1.0,AA\n"
               "3.,2.425,BB")
    fname = tmpdir.join('annotations_broken.csv')
    fname.write(content)
    with pytest.warns(RuntimeWarning, match='save your CSV as a TXT'):
        read_annotations(fname)
# Test for IO with .txt files
@pytest.fixture(scope='function', params=('ch_names',))
def dummy_annotation_txt_file(tmpdir_factory, ch_names):
    """Create txt file for testing."""
    content = ("3.14, 42, AA \n"
               "6.28, 48, BB")
    if ch_names:
        content = content.splitlines()
        content[0] = content[0].strip() + ','
        content[1] = content[1].strip() + ', MEG0111:MEG2563'
        content = '\n'.join(content)
    fname = tmpdir_factory.mktemp('data').join('annotations.txt')
    fname.write(content)
    return fname
@pytest.mark.parametrize('ch_names', (False, True))
def test_io_annotation_txt(dummy_annotation_txt_file, tmpdir_factory,
                           ch_names):
    """Test TXT input/output without meas_date."""
    annot = read_annotations(str(dummy_annotation_txt_file))
    assert annot.orig_time is None
    kwargs = dict()
    if ch_names:
        kwargs['ch_names'] = [(), ('MEG0111', 'MEG2563')]
    _assert_annotations_equal(
        annot, Annotations([3.14, 6.28], [42., 48], ['AA', 'BB'], **kwargs))
    # Now test writing
    fname = str(tmpdir_factory.mktemp('data').join('annotations.txt'))
    annot.save(fname)
    annot2 = read_annotations(fname)
    _assert_annotations_equal(annot, annot2)
    # Now with an orig_time
    assert annot.orig_time is None
    annot._orig_time = _handle_meas_date(1038942071.7201)
    assert annot.orig_time is not None
    annot.save(fname, overwrite=True)
    annot2 = read_annotations(fname)
    assert annot2.orig_time is not None
    _assert_annotations_equal(annot, annot2)
@pytest.mark.parametrize('meas_date, out', [
    pytest.param('toto', None, id='invalid string'),
    pytest.param(None, None, id='None'),
    pytest.param(42, 42.0, id='Scalar'),
    pytest.param(3.14, 3.14, id='Float'),
    pytest.param((3, 140000), 3.14, id='Scalar tuple'),
    pytest.param('2002-12-03 19:01:11.720100', 1038942071.7201,
                 id='valid iso8601 string'),
    pytest.param('2002-12-03T19:01:11.720100', None,
                 id='invalid iso8601 string')])
def test_handle_meas_date(meas_date, out):
    """Test meas date formats."""
    if out is not None:
        assert out >= 0  # otherwise it'll break on Windows
        out = datetime.fromtimestamp(out, timezone.utc)
    assert _handle_meas_date(meas_date) == out
def test_read_annotation_txt_header(tmpdir):
    """Test TXT orig_time recovery."""
    content = ("# A something \n"
               "# orig_time : 42\n"
               "# orig_time : 2002-12-03 19:01:11.720100\n"
               "# orig_time : 42\n"
               "# C\n"
               "Done")
    fname = tmpdir.join('header.txt')
    fname.write(content)
    orig_time = _read_annotations_txt_parse_header(fname)
    want = datetime.fromtimestamp(1038942071.7201, timezone.utc)
    assert orig_time == want
def test_read_annotation_txt_one_segment(tmpdir):
    """Test empty TXT input/output."""
    content = ("# MNE-Annotations\n"
               "# onset, duration, description\n"
               "3.14, 42, AA")
    fname = tmpdir.join('one-annotations.txt')
    fname.write(content)
    annot = read_annotations(fname)
    _assert_annotations_equal(annot, Annotations(3.14, 42, ['AA']))
def test_read_annotation_txt_empty(tmpdir):
    """Test empty TXT input/output."""
    content = ("# MNE-Annotations\n"
               "# onset, duration, description\n")
    fname = tmpdir.join('empty-annotations.txt')
    fname.write(content)
    annot = read_annotations(fname)
    _assert_annotations_equal(annot, Annotations([], [], []))
def test_annotations_simple_iteration():
    """Test indexing Annotations."""
    NUM_ANNOT = 5
    EXPECTED_ELEMENTS_TYPE = (np.float64, np.float64, np.str_)
    EXPECTED_ONSETS = EXPECTED_DURATIONS = list(range(NUM_ANNOT))
    EXPECTED_DESCS = [repr(x) for x in range(NUM_ANNOT)]
    annot = Annotations(onset=EXPECTED_ONSETS,
                        duration=EXPECTED_DURATIONS,
                        description=EXPECTED_DESCS,
                        orig_time=None)
    for ii, elements in enumerate(annot[:2]):
        assert isinstance(elements, OrderedDict)
        expected_values = (ii, ii, str(ii))
        for elem, expected_type, expected_value in zip(elements.values(),
                                                       EXPECTED_ELEMENTS_TYPE,
                                                       expected_values):
            assert np.isscalar(elem)
            assert type(elem) == expected_type
            assert elem == expected_value
@requires_version('numpy', '1.12')
def test_annotations_slices():
    """Test indexing Annotations."""
    NUM_ANNOT = 5
    EXPECTED_ONSETS = EXPECTED_DURATIONS = list(range(NUM_ANNOT))
    EXPECTED_DESCS = [repr(x) for x in range(NUM_ANNOT)]
    annot = Annotations(onset=EXPECTED_ONSETS,
                        duration=EXPECTED_DURATIONS,
                        description=EXPECTED_DESCS,
                        orig_time=None)
    # Indexing returns a copy. So this has no effect in annot
    annot[0]['onset'] = 42
    annot[0]['duration'] = 3.14
    annot[0]['description'] = 'foobar'
    annot[:1].onset[0] = 42
    annot[:1].duration[0] = 3.14
    annot[:1].description[0] = 'foobar'
    # Slicing with single element returns a dictionary
    for ii in EXPECTED_ONSETS:
        assert annot[ii] == dict(zip(['onset', 'duration',
                                      'description', 'orig_time'],
                                     [ii, ii, str(ii), None]))
    # Slices should give back Annotations
    for current in (annot[slice(0, None, 2)],
                    annot[[bool(ii % 2) for ii in range(len(annot))]],
                    annot[:1],
                    annot[[0, 2, 2]],
                    annot[(0, 2, 2)],
                    annot[np.array([0, 2, 2])],
                    annot[1::2],
                    ):
        assert isinstance(current, Annotations)
        assert len(current) != len(annot)
    for bad_ii in [len(EXPECTED_ONSETS), 42, 'foo']:
        with pytest.raises(IndexError):
            annot[bad_ii]
def test_sorting():
    """Test annotation sorting."""
    annot = Annotations([10, 20, 30], [1, 2, 3], 'BAD')
    # assert_array_equal(annot.onset, [0, 5, 10])
    annot.append([5, 15, 25, 35], 0.5, 'BAD')
    onset = list(range(5, 36, 5))
    duration = list(annot.duration)
    assert_array_equal(annot.onset, onset)
    assert_array_equal(annot.duration, duration)
    annot.append([10, 10], [0.1, 9], 'BAD')  # 0.1 should be before, 9 after
    want_before = onset.index(10)
    duration.insert(want_before, 0.1)
    duration.insert(want_before + 2, 9)
    onset.insert(want_before, 10)
    onset.insert(want_before, 10)
    assert_array_equal(annot.onset, onset)
    assert_array_equal(annot.duration, duration)
def test_date_none(tmpdir):
    """Test that DATE_NONE is used properly."""
    # Regression test for gh-5908
    n_chans = 139
    n_samps = 20
    data = np.random.random_sample((n_chans, n_samps))
    ch_names = ['E{}'.format(x) for x in range(n_chans)]
    ch_types = ['eeg'] * n_chans
    info = create_info(ch_names=ch_names, ch_types=ch_types, sfreq=2048)
    assert info['meas_date'] is None
    raw = RawArray(data=data, info=info)
    fname = op.join(str(tmpdir), 'test-raw.fif')
    raw.save(fname)
    raw_read = read_raw_fif(fname, preload=True)
    assert raw_read.info['meas_date'] is None
def test_negative_meas_dates(windows_like_datetime):
    """Test meas_date previous to 1970."""
    # Regression test for gh-6621
    raw = RawArray(data=np.empty((1, 1), dtype=np.float64),
                   info=create_info(ch_names=1, sfreq=1.))
    raw.set_meas_date((-908196946, 988669))
    raw.set_annotations(Annotations(description='foo', onset=[0],
                                    duration=[0], orig_time=None))
    events, _ = events_from_annotations(raw)
    assert events[:, 0] == 0
def test_crop_when_negative_orig_time(windows_like_datetime):
    """Test cropping with orig_time, tmin and tmax previous to 1970."""
    # Regression test for gh-6621
    orig_time_stamp = -908196945.011331  # 1941-03-22 11:04:14.988669
    annot = Annotations(description='foo', onset=np.arange(0, 0.999, 0.1),
                        duration=[0], orig_time=orig_time_stamp)
    stamp = _dt_to_stamp(annot.orig_time)
    assert_allclose(stamp[0] + stamp[1] * 1e-6, orig_time_stamp)
    t = stamp[0] + stamp[1] * 1e-6
    assert t == orig_time_stamp
    assert len(annot) == 10
    # do not raise
    annot.crop(verbose='debug')
    assert len(annot) == 10
    # Crop with negative tmin, tmax
    tmin, tmax = [orig_time_stamp + t for t in (0.25, .75)]
    assert tmin < 0 and tmax < 0
    crop_annot = annot.crop(tmin=tmin, tmax=tmax)
    assert_allclose(crop_annot.onset, [0.3, 0.4, 0.5, 0.6, 0.7])
    orig_dt = _stamp_to_dt(stamp)
    assert crop_annot.orig_time == orig_dt  # orig_time does not change
def test_allow_nan_durations():
    """Deal with "n/a" strings in BIDS events with nan durations."""
    raw = RawArray(data=np.empty([2, 10], dtype=np.float64),
                   info=create_info(ch_names=2, sfreq=1.),
                   first_samp=0)
    raw.set_meas_date(0)
    ons = [1, 2., 15., 17.]
    dus = [np.nan, 1., 0.5, np.nan]
    descriptions = ['A'] * 4
    onsets = np.asarray(ons, dtype=float)
    durations = np.asarray(dus, dtype=float)
    annot = mne.Annotations(onset=onsets,
                            duration=durations,
                            description=descriptions)
    with pytest.warns(RuntimeWarning, match='Omitted 2 annotation'):
        raw.set_annotations(annot)
@testing.requires_testing_data
def test_annotations_from_events():
    """Test events to annotations conversion."""
    raw = read_raw_fif(fif_fname)
    events = mne.find_events(raw)
    # 1. Automatic event description
    # -------------------------------------------------------------------------
    annots = annotations_from_events(events, raw.info['sfreq'],
                                     first_samp=raw.first_samp,
                                     orig_time=None)
    assert len(annots) == events.shape[0]
    # Convert back to events
    raw.set_annotations(annots)
    events_out, _ = events_from_annotations(raw, event_id=int)
    assert_array_equal(events, events_out)
    # 2. Explicit event mapping
    # -------------------------------------------------------------------------
    event_desc = {1: 'one', 2: 'two', 3: 'three', 32: None}
    annots = annotations_from_events(events, sfreq=raw.info['sfreq'],
                                     event_desc=event_desc,
                                     first_samp=raw.first_samp,
                                     orig_time=None)
    assert np.all([a in ['one', 'two', 'three'] for a in annots.description])
    assert len(annots) == events[events[:, 2] <= 3].shape[0]
    # 3. Pass list
    # -------------------------------------------------------------------------
    event_desc = [1, 2, 3]
    annots = annotations_from_events(events, sfreq=raw.info['sfreq'],
                                     event_desc=event_desc,
                                     first_samp=raw.first_samp,
                                     orig_time=None)
    assert np.all([a in ['1', '2', '3'] for a in annots.description])
    assert len(annots) == events[events[:, 2] <= 3].shape[0]
    # 4. Try passing callable
    # -------------------------------------------------------------------------
    event_desc = lambda d: 'event{}'.format(d)  # noqa:E731
    annots = annotations_from_events(events, sfreq=raw.info['sfreq'],
                                     event_desc=event_desc,
                                     first_samp=raw.first_samp,
                                     orig_time=None)
    assert np.all(['event' in a for a in annots.description])
    assert len(annots) == events.shape[0]
    # 5. Pass numpy array
    # -------------------------------------------------------------------------
    event_desc = np.array([[1, 2, 3], [1, 2, 3]])
    with pytest.raises(ValueError, match='event_desc must be 1D'):
        annots = annotations_from_events(events, sfreq=raw.info['sfreq'],
                                         event_desc=event_desc,
                                         first_samp=raw.first_samp,
                                         orig_time=None)
    with pytest.raises(ValueError, match='Invalid type for event_desc'):
        annots = annotations_from_events(events, sfreq=raw.info['sfreq'],
                                         event_desc=1,
                                         first_samp=raw.first_samp,
                                         orig_time=None)
    event_desc = np.array([1, 2, 3])
    annots = annotations_from_events(events, sfreq=raw.info['sfreq'],
                                     event_desc=event_desc,
                                     first_samp=raw.first_samp,
                                     orig_time=None)
    assert np.all([a in ['1', '2', '3'] for a in annots.description])
    assert len(annots) == events[events[:, 2] <= 3].shape[0]
def test_repr():
    """Test repr of Annotations."""
    # short annotation repr (< 79 characters)
    r = repr(Annotations(range(3), [0] * 3, list("abc")))
    assert r == '<Annotations | 3 segments: a (1), b (1), c (1)>'
    # long annotation repr (> 79 characters, will be shortened)
    r = repr(Annotations(range(14), [0] * 14, list("abcdefghijklmn")))
    assert r == ('<Annotations | 14 segments: a (1), b (1), c (1), d (1), '
                 'e (1), f (1), g ...>')
    # empty Annotations
    r = repr(Annotations([], [], []))
    assert r == '<Annotations | 0 segments>'
@requires_pandas
def test_annotation_to_data_frame():
    """Test annotation class to data frame conversion."""
    onset = np.arange(1, 10)
    durations = np.full_like(onset, [4, 5, 6, 4, 5, 6, 4, 5, 6])
    description = ["yy"] * onset.shape[0]
    a = Annotations(onset=onset,
                    duration=durations,
                    description=description,
                    orig_time=0)
    df = a.to_data_frame()
    for col in ['onset', 'duration', 'description']:
        assert col in df.columns
    assert df.description[0] == 'yy'
    assert (df.onset[1] - df.onset[0]).seconds == 1
    assert df.groupby('description').count().onset['yy'] == 9
def test_annotation_ch_names():
    """Test annotation ch_names updating and pruning."""
    info = create_info(10, 1000., 'eeg')
    raw = RawArray(np.zeros((10, 1000)), info)
    onset = [0.1, 0.3, 0.6]
    duration = [0.05, 0.1, 0.2]
    description = ['first', 'second', 'third']
    ch_names = [[], raw.ch_names[4:6], raw.ch_names[5:7]]
    annot = Annotations(onset, duration, description, ch_names=ch_names)
    raw.set_annotations(annot)
    # renaming
    rename = {name: name + 'new' for name in raw.ch_names}
    raw_2 = raw.copy().rename_channels(rename)
    for ch_rename, ch in zip(raw_2.annotations.ch_names, annot.ch_names):
        assert all(name in raw_2.ch_names for name in ch_rename)
        assert all(name in raw.ch_names for name in ch)
        assert not any(name in raw.ch_names for name in ch_rename)
        assert not any(name in raw_2.ch_names for name in ch)
    raw_2.rename_channels({val: key for key, val in rename.items()})
    _assert_annotations_equal(raw.annotations, raw_2.annotations)
    # dropping
    raw_2.drop_channels(raw.ch_names[5:])
    annot_pruned = raw_2.annotations
    assert len(raw_2.annotations) == 2  # dropped the last one
    assert raw_2.annotations.ch_names[1] == tuple(raw.ch_names[4:5])
    for ch_drop in raw_2.annotations.ch_names:
        assert all(name in raw_2.ch_names for name in ch_drop)
    with pytest.raises(ValueError, match='channel name in annotations missin'):
        raw_2.set_annotations(annot)
    with pytest.warns(RuntimeWarning, match='channel name in annotations mis'):
        raw_2.set_annotations(annot, on_missing='warn')
    assert raw_2.annotations is not annot_pruned
    _assert_annotations_equal(raw_2.annotations, annot_pruned)
def test_annotation_duration_setting():
    """Test annotation duration setting works."""
    a = Annotations([1, 2, 3], [5, 5, 8], ["a", "b", "c"])
    assert len(a) == 3
    assert a.duration[0] == 5
    assert a.duration[2] == 8
    a.set_durations({"a": 3})
    assert a.duration[0] == 3
    assert a.duration[2] == 8
    a.set_durations({"a": 313, "c": 18})
    assert a.duration[0] == 313
    assert a.duration[2] == 18
    a.set_durations({"a": 1, "b": 13})
    assert a.duration[0] == 1
    assert a.duration[1] == 13
    a = Annotations([1, 2, 3], [5, 5, 8], ["a", "b", "c"])
    assert len(a) == 3
    assert a.duration[0] == 5
    assert a.duration[2] == 8
    a.set_durations(7.2)
    assert a.duration[0] == 7.2
    assert a.duration[2] == 7.2
    with pytest.raises(ValueError, match="mapping missing from data"):
        a.set_durations({"aaa": 2.2})
    with pytest.raises(ValueError, match="<class 'set'> was provided"):
        a.set_durations({"aaa", 2.2})
 | 
	bsd-3-clause | -2,619,110,020,765,893,600 | 39.431611 | 79 | 0.596583 | false | 
| 
	andrewsosa/hackfsu_com | 
	api/api/views/hackathon/get/stats.py | 
	2 | 
	4468 | 
	"""
    Get public statistics for current hackathon
"""
from django import forms
from django.http.request import HttpRequest
from hackfsu_com.views.generic import ApiView
from hackfsu_com.util import acl
from api.models import Hackathon, HackerInfo, MentorInfo, JudgeInfo, OrganizerInfo, AttendeeStatus
from django.utils import timezone
class ResponseForm(forms.Form):
    hackathon_name = forms.CharField()
    hackathon_start = forms.DateField()
    hackathon_end = forms.DateField()
    hackers_registered = forms.IntegerField()
    hackers_approved = forms.IntegerField()
    hackers_rsvp = forms.IntegerField()
    hackers_checked_in = forms.IntegerField()
    mentors_registered = forms.IntegerField()
    mentors_approved = forms.IntegerField()
    mentors_rsvp = forms.IntegerField()
    mentors_checked_in = forms.IntegerField()
    judges_registered = forms.IntegerField()
    judges_approved = forms.IntegerField()
    judges_rsvp = forms.IntegerField()
    judges_checked_in = forms.IntegerField()
    organizers_registered = forms.IntegerField()
    organizers_approved = forms.IntegerField()
    organizers_rsvp = forms.IntegerField()
    organizers_checked_in = forms.IntegerField()
class StatsView(ApiView):
    response_form_class = ResponseForm
    http_method_names = ['get']
    access_manager = acl.AccessManager(acl_accept=[acl.group_user])
    def work(self, request, req: dict, res: dict):
        ch = Hackathon.objects.current()
        res['hackathon_name'] = ch.name
        res['hackathon_start'] = ch.start_date
        res['hackathon_end'] = ch.end_date
        if (timezone.now().date() - ch.start_date).days >= 0 or \
                OrganizerInfo.objects.filter(hackathon=ch, user=request.user, approved=True).exists():
            res['hackers_registered'] = HackerInfo.objects.filter(hackathon=ch).count()
            res['hackers_approved'] = HackerInfo.objects.filter(hackathon=ch, approved=True).count()
            res['hackers_rsvp'] = \
                HackerInfo.objects.filter(hackathon=ch, approved=True, attendee_status__rsvp_result=1).count()
            res['hackers_checked_in'] = HackerInfo.objects.filter(
                hackathon=ch, approved=True, attendee_status__checked_in_at__isnull=False).count()
            res['mentors_registered'] = MentorInfo.objects.filter(hackathon=ch).count()
            res['mentors_approved'] = MentorInfo.objects.filter(hackathon=ch, approved=True).count()
            res['mentors_rsvp'] = \
                MentorInfo.objects.filter(hackathon=ch, approved=True, attendee_status__rsvp_result=1).count()
            res['mentors_checked_in'] = MentorInfo.objects.filter(
                hackathon=ch, approved=True, attendee_status__checked_in_at__isnull=False).count()
            res['judges_registered'] = JudgeInfo.objects.filter(hackathon=ch).count()
            res['judges_approved'] = JudgeInfo.objects.filter(hackathon=ch, approved=True).count()
            res['judges_rsvp'] = \
                JudgeInfo.objects.filter(hackathon=ch, approved=True, attendee_status__rsvp_result=1).count()
            res['judges_checked_in'] = JudgeInfo.objects.filter(
                hackathon=ch, approved=True, attendee_status__checked_in_at__isnull=False).count()
            res['organizers_registered'] = OrganizerInfo.objects.filter(hackathon=ch).count()
            res['organizers_approved'] = OrganizerInfo.objects.filter(hackathon=ch, approved=True).count()
            res['organizers_rsvp'] = \
                OrganizerInfo.objects.filter(hackathon=ch, approved=True, attendee_status__rsvp_result=1).count()
            res['organizers_checked_in'] = OrganizerInfo.objects.filter(
                hackathon=ch, approved=True, attendee_status__checked_in_at__isnull=False).count()
        else:
            res['hackers_registered'] = -1
            res['hackers_approved'] = -1
            res['hackers_rsvp'] = -1
            res['hackers_checked_in'] = -1
            res['mentors_registered'] = -1
            res['mentors_approved'] = -1
            res['mentors_rsvp'] = -1
            res['mentors_checked_in'] = -1
            res['judges_registered'] = -1
            res['judges_approved'] = -1
            res['judges_rsvp'] = -1
            res['judges_checked_in'] = -1
            res['organizers_registered'] = -1
            res['organizers_approved'] = -1
            res['organizers_rsvp'] = -1
            res['organizers_checked_in'] = -1
 | 
	apache-2.0 | 7,364,637,496,740,350,000 | 47.043011 | 113 | 0.643912 | false | 
| 
	sevein/archivematica | 
	src/dashboard/src/contrib/utils.py | 
	1 | 
	2104 | 
	# This file is part of Archivematica.
#
# Copyright 2010-2013 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica.  If not, see <http://www.gnu.org/licenses/>.
import os
import re
def get_directory_size(path='.'):
    total_size = 0
    for dirpath, dirnames, filenames in os.walk(path):
        for f in filenames:
            fp = os.path.join(dirpath, f)
            total_size += os.path.getsize(fp)
    return total_size
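# A quick doctest-style illustration of get_directory_size; the path and the
# resulting byte count are hypothetical, for illustration only:
#   >>> get_directory_size('/tmp/some-transfer')  # doctest: +SKIP
#   1048576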
def get_directory_name(directory, default=None):
    """
      Attempts to extract a directory name given a transfer or SIP path. Expected format:
      %sharedPath%watchedDirectories/workFlowDecisions/createDip/ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/
      Given this example, this function would return 'ImagesSIP'.
      If the optional `default` keyword argument is passed in, the provided value will be used if no name can be extracted.
    """
    try:
        return re.search(r'^.*/(?P<directory>.*)-[\w]{8}(-[\w]{4}){3}-[\w]{12}[/]{0,1}$', directory).group('directory')
    except AttributeError:
        pass
    try:
        return re.search(r'^.*/(?P<directory>.*)/$', directory).group('directory')
    except AttributeError:
        pass
    if directory:
        return directory
    else:
        return default
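# Worked example for get_directory_name, using the path from the docstring
# above (doctest-style sketch, not executed by Archivematica itself):
#   >>> get_directory_name(
#   ...     '%sharedPath%watchedDirectories/workFlowDecisions/createDip/'
#   ...     'ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/')
#   'ImagesSIP'
#   >>> get_directory_name('', default='(Unnamed)')
#   '(Unnamed)'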
def get_directory_name_from_job(jobs):
    try:
        job = jobs[0]
    # No jobs yet, e.g. not started; there will be no directory name yet
    except IndexError:
        return "(Unnamed)"
    return get_directory_name(job.directory, default=job.sipuuid)
 | 
	agpl-3.0 | 4,646,505,071,088,285,000 | 32.396825 | 123 | 0.681559 | false | 
| 
	basak/netkeyscript | 
	netkeyscript-send.py | 
	1 | 
	2002 | 
	#!/usr/bin/python
# Copyright 2012 Robie Basak
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
NETKEYSCRIPT_PROTO_PASSPHRASE = 0
import argparse
import struct
import sys
from scapy.all import (
    Ether,
    IPv6,
    UDP,
    sendp
)
def send(dst, sport, dport, payload, iface):
    ether = Ether()
    ip = IPv6(dst=dst)
    udp = UDP(sport=sport, dport=dport)
    sendp(ether / ip / udp / payload, iface=iface)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--iface', default='eth0')
    parser.add_argument('--sport', default=30621, type=int)
    parser.add_argument('--dport', default=30621, type=int)
    parser.add_argument('--dest', default='ff02::1')
    args = parser.parse_args()
    payload_command = struct.pack('b', NETKEYSCRIPT_PROTO_PASSPHRASE)
    payload = payload_command + sys.stdin.read()
    send(dst=args.dest, sport=args.sport, dport=args.dport,
         payload=payload, iface=args.iface)
if __name__ == '__main__':
    main()
 | 
	mit | -4,701,440,638,697,542,000 | 32.932203 | 78 | 0.720779 | false | 
| 
	algolia/algoliasearch-django | 
	tests/settings.py | 
	1 | 
	2390 | 
	"""
Django settings for core project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'MillisecondsMatter'
DEBUG = False
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'algoliasearch_django',
    'tests'
)
MIDDLEWARE = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
]
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]
ROOT_URLCONF = 'tests.urls'
# Database
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
def safe_index_name(name):
    if 'TRAVIS' not in os.environ:
        return name
    job = os.environ['TRAVIS_JOB_NUMBER']
    return '{}_travis-{}'.format(name, job)
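# Sketch of the suffixing behaviour (the job number is a hypothetical value):
# with TRAVIS set and TRAVIS_JOB_NUMBER='1234.5',
# safe_index_name('django') -> 'django_travis-1234.5'; outside Travis the
# name is returned unchanged.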
# AlgoliaSearch settings
ALGOLIA = {
    'APPLICATION_ID': os.getenv('ALGOLIA_APPLICATION_ID'),
    'API_KEY': os.getenv('ALGOLIA_API_KEY'),
    'INDEX_PREFIX': 'test',
    'INDEX_SUFFIX': safe_index_name('django'),
    'RAISE_EXCEPTIONS': True
}
 | 
	mit | 176,922,860,896,906,560 | 25.555556 | 70 | 0.667364 | false | 
| 
	all-of-us/raw-data-repository | 
	rdr_service/tools/update_release_tracker.py | 
	1 | 
	2190 | 
	#!/usr/bin/env python
"""Updates JIRA release notes when deploying to an environment.
This requires the
    JIRA_API_USER_PASSWORD and
    JIRA_API_USER_NAME
environment variables to be set, and flags for version and instance to be provided.
"""
import logging
import os
import sys
import jira
from rdr_service.main_util import configure_logging, get_parser
_JIRA_INSTANCE_URL = "https://precisionmedicineinitiative.atlassian.net/"
# Release tickets are moved from our usual project, DA, to the PD project
# for change approval, so for stable/prod releases look for tickets there.
_JIRA_PROJECT_ID = "PD"
def _connect_to_jira(jira_username, jira_password):
    return jira.JIRA(_JIRA_INSTANCE_URL, basic_auth=(jira_username, jira_password))
def main(args):
    jira_username = os.getenv("JIRA_API_USER_NAME")
    jira_password = os.getenv("JIRA_API_USER_PASSWORD")
    if not jira_username or not jira_password:
        logging.error("JIRA_API_USER_NAME and JIRA_API_USER_PASSWORD variables must be set. Exiting.")
        sys.exit(-1)
    jira_connection = _connect_to_jira(jira_username, jira_password)
    summary = "Release tracker for %s" % args.version
    issues = jira_connection.search_issues(
        'project = "%s" AND summary ~ "%s" ORDER BY created DESC' % (_JIRA_PROJECT_ID, summary)
    )
    if issues:
        if len(issues) > 1:
            logging.warning(
                "Found multiple release tracker matches, using newest. %s",
                ", ".join("[%s] %s" % (issue.key, issue.fields().summary) for issue in issues),
            )
        issue = issues[0]
        jira_connection.add_comment(issue, args.comment)
        logging.info("Updated issue %s", issue.key)
        sys.exit(0)
    else:
        logging.error("No issue found with summary %r in project %r; exiting.", summary, _JIRA_PROJECT_ID)
        sys.exit(-1)
if __name__ == "__main__":
    configure_logging()
    parser = get_parser()
    parser.add_argument("--version", help="The version of the app being deployed (e.g. v0-1-rc21", required=True)
    parser.add_argument("--comment", type=str, help="The comment to add to the issue", required=True)
    main(parser.parse_args())
 | 
	bsd-3-clause | -2,994,243,296,558,638,600 | 36.118644 | 113 | 0.66895 | false | 
| 
	mark-r-g/hydrus | 
	tests/test_model.py | 
	1 | 
	1242 | 
	# Mark Gatheman <[email protected]>
#
# This file is part of Hydrus.
#
# Hydrus is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# 
# Hydrus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
# 
# You should have received a copy of the GNU General Public License
# along with Hydrus.  If not, see <http://www.gnu.org/licenses/>.
import numpy as np
from numpy.testing import assert_approx_equal
from scipy.stats import norm
from hypothesis import given
from hydrus.model import Lvm
from tests import strat_1d, strat_pos_1d
@given(strat_1d, strat_1d, strat_1d, strat_1d, strat_pos_1d, strat_1d)
def test_preds_ll(alpha, mu, gamma, err, num, w):
    current_impl = Lvm.preds_ll(alpha, mu, gamma, err, num, w)
    simple_impl = np.nansum(w * norm.logpdf(num, mu+gamma*alpha, err))
    simple_impl += np.sum(norm.logpdf(alpha))
    assert_approx_equal(current_impl, simple_impl)
 | 
	gpl-3.0 | -6,105,238,677,537,839,000 | 37.8125 | 70 | 0.741546 | false | 
| 
	google/earthengine-community | 
	samples/python/apidocs/ee-dictionary-select.py | 
	1 | 
	1194 | 
	# Copyright 2021 The Google Earth Engine Community Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START earthengine__apidocs__ee_dictionary_select]
# A dictionary (e.g. results of ee.Image.reduceRegion of an S2 image).
dic = ee.Dictionary({
    'B1': 182,
    'B2': 219,
    'B3': 443
})
print('Select keys by name:', dic.select(['B1', 'B2']).getInfo())
print('Select keys by regex:', dic.select(['B[1-2]']).getInfo())
dic_select = dic.select(**{'selectors': ['B1', 'B2', 'Region'],
                           'ignoreMissing': True})
print('Set ignoreMissing as true to avoid an unmatched key error:',
      dic_select.getInfo())
# [END earthengine__apidocs__ee_dictionary_select]
 | 
	apache-2.0 | -7,978,989,095,025,099,000 | 38.8 | 74 | 0.697655 | false | 
| 
	AdaptivePELE/AdaptivePELE | 
	AdaptivePELE/analysis/crossValidationClustering.py | 
	1 | 
	3872 | 
	from __future__ import absolute_import, division, print_function, unicode_literals
import os
import argparse
import matplotlib.pyplot as plt
import numpy as np
import pyemma.coordinates as coor
import pyemma.msm as msm
from AdaptivePELE.freeEnergies import cluster
plt.switch_backend("pdf")
plt.style.use("ggplot")
def parse_arguments():
    """
        Create command-line interface
    """
    desc = "Plot information related to an MSM"
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument("-l", "--lagtimes", type=int, nargs="*", help="Lagtimes to analyse")
    parser.add_argument("-c", "--clusters", type=int, nargs="*", help="Number of clusters to analyse")
    parser.add_argument("-m", type=int, default=6, help="Number of eigenvalues to sum in the GMRQ")
    parser.add_argument("--tica", action="store_true", help="Whether to use TICA before clustering")
    parser.add_argument("--tica_lag", type=int, default=30, help="Lagtime for the TICA estimation")
    parser.add_argument("--out_path", type=str, default="", help="Path to store the output")
    args = parser.parse_args()
    return args.lagtimes, args.m, args.tica, args.tica_lag, args.out_path, args.clusters
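# Illustrative command line (lagtimes and cluster counts are arbitrary example
# values, not recommendations):
#   python crossValidationClustering.py -l 25 50 -c 100 200 400 \
#       --tica --tica_lag 30 --out_path results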
def main(lagtimes, clusters, m, tica_lag, tica, output_path):
    trajectoryFolder = "allTrajs"
    trajectoryBasename = "traj*"
    stride = 1
    if output_path and not os.path.exists(output_path):
        os.makedirs(output_path)
    scores_path = os.path.join(output_path, "scores")
    if not os.path.exists(scores_path):
        os.makedirs(scores_path)
    data, _ = cluster.loadTrajFiles(trajectoryFolder, trajectoryBasename)
    if tica:
        tica_obj = coor.tica(data, lag=tica_lag, var_cutoff=0.9, kinetic_map=True)
        print('TICA dimension ', tica_obj.dimension())
        data = tica_obj.get_output()
    for tau in lagtimes:
        scores = []
        scores_cv = []
        print("Estimating MSM with %d lagtime" % tau)
        for k in clusters:
            print("Calculating scores with %d clusters" % k)
            # cluster data
            cl = coor.cluster_kmeans(data=data, k=k, max_iter=500, stride=stride)
            try:
                MSM = msm.estimate_markov_model(cl.dtrajs, tau)
                print("MSM estimated on %d states" % MSM.nstates)
            except Exception:
                print("Estimation error in %d clusters, %d lagtime" % (k, tau))
                scores.append(0)
                scores_cv.append(np.array([0, 0, 0, 0, 0]))
                continue
            try:
                scores.append(MSM.score(MSM.dtrajs_full, score_k=m))
            except Exception:
                print("Estimation error in %d clusters, %d lagtime" % (k, tau))
                scores.append(0)
                scores_cv.append(np.array([0, 0, 0, 0, 0]))
                continue
            try:
                scores_cv.append(MSM.score_cv(MSM.dtrajs_full, score_k=m, n=5))
            except Exception:
                print("Estimation error in %d clusters, %d lagtime" % (k, tau))
                scores_cv.append(np.array([0, 0, 0, 0, 0]))
        np.save(os.path.join(scores_path, "scores_lag_%d.npy" % tau), scores)
        np.save(os.path.join(scores_path, "scores_cv_lag_%d.npy" % tau), scores_cv)
        mean_scores = [sc.mean() for sc in scores_cv]
        std_scores = [sc.std() for sc in scores_cv]
        plt.figure()
        plt.plot(clusters, scores, label="Training")
        plt.errorbar(clusters, mean_scores, yerr=std_scores, fmt='k', label="Testing")
        plt.xlabel("Number of states")
        plt.ylabel("Score")
        plt.legend()
        plt.savefig(os.path.join(output_path, "scores_cv_lag_%d.png" % tau))
if __name__ == "__main__":
    lags, GMRQ, use_tica, lag_tica, out_path, cluster_list = parse_arguments()
    main(lags, cluster_list, GMRQ, lag_tica, use_tica, out_path)
 | 
	mit | -8,509,077,382,591,035,000 | 44.552941 | 102 | 0.612603 | false | 
| 
	GoogleCloudPlatform/sap-deployment-automation | 
	third_party/github.com/ansible/awx/awx/main/tests/functional/api/test_unified_job_template.py | 
	1 | 
	2858 | 
	import pytest
from awx.api.versioning import reverse
from awx.main import models
@pytest.mark.django_db
def test_aliased_forward_reverse_field_searches(instance, options, get, admin):
    url = reverse('api:unified_job_template_list')
    response = options(url, None, admin)
    assert 'job_template__search' in response.data['related_search_fields']
    get(reverse("api:unified_job_template_list") + "?job_template__search=anything", user=admin, expect=200)
@pytest.mark.django_db
@pytest.mark.parametrize('model', (
    'Project',
    'JobTemplate',
    'WorkflowJobTemplate'
))
class TestUnifiedOrganization:
    def data_for_model(self, model, orm_style=False):
        data = {
            'name': 'foo',
            'organization': None
        }
        if model == 'JobTemplate':
            proj = models.Project.objects.create(
                name="test-proj",
                playbook_files=['helloworld.yml']
            )
            if orm_style:
                data['project_id'] = proj.id
            else:
                data['project'] = proj.id
            data['playbook'] = 'helloworld.yml'
            data['ask_inventory_on_launch'] = True
        return data
    def test_organization_blank_on_edit_of_orphan(self, model, admin_user, patch):
        cls = getattr(models, model)
        data = self.data_for_model(model, orm_style=True)
        obj = cls.objects.create(**data)
        patch(
            url=obj.get_absolute_url(),
            data={'name': 'foooooo'},
            user=admin_user,
            expect=200
        )
        obj.refresh_from_db()
        assert obj.name == 'foooooo'
    def test_organization_blank_on_edit_of_orphan_as_nonsuperuser(self, model, rando, patch):
        """Test case reflects historical bug where ordinary users got weird error
        message when editing an orphaned project
        """
        cls = getattr(models, model)
        data = self.data_for_model(model, orm_style=True)
        obj = cls.objects.create(**data)
        if model == 'JobTemplate':
            obj.project.admin_role.members.add(rando)
        obj.admin_role.members.add(rando)
        patch(
            url=obj.get_absolute_url(),
            data={'name': 'foooooo'},
            user=rando,
            expect=200
        )
        obj.refresh_from_db()
        assert obj.name == 'foooooo'
    def test_organization_blank_on_edit_of_normal(self, model, admin_user, patch, organization):
        cls = getattr(models, model)
        data = self.data_for_model(model, orm_style=True)
        data['organization'] = organization
        obj = cls.objects.create(**data)
        patch(
            url=obj.get_absolute_url(),
            data={'name': 'foooooo'},
            user=admin_user,
            expect=200
        )
        obj.refresh_from_db()
        assert obj.name == 'foooooo'
 | 
	apache-2.0 | -8,665,169,606,486,610,000 | 32.623529 | 108 | 0.580126 | false | 
| 
	cloew/KaoJson | 
	kao_json/Test/test_conversion_config.py | 
	1 | 
	1361 | 
	from .class_hierarchy import A, AChild, AGrandchild
from ..conversion_config import ConversionConfig
import unittest
class find(unittest.TestCase):
    """ Test cases of find """
        
    def test_matchingClassFound(self):
        """ Test that a matching class is found properly """
        expected = "Dummy Config..."
        config = ConversionConfig([{A:expected}])
        
        actual = config.find(A)
        self.assertEqual(expected, actual)
        
    def test_descendantClassFound(self):
        """ Test that a matching descendant class is found properly """
        expected = "Dummy Config..."
        config = ConversionConfig([{A:expected}])
        
        actual = config.find(AGrandchild)
        self.assertEqual(expected, actual)
        
    def test_noMatchFound(self):
        """ Test that when no match is found, None is returned """
        config = ConversionConfig([])
        self.assertIsNone(config.find(A))
class newConverter(unittest.TestCase):
    """ Test cases of newConverter """
        
    def test_converterBuilt(self):
        """ Test that the Converter was built properly """
        value = "Dummy Value..."
        config = ConversionConfig([])
        converter = config.newConverter(value)
        
        self.assertEqual(converter.value, value)
        self.assertEqual(converter.config, config)
 | 
	mit | -7,443,373,079,876,724,000 | 33.05 | 71 | 0.62601 | false | 
| 
	agx/git-buildpackage | 
	gbp/deb/rollbackgit.py | 
	1 | 
	5051 | 
	# vim: set fileencoding=utf-8 :
#
# (C) 2018 Guido Günther <[email protected]>
"""A git repository for Debian packages that can roll back operations"""
from .. import log
from .. git import GitRepositoryError
from . git import DebianGitRepository
class RollbackError(GitRepositoryError):
    """
    Error raised if the rollback failed
    """
    def __init__(self, errors):
        self.msg = "Automatic rollback failed"
        super(RollbackError, self).__init__(self.msg)
        self.errors = errors
    def __str__(self):
        return "%s %s" % (self.msg, self.errors)
class RollbackDebianGitRepository(DebianGitRepository):
    """
    Like a DebianGitRepository but can also perform rollbacks and knows
    about some of its inner workings (upstream vcs_tag, …)
    """
    def __init__(self, *args, **kwargs):
        self.rollbacks = []
        self.rollback_errors = []
        DebianGitRepository.__init__(self, *args, **kwargs)
    def has_rollbacks(self):
        return len(self.rollbacks) > 0
    def rrr(self, refname, action, reftype):
        """
        Remember ref for rollback
        @param refname: ref to roll back
        @param action: the rollback action (delete, reset, ...)
        @param reftype: the reference type (tag, branch, ...)
        """
        sha = None
        if action == 'reset':
            try:
                sha = self.rev_parse(refname)
            except GitRepositoryError as err:
                log.warn("Failed to rev-parse '%s': %s" % (refname, err))
        elif action == 'delete':
            pass
        elif action == 'abortmerge':
            pass
        else:
            raise GitRepositoryError("Unknown action '%s' for %s '%s'" % (action, reftype, refname))
        self.rollbacks.append((refname, reftype, action, sha))
    def rrr_branch(self, branchname, action='reset-or-delete'):
        if action == 'reset-or-delete':
            if self.has_branch(branchname):
                return self.rrr(branchname, 'reset', 'branch')
            else:
                return self.rrr(branchname, 'delete', 'branch')
        else:
            return self.rrr(branchname, action, 'branch')
    def rrr_tag(self, tagname, action='delete'):
        return self.rrr(tagname, action, 'tag')
    def rrr_merge(self, commit, action='abortmerge'):
        return self.rrr(commit, action, 'commit')
    def rollback(self):
        """
        Perform a complete rollback
        Try to roll back as much as possible and remember what failed.
        """
        for (name, reftype, action, sha) in self.rollbacks:
            try:
                if action == 'delete':
                    log.info("Rolling back %s '%s' by deleting it" % (reftype, name))
                    if reftype == 'tag':
                        self.delete_tag(name)
                    elif reftype == 'branch':
                        self.delete_branch(name)
                    else:
                        raise GitRepositoryError("Don't know how to delete %s '%s'" % (reftype, name))
                elif action == 'reset' and reftype == 'branch':
                    log.info('Rolling back branch %s by resetting it to %s' % (name, sha))
                    self.update_ref("refs/heads/%s" % name, sha, msg="gbp import-orig: failure rollback of %s" % name)
                elif action == 'abortmerge':
                    if self.is_in_merge():
                        log.info('Rolling back failed merge of %s' % name)
                        self.abort_merge()
                    else:
                        log.info("Nothing to rollback for merge of '%s'" % name)
                else:
                    raise GitRepositoryError("Don't know how to %s %s '%s'" % (action, reftype, name))
            except GitRepositoryError as e:
                self.rollback_errors.append((name, reftype, action, sha, e))
        if self.rollback_errors:
            raise RollbackError(self.rollback_errors)
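    # Minimal usage sketch (the ref names are hypothetical): register the refs
    # an operation will touch, then roll them all back if it fails.
    #
    #   repo = RollbackDebianGitRepository('.')
    #   repo.rrr_branch('debian/master')
    #   repo.rrr_tag('upstream/1.0')
    #   try:
    #       ...  # operation that may fail
    #   except GitRepositoryError:
    #       repo.rollback()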
    # Wrapped methods for rollbacks
    def create_tag(self, *args, **kwargs):
        name = kwargs['name']
        ret = super(RollbackDebianGitRepository, self).create_tag(*args, **kwargs)
        self.rrr_tag(name)
        return ret
    def commit_dir(self, *args, **kwargs):
        import_branch = kwargs['branch']
        self.rrr_branch(import_branch)
        return super(RollbackDebianGitRepository, self).commit_dir(*args, **kwargs)
    def create_branch(self, *args, **kwargs):
        branch = kwargs['branch']
        ret = super(RollbackDebianGitRepository, self).create_branch(*args, **kwargs)
        self.rrr_branch(branch, 'delete')
        return ret
    def merge(self, *args, **kwargs):
        commit = args[0] if args else kwargs['commit']
        try:
            return super(RollbackDebianGitRepository, self).merge(*args, **kwargs)
        except GitRepositoryError:
            # Only cleanup in the error case to undo working copy
            # changes. Resetting the refs handles the other cases.
            self.rrr_merge(commit)
            raise
 | 
	gpl-2.0 | 3,977,356,638,823,929,300 | 36.671642 | 118 | 0.565571 | false | 
| 
	conversationai/wikidetox | 
	experimental/conversation_go_awry/feature_extraction/utils/write_lexicons.py | 
	1 | 
	1163 | 
	import json
lexicon = {
    'pron_me': ['i', "i'd", "i'll", "i'm", "i've", 'id', 'im', 'ive',
                'me', 'mine', 'my', 'myself'],
    'pron_we': ["let's", 'lets', 'our', 'ours', 'ourselves', 'us',
                'we', "we'd", "we'll", "we're", "we've", 'weve'],
    'pron_you': ["y'all", 'yall', 'you', "you'd", "you'll", "you're",
                 "you've", 'youd', 'youll', 'your', 'youre', 'yours',
                 'youve'],
    'pron_3rd': ['he', "he'd", "he's", 'hed', 'her', 'hers', 'herself',
                 'hes', 'him', 'himself', 'his', 'she', "she'd",
                 "she'll", "she's", 'shes'],
    'pron_3rd_plural': ['their', 'them', 'themselves',
                 'they', "they'd", "they'll", "they've", 'theyd', 'theyll',
                 'theyve', "they're", "theyre"]
}
lexicon['positive'] = []
with open('liu-positive-words.txt') as f:
     for line in f:
         lexicon['positive'].append(line.strip()) 
lexicon['negative'] = []
with open('liu-negative-words.txt', encoding='ISO-8859-1') as f:
     for line in f:
         lexicon['negative'].append(line.strip())
with open('lexicons', 'w') as w:
    json.dump(lexicon, w)
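# Reading the dump back is symmetric (sketch):
#   with open('lexicons') as f:
#       lexicon = json.load(f)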
 | 
	apache-2.0 | -5,095,081,116,083,255,000 | 40.535714 | 75 | 0.468616 | false | 
| 
	hcrlab/access_teleop | 
	limb_manipulation/src/limb_pbd_server_with_moveit_commander.py | 
	1 | 
	43291 | 
	#! /usr/bin/env python
import rospy
import math
from pprint import pprint
import numpy as np
import fetch_api
from std_msgs.msg import String, Header, ColorRGBA, Bool
from std_srvs.srv import Empty
from image_geometry import PinholeCameraModel
import tf
import tf.transformations as tft
# from tf import TransformBroadcaster
from geometry_msgs.msg import Pose, PoseStamped, Quaternion, Point, Vector3
from interactive_markers.interactive_marker_server import InteractiveMarkerServer
from visualization_msgs.msg import Marker, MarkerArray, InteractiveMarker, InteractiveMarkerControl, InteractiveMarkerFeedback
from sensor_msgs.msg import PointCloud2, JointState
from ar_track_alvar_msgs.msg import AlvarMarkers
from limb_manipulation_msgs.msg import EzgripperAccess, WebAppRequest, WebAppResponse
import moveit_commander
from moveit_python import PlanningSceneInterface
from moveit_msgs.msg import OrientationConstraint
import subprocess
from robot_controllers_msgs.msg import QueryControllerStatesAction, QueryControllerStatesGoal, ControllerState
from database import Database
import actionlib
import rosbag
import os
from colour import Color
import sys
from shared_teleop_functions_and_vars import dpx_to_distance, delta_modified_stamped_pose
import copy
# maximum times to retry if a transform lookup fails
TRANSFROM_LOOKUP_RETRY = 10
# colors for trajectory visualization
START_COLOR = Color("Orange")
END_COLOR = Color("Blue")
TRAJ_HIGHLIGHT_SCALE = Vector3(0.05, 0.008, 0.008)
WAYPOINT_HIGHLIGHT_SCALE = Vector3(0.05, 0.01, 0.01)  # Vector3(0.055, 0.009, 0.009)
WAYPOINT_HIGHLIGHT_COLOR = ColorRGBA(1.0, 0.0, 0.0, 0.8)
# body parts and their corresponding ID# and actions
BODY_PARTS = {0: "right wrist", 1: "lower right leg",
              2: "left wrist", 3: "lower left leg"}
ACTIONS = {0: ["right arm elbow extension", "right arm elbow flexion"],
           1: ["right leg abduction and adduction"],  # , "right leg medial rotation", "right leg lateral rotation"],
           2: ["left arm elbow extension", "left arm elbow flexion"],
           3: ["left leg abduction and adduction"]  #, "left leg medial rotation", "left leg lateral rotation"]
          }
ABBR = {"right leg abduction and adduction": "RLAA",
        # "right leg medial rotation": "RLMR", "right leg lateral rotation": "RLLR",
        "left abduction and adduction": "LLAA",
        # "left leg medial rotation": "LLMR", "left leg lateral rotation": "LLLR",
        "right arm elbow extension": "RAEE", "right arm elbow flexion": "RAEF",
        "left arm elbow extension": "LAEE", "left arm elbow flexion": "LAEF"
       }
# leg medial rotation, leg lateral rotation
# shoulder flexion, shoulder abduction, shoulder adduction, shoulder medial rotation, shoulder lateral rotation, 
# forearm pronation, forearm supination, 
# knee flexion, knee extension (seems to be performed when seated?)
def wait_for_time():
  """
    Wait for simulated time to begin.
  """
  while rospy.Time().now().to_sec() == 0:
    pass
class ArTagReader(object):
  def __init__(self):
    self.markers = []  # list of markers (update in real time)
    self.saved_markers = []  # list of markers saved (update only if update() is called)
  def callback(self, msg):
    self.markers = msg.markers
  def update(self):
    self.saved_markers = self.markers
  def get_tag(self, tag):
    """ Returns the marker with id# == tag """
    for marker in self.saved_markers:
      if marker.id == int(tag):
        result = PoseStamped()
        result.pose = marker.pose
        result.header = marker.header
        return result
    return None
  
  def get_list(self):
    """ Returns the list of saved markers """
    return self.saved_markers
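# Minimal wiring sketch for ArTagReader (mirrors how PbdServer.__init__ uses
# it below; the tag id 0 is an arbitrary example):
#   reader = ArTagReader()
#   rospy.Subscriber("ar_pose_marker", AlvarMarkers, callback=reader.callback)
#   reader.update()                   # snapshot the live marker list
#   pose_stamped = reader.get_tag(0)  # PoseStamped, or None if tag 0 unseen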
class PbdServer():
  """ Server for PBD """
  def __init__(self):
    # controls of Fetch
    self._arm = fetch_api.Arm()
    self._arm_joints = fetch_api.ArmJoints()
    self._torso = fetch_api.Torso()
    self._head = fetch_api.Head()
    self._base = fetch_api.Base()
    self._fetch_gripper = fetch_api.Gripper()
    # status of the arm: relax or freeze
    self._arm_relaxed = False
    # transformation
    self._tf_listener = tf.TransformListener()
    rospy.sleep(0.1)
    # AR tag reader
    self._reader = ArTagReader()
    self._ar_sub = rospy.Subscriber("ar_pose_marker", AlvarMarkers, callback=self._reader.callback)
    # database of actions
    self._db = Database()
    self._db.load()
    # publisher and subscriber for controls of SAKE gripper
    self._sake_gripper_pub = rospy.Publisher('/ezgripper_access', EzgripperAccess, queue_size=1)
    self._sake_gripper_sub = rospy.Subscriber('/ezgripper_access_status', EzgripperAccess, callback=self._set_sake_gripper_action_status)
    # moveit: query controller
    self._controller_client = actionlib.SimpleActionClient('/query_controller_states', QueryControllerStatesAction)
    # moveit: move group commander
    moveit_commander.roscpp_initialize(sys.argv)
    moveit_robot = moveit_commander.RobotCommander()
    self._moveit_group = moveit_commander.MoveGroupCommander('arm')
    # motion planning scene
    self._planning_scene = PlanningSceneInterface('base_link')
    self._planning_scene.clear()
    # visualization
    self._viz_pub = rospy.Publisher('visualization_marker', Marker, queue_size=5)
    self._viz_markers_pub = rospy.Publisher('visualization_marker_array', MarkerArray, queue_size=5)
    # initial position of robot arm
    self._arm_initial_poses = [
        ("shoulder_pan_joint", 1.296), ("shoulder_lift_joint", 1.480), ("upperarm_roll_joint", -0.904), ("elbow_flex_joint", 2.251), 
        ("forearm_roll_joint", -2.021), ("wrist_flex_joint", -1.113), ("wrist_roll_joint", -0.864)]
    
    # orientation constraint (unused)
    self._gripper_oc = OrientationConstraint()
    self._gripper_oc.header.frame_id = 'base_link'
    self._gripper_oc.link_name = 'wrist_roll_link'
    self._gripper_oc.orientation.z = -0.707
    self._gripper_oc.orientation.w = 0.707
    self._gripper_oc.absolute_x_axis_tolerance = 0.1
    self._gripper_oc.absolute_y_axis_tolerance = 0.1
    self._gripper_oc.absolute_z_axis_tolerance = 3.14
    self._gripper_oc.weight = 1.0
    # moveit args
    self._kwargs = {
      'allowed_planning_time': 30,
      'execution_timeout': 15,
      'group_name': 'arm',
      'num_planning_attempts': 10,
      # 'orientation_constraint': self._gripper_oc,
      'replan': True,
      'replan_attempts': 5,
      'tolerance': 0.01
    }
    # current pose of gripper (used in performing actions)
    self._current_pose = None
    # bag file directory
    script_path = os.path.abspath(__file__)
    self._bag_file_dir = os.path.split(script_path)[0][:-4] + '/bags'
    # subscriber and publisher for frontend
    self._web_app_request_sub = rospy.Subscriber("web_app_request", WebAppRequest, callback=self.web_app_request_callback)
    self._web_app_response_pub = rospy.Publisher('web_app_response', WebAppResponse, queue_size=5)
    # variables representing the program state
    self._sake_gripper_attached = False
    self._sake_gripper_action_finished = False
    self._sake_gripper_effort = "100"
    self._robot_stopped = False
    self._grasp_position_ready = False
    self._grasp_type = "h_close"
    self._do_position_ready = False
    self._do_position_id = -1
    self._preview_action_abbr = ""
    self._preview_traj = []  # the trajectory being previewed currently
    self._current_waypoint_id = -1
    rospy.sleep(0.5)
  def setup(self):
    """ Handler for robot set up """
    print("\nSetting up everything, please wait...\n")
    # set robot's initial state
    self._torso.set_height(0)
    self._head.pan_tilt(0, 0.8)
    # move arm to the initial position (with collision detection)
    self._arm.move_to_joint_goal(self._arm_initial_poses, replan=True)
    print("\nThe program is ready to use :-)\n")
  def shutdown(self):
    """ Handler for robot shutdown """
    print("\nShutting down... Bye :-)\n")
    # clear display
    self._viz_markers_pub.publish(MarkerArray(markers=[]))
    # moveit
    self.freeze_arm()
    self._planning_scene.clear()
    self._arm.cancel_all_goals()
    # save the database
    self._db.save()
    # moveit: move group commander
    self._moveit_group.stop()
    moveit_commander.roscpp_shutdown()
  def attach_sake_gripper(self):
    """
      Attaches SAKE gripper to Fetch's gripper, and updates the planning scene.
    """
    self.freeze_arm()
    # attach SAKE gripper to Fetch's gripper
    self._fetch_gripper.open()  # make sure Fetch's gripper is open
    self._fetch_gripper.close()
    self._sake_gripper_attached = True
    # add SAKE gripper to the planning scene
    frame_attached_to = 'gripper_link'
    frames_okay_to_collide_with = ['gripper_link', 'l_gripper_finger_link', 'r_gripper_finger_link']
    package_path = subprocess.check_output("rospack find ezgripper_driver", shell=True).replace('\n','')
    if rospy.get_param("use_urdf"):  # use real sake gripper mesh
      # palm
      sake_palm_pose = Pose(Point(-0.01, 0, 0.05), Quaternion(-0.7, 0, 0.7, 0))
      sake_palm_mesh_file = package_path + "/meshes/visual/SAKE_Palm_Dual.stl"
      self._planning_scene.attachMesh('sake_palm', sake_palm_pose, sake_palm_mesh_file, 
                                      frame_attached_to, touch_links=frames_okay_to_collide_with)
      # fingers
      sake_finger_1_pose = Pose(Point(0, -0.03, -0.055), Quaternion(0.5, -0.5, 0.5, 0.5))
      sake_finger_1_mesh_file = package_path + "/meshes/visual/SAKE_Finger.stl"
      self._planning_scene.attachMesh('sake_finger_1', sake_finger_1_pose, sake_finger_1_mesh_file, 
                                      frame_attached_to, touch_links=frames_okay_to_collide_with)
      sake_finger_2_pose = Pose(Point(0, 0.03, -0.055), Quaternion(-0.5, -0.5, -0.5, 0.5))
      sake_finger_2_mesh_file = package_path + "/meshes/visual/SAKE_Finger.stl"
      self._planning_scene.attachMesh('sake_finger_2', sake_finger_2_pose, sake_finger_2_mesh_file, 
                                      frame_attached_to, touch_links=frames_okay_to_collide_with)
    else:  # use a box to represent the sake gripper
      self._planning_scene.attachBox('sake', 0.03, 0.09, 0.15, 0, 0, -0.05,
                                     frame_attached_to,
                                     frames_okay_to_collide_with)
      self._planning_scene.setColor('sake', 1, 0, 1)
      self._planning_scene.sendColors()
    # calibrate SAKE gripper
    self.do_sake_gripper_action("calibrate")
  def remove_sake_gripper(self):
    """
      Removes SAKE gripper from Fetch's gripper, and updates the planning scene.
    """
    self.freeze_arm()
    # remove SAKE gripper from Fetch's gripper
    self._fetch_gripper.close()  # make sure Fetch's gripper is close
    self._fetch_gripper.open()
    self._sake_gripper_attached = False
    # remove SAKE gripper from the planning scene
    if rospy.get_param("use_urdf"):
      self._planning_scene.removeAttachedObject('sake_palm')
      self._planning_scene.removeAttachedObject('sake_finger_1')
      self._planning_scene.removeAttachedObject('sake_finger_2')
    else:
      self._planning_scene.removeAttachedObject('sake')
  def update_env(self, update_octo=True):
    """
      Updates the list of markers, and scan the surroundings to build an octomap.
      Returns false if the update fails, true otherwise.
    """
    # update markers
    self._reader.update()
    # update octomap
    if update_octo:
      # clear previous octomap
      if not self._clear_octomap():
        return False
      # scan the range: pan -0.75~0.75, tilt 0~0.7
      for i in range(7):  # 7 steps so pan actually reaches +0.75 as documented
        pan = -0.75 + 0.25 * i
        self._head.pan_tilt(pan, 0)
        rospy.sleep(2)
        self._head.pan_tilt(pan, 0.7)
        rospy.sleep(2)
        self._head.pan_tilt(pan, 0)
      # move the head back to initial position
      self._head.pan_tilt(0, 0.7)
    return True
  def do_sake_gripper_action(self, command):
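    """
      Publishes a SAKE gripper command: "calibrate", "h_close", "s_close",
      "open", or "<percent_open> <effort>" (e.g. "40 100"), as handled below.
      On the real robot, blocks until the gripper reports the action finished.
    """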
    # publish response message
    if command == "calibrate":
      self._publish_server_response(msg="Calibrating SAKE gripper, please wait...")
    elif command == "h_close":
      self._publish_server_response(msg="Hard closing SAKE gripper, please wait...")
    elif command == "s_close":
      self._publish_server_response(msg="Soft closing SAKE gripper, please wait...")
    elif command == "open":
      self._publish_server_response(msg="Opening SAKE gripper, please wait...")
    else:  # [percentage open, effort]
      self._publish_server_response(msg="Closing SAKE gripper, please wait...")
    # publish command
    self._sake_gripper_pub.publish(EzgripperAccess(type=command))
    # wait for the action to finish if in real
    if not rospy.get_param("use_sim"):
      while not self._sake_gripper_action_finished:
        rospy.sleep(0.05)  # yield instead of spinning in a tight busy-wait
      # finished, reset
      self._sake_gripper_action_finished = False
  def reset(self):
    """ Moves arm to its initial position and calibrates gripper """
    self.freeze_arm()
    self._arm.move_to_joint_goal(self._arm_initial_poses, replan=True)
    self.do_sake_gripper_action("calibrate")
    self._robot_stopped = False
    self._reset_program_state()
  def estop(self):
    """ Emergency stop. """
    self.relax_arm()
    self._robot_stopped = True
    self._reset_program_state()
  def preview_body_part_with_id(self, id_num):
    """
      Publishes visualization markers to mark the body part with given id.
    """
    raw_pose = self._get_tag_with_id(id_num)
    if raw_pose is not None:
      # visualize goal pose
      marker = Marker(type=Marker.CUBE,
                      id=0,
                      pose=raw_pose.pose.pose,
                      scale=Vector3(0.06, 0.06, 0.06),
                      header=raw_pose.header,
                      color=ColorRGBA(1.0, 0.75, 0.3, 0.8))
      self._viz_pub.publish(marker)
  def goto_part_with_id(self, id_num):
    """ 
      Moves the arm above the body part specified by id_num.
      Returns true on success, false otherwise.
    """
    self.freeze_arm()
    raw_pose = self._get_tag_with_id(id_num)
    if raw_pose is not None:
      # found marker, move towards it
      self.do_sake_gripper_action("40 " + self._sake_gripper_effort)
      # OPTION 1: pregrasp ---> grasp
      # highlight and move to the pre-grasp pose
      pre_grasp_offset = self._db.get("PREGRASP")
      pre_grasp_pose = self._move_arm_relative(raw_pose.pose.pose, raw_pose.header, offset=pre_grasp_offset, preview_only=True)
      self.highlight_waypoint(pre_grasp_pose, WAYPOINT_HIGHLIGHT_COLOR)
      if self._move_arm(pre_grasp_pose, final_state=False):
        # highlight and move to the grasp pose, clear octomap to ignore collision only at this point
        if self._clear_octomap():
          return self._move_arm(self._get_goto_pose(raw_pose), final_state=False, seed_state=self._get_seed_state())
      
      # # OPTION 2: grasp
      # return self._move_arm(self._get_goto_pose(raw_pose), final_state=False)
    # marker with id_num is not found, or some error occurred
    return False
  def preview_action_with_abbr(self, abbr, id_num):
    """
      Publishes visualization markers to preview waypoints on the trajectory with given abbr.
      Saves waypoints extracted from bag file to database if the entry "abbr" doesn't exist.
      Returns the colors of waypoints with respect to the ar tag. Records the positions of
      waypoints with respect to the ar tag.
    """
    # clear previous markers
    self._viz_markers_pub.publish(MarkerArray(markers=[]))
    # check the database for the action
    waypoints = self._db.get(abbr)
    if waypoints is None or len(waypoints) == 0:
      waypoints = self._save_traj_to_db(abbr, id_num)
    self._preview_action_abbr = ""
    self._preview_traj = []
    waypoints_with_respect_to_tag = []
    if waypoints:
      raw_pose = self._get_tag_with_id(id_num)
      if raw_pose is not None:
        prev_pose = self._get_goto_pose(raw_pose)
        # markers
        marker_arr = []
        # marker color gradient
        colors = list(START_COLOR.range_to(END_COLOR, len(waypoints)))
        # visualize the trajectory
        for i in range(len(waypoints)):
          # visualize the current waypoint
          marker = Marker(type=Marker.ARROW,
                          id=i,
                          pose=prev_pose.pose,
                          scale=TRAJ_HIGHLIGHT_SCALE,
                          header=prev_pose.header,
                          color=ColorRGBA(colors[i].red, colors[i].green, colors[i].blue, 0.8))
          marker_arr.append(marker)
          # record the waypoint
          waypoints_with_respect_to_tag.append(str(colors[i].hex))
          self._preview_traj.append(prev_pose)
          if i < len(waypoints) - 1:
            # calculate offset between the current point on the trajectory and the next point
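            # offset = inv(T_i) . T_{i+1}: the transform from waypoint i to
            # waypoint i+1, expressed in waypoint i's frame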
            r_pos = waypoints[i].pose  # current point
            r_mat = self._pose_to_transform(r_pos)
            w_mat = self._pose_to_transform(waypoints[i + 1].pose)
            offset = np.dot(np.linalg.inv(r_mat), w_mat)
            prev_pose = self._move_arm_relative(prev_pose.pose, prev_pose.header, offset=offset, preview_only=True)
        # publish new markers
        self._viz_markers_pub.publish(MarkerArray(markers=marker_arr))
        # record the action name
        self._preview_action_abbr = abbr
    return waypoints_with_respect_to_tag
  def highlight_waypoint(self, highlight_pose, color):
    """ Publishes a marker at the specified location. """
    marker = Marker(type=Marker.ARROW,
                    id=0,
                    pose=highlight_pose.pose,
                    scale=WAYPOINT_HIGHLIGHT_SCALE,
                    header=highlight_pose.header,
                    color=color)
    self._viz_pub.publish(marker)
  def edit_waypoint(self, waypoint_id, delta_x, delta_y, camera):
    """ Temporarily saves the changes to the specified waypoint, and highlights the resulting pose. """
    # calculate the resulting pose
    new_pose = self._compute_pose_by_delta(self._preview_traj[waypoint_id], delta_x, delta_y, camera)
    # save the new pose
    self._preview_traj[waypoint_id] = new_pose
    # preview the new trajectory
    marker_arr = []
    # marker color gradient
    colors = list(START_COLOR.range_to(END_COLOR, len(self._preview_traj)))
    # visualize the trajectory
    for i in range(len(self._preview_traj)):
      # highlight the waypoint that is being edited
      color = WAYPOINT_HIGHLIGHT_COLOR if i == waypoint_id else ColorRGBA(colors[i].red, colors[i].green, colors[i].blue, 0.8)
      marker = Marker(type=Marker.ARROW,
                      id=i,
                      pose=self._preview_traj[i].pose,
                      scale=TRAJ_HIGHLIGHT_SCALE,
                      header=self._preview_traj[i].header,
                      color=color)
      marker_arr.append(marker)
    # clear previous markers
    self._viz_markers_pub.publish(MarkerArray(markers=[]))
    # publish new markers
    self._viz_markers_pub.publish(MarkerArray(markers=marker_arr))
  def modify_traj_in_db(self, cancel_change=True):
    """ Overwrites the previous trajectory in database. """
    if not cancel_change:
      self._db.delete(self._preview_action_abbr)
      self._db.add(self._preview_action_abbr, self._preview_traj)
    self._preview_action_abbr = ""
    self._preview_traj = []
  def do_action_with_abbr(self, abbr, id_num):
    """ 
      Moves the arm to perform the action specified by abbr, saving the trajectory to the database if necessary.
      Returns true on success, false otherwise.
    """
    action_result = False
    if self._prepare_action(abbr, id_num):
      action_result = self._follow_traj_step_by_step(0)
    return action_result
  def do_action_with_abbr_smooth(self, abbr, id_num):
    """
      Moves arm to perform the action specified by abbr smoothly.
      Returns true on success, false otherwise.
    """
    action_result = False
    if self._prepare_action(abbr, id_num):
      # calculate a smooth trajectory passing through all the waypoints and move the arm
      action_result = self._move_arm(None, trajectory_waypoint=self._preview_traj, final_state=True, seed_state=self._get_seed_state())
      if not action_result:
        # smooth action fails, do the action step by step instead
        action_result = self._follow_traj_step_by_step(0)
    return action_result
  def pause_action(self):
    """ Pause the current action. """
    # self.relax_arm()
    print(self._current_waypoint_id)
  def continue_action(self):
    """ Continue the current action. """
    # self.freeze_arm()
    print(self._current_waypoint_id)
    # if self._current_waypoint_id > -1:
      # # continue going to the next waypoint
      # self._follow_traj_step_by_step(self._current_waypoint_id + 1)
  def record_action_with_abbr(self, abbr, id_num):
    """
      Records the pose offset named abbr relative to the tag, always overwriting the previous entry (if any).
      Returns true on success, false otherwise.
    """
    # get tag pose
    tag_pose = self._get_tag_with_id(id_num)
    if tag_pose is None:
      return False
    # get the pose to be recorded: transformation lookup
    (position, quaternion) = self._tf_lookup()
    if (position, quaternion) == (None, None):
      return False
    # get the transformation, record it
    record_pose = Pose()
    record_pose.position.x = position[0]
    record_pose.position.y = position[1]
    record_pose.position.z = position[2]
    record_pose.orientation.x = quaternion[0]
    record_pose.orientation.y = quaternion[1]
    record_pose.orientation.z = quaternion[2]
    record_pose.orientation.w = quaternion[3]
    # get the offset between the tag pose and the pose to be recorded
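    # offset = inv(T_tag) . T_record, i.e. the recorded pose expressed in the
    # tag's coordinate frame, so it can later be replayed relative to any tag pose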
    t_pos = tag_pose.pose.pose
    t_mat = self._pose_to_transform(t_pos)
    w_mat = self._pose_to_transform(record_pose)
    offset = np.dot(np.linalg.inv(t_mat), w_mat)
    # add the offset to database
    self._db.add(abbr, offset)
    self._db.save()
    return True
  def relax_arm(self):
    """ Relax the robot arm, if the program is running on the real robot """
    if not rospy.get_param("use_sim") and not self._arm_relaxed:
      goal = QueryControllerStatesGoal()
      state = ControllerState()
      state.name = 'arm_controller/follow_joint_trajectory'
      state.state = ControllerState.STOPPED
      goal.updates.append(state)
      self._controller_client.send_goal(goal)
      self._controller_client.wait_for_result()
      self._arm_relaxed = True
  def freeze_arm(self):
    """ Freeze the robot arm, if the program is running on the real robot """
    if not rospy.get_param("use_sim") and self._arm_relaxed:
      goal = QueryControllerStatesGoal()
      state = ControllerState()
      state.name = 'arm_controller/follow_joint_trajectory'
      state.state = ControllerState.RUNNING
      goal.updates.append(state)
      self._controller_client.send_goal(goal)
      self._controller_client.wait_for_result()
      self._arm_relaxed = False
  def get_list(self):
    """ Returns a list of AR tags recognized by the robot. """
    return self._reader.get_list()
  def get_db_list(self):
    """ Returns list of entries in the database. """
    return self._db.list()
  def get_db_entry(self, entry):
    """ Returns values associated with the given entry in the database. """
    return self._db.get(entry)
  def delete_db_entry(self, name):
    """ Delete the database entry with the given name """
    self._db.delete(name)
    self._db.save()
  def web_app_request_callback(self, msg):
    """
      Parses the request given by the web application, and calls the corresponding functions.
    """
    request_type, request_args = msg.type, msg.args
    print("type: " + request_type)
    print("args: " + str(request_args) + "\n")
    if request_type == "attach":
      return_msg = "SAKE gripper has already attached!"
      if not self._sake_gripper_attached:
        self._publish_server_response(msg="Attaching SAKE gripper...")
        self.attach_sake_gripper()
        return_msg = "SAKE gripper attached"
      self._publish_server_response(status=True, msg=return_msg)
    elif request_type == "remove":
      return_msg = "SAKE gripper has already removed!"
      if self._sake_gripper_attached:
        self._publish_server_response(msg="Removing SAKE gripper...")
        self.remove_sake_gripper()
        return_msg = "SAKE gripper removed"
      self._publish_server_response(status=True, msg=return_msg)
    elif not self._sake_gripper_attached:
      # need to attach SAKE gripper first
      self._publish_server_response(status=True, msg="Please attach SAKE gripper first!")
    else:
      # SAKE gripper is already attached
      if request_type == "record":
        self._publish_server_response(msg="Recording the current scene...")
        if self.update_env(update_octo=bool(request_args[0])):
          # get the list of body parts and actions
          parts = self.get_list()
          parts_info, actions_info = [], []
          if len(parts):
            for part in parts:
              if part.id in BODY_PARTS and part.id in ACTIONS:
                parts_info.append(str(part.id) + ":" + BODY_PARTS[part.id])
                for action in ACTIONS[part.id]:
                  actions_info.append(str(part.id) + ":" + action + ":" + ABBR[action])
          self._publish_server_response(type="parts", args=parts_info)
          self._publish_server_response(type="actions", status=True, args=actions_info, msg="Scene recorded")
        else:
          self._publish_server_response(status=True, msg="Failed to record the current scene!")
      elif request_type == "prev_id" and len(request_args) == 1:
        id_num = int(request_args[0])  # convert from string to int
        self._publish_server_response(status=True, msg="Previewing " + BODY_PARTS[id_num] + "...")
        self.preview_body_part_with_id(id_num)
      elif request_type == "prev" and len(request_args) == 2:
        abbr, id_num = request_args[0], int(request_args[1])
        waypoints_with_respect_to_tag = self.preview_action_with_abbr(abbr, id_num)
        self._publish_server_response(type=request_type, status=True, args=waypoints_with_respect_to_tag, 
                        msg="Previewing the action with respect to body part " + BODY_PARTS[id_num] + "...")
      elif request_type == "highlight" and len(request_args) == 1:
        waypoint_id = int(request_args[0])
        self.highlight_waypoint(self._preview_traj[waypoint_id], WAYPOINT_HIGHLIGHT_COLOR)
        self._publish_server_response(status=True)
      elif request_type == "edit" and len(request_args) == 4:
        waypoint_id, delta_x, delta_y, camera = int(request_args[0]), int(request_args[1]), int(request_args[2]), request_args[3]
        self.edit_waypoint(waypoint_id, delta_x, delta_y, camera)
        self._publish_server_response(status=True)
      elif request_type == "save_edit" or request_type == "cancel_edit":
        if request_type == "save_edit":
          self.modify_traj_in_db(cancel_change=False)
        else:
          self.modify_traj_in_db()  # cancel all the changes
        self._publish_server_response(status=True)
      elif request_type == "reset":
        self._publish_server_response(msg="Resetting...")
        self.reset()
        self._publish_server_response(status=True, msg="Done")
      elif not self._robot_stopped: 
        # moveit controller is running
        if request_type == "go" and len(request_args) == 1:
          self._do_position_ready = False
          id_num = int(request_args[0])
          self._publish_server_response(msg="Moving towards body part " + BODY_PARTS[id_num] + "...")
          if self.goto_part_with_id(id_num):
            self._grasp_position_ready = True
            self._do_position_id = id_num
            self._publish_server_response(status=True, msg="Done, ready to grasp")
          else:
            self._publish_server_response(status=True, msg="Fail to move!")
        elif request_type == "grasp" and self._grasp_position_ready and len(request_args) == 1:
          self._publish_server_response(msg="Grasping...")
          self._grasp_type = "h_close" if request_args[0] == "h" else "s_close"
          self.do_sake_gripper_action(self._grasp_type)
          self._grasp_position_ready = False
          self._do_position_ready = True
          self._publish_server_response(status=True, msg="Grasped")
        elif request_type == "relax":
          self._publish_server_response(msg="Relaxing arm...")
          self.relax_arm()
          self._publish_server_response(status=True, msg="Arm relaxed")
        elif request_type == "freeze":
          self._publish_server_response(msg="Freezing arm...")
          self.freeze_arm()
          self._publish_server_response(status=True, msg="Arm froze")
        elif (request_type == "do" or request_type == "do_s") and len(request_args) > 0:
          action_abbr = request_args[0]
          return_msg = "Action failed!"
          if self._do_position_ready:
            # performing mode
            self._publish_server_response(msg="Performing " + action_abbr + "...")
            result = False
            if request_type == "do":  # step by step
              result = self.do_action_with_abbr(action_abbr, self._do_position_id)
            else:  # "do_s": smooth
              result = self.do_action_with_abbr_smooth(action_abbr, self._do_position_id)
            if result:
              return_msg = "Action succeeded"
          else:
            return_msg = "Unknown action for body part with ID: " + str(self._do_position_id)
          # always release gripper
          self._publish_server_response(msg="Releasing the gripper...")
          self.do_sake_gripper_action("40 " + self._sake_gripper_effort)
          self._do_position_ready = False
          self._publish_server_response(status=True, msg=return_msg)
        elif request_type == "open":
          self._publish_server_response(msg="Opening the gripper...")
          self.do_sake_gripper_action("open")
          self._do_position_ready = False
          self._publish_server_response(status=True, msg="Gripper opened")
        elif request_type == "stop":
          self._publish_server_response(msg="Stopping the robot...")
          self.estop()
          self._publish_server_response(status=True, msg="Robot stopped, please \"RESET\" if you want to continue using it")
        elif request_type == "run" and len(request_args) == 3:
          # TODO: start execution from the current step in the action trajectory;
          # for now, start execution from the very beginning
          self.web_app_request_callback(WebAppRequest(type="go", args=[request_args[0]]))
          self.web_app_request_callback(WebAppRequest(type="grasp", args=[request_args[1]]))
          self.web_app_request_callback(WebAppRequest(type="do_s", args=[request_args[2]]))
          self._publish_server_response(type=request_type, status=True, msg="DONE")
        elif request_type == "step":
          return_msg = "Ready!"
          waypoint_id = int(request_args[0])
          if waypoint_id == -1:  # goto tag and grasp
            self.web_app_request_callback(WebAppRequest(type="go", args=[request_args[1]]))
            self.web_app_request_callback(WebAppRequest(type="grasp", args=[request_args[2]]))
          else:  # move along trajectory
            self._grasp_type = "h_close" if request_args[1] == "h" else "s_close"
            return_msg = "Fail to reach waypoint #" + request_args[0]
            result = self._goto_waypoint_on_traj_with_id(waypoint_id)
            if waypoint_id == len(self._preview_traj) - 1:  # last point
              self._publish_server_response(msg="Releasing the gripper...")
              self.do_sake_gripper_action("40 " + self._sake_gripper_effort)
              self._do_position_ready = False
            if result:
              return_msg = "Reached waypoint #" + request_args[0]
          self._publish_server_response(type=request_type, status=True, args=[request_args[0]], msg=return_msg)
        elif request_type == "pause":
          self._publish_server_response(msg="Pausing...")
          self.pause_action()
          self._publish_server_response(type=request_type, status=True, msg="PAUSED")
        elif request_type == "continue":
          self._publish_server_response(msg="Continuing...")
          self.continue_action()
          self._publish_server_response(type=request_type, status=True, msg="CONTINUED")
      else: 
        self._publish_server_response(status=True, msg="Invalid command :)")
  def _reset_program_state(self):
    """ Resets program state. """
    self._grasp_position_ready = False
    self._do_position_ready = False
    self._do_position_id = -1
  def _clear_octomap(self):
    """ Clears the octomap. Returns true if succeeds, false otherwise. """
    rospy.wait_for_service('clear_octomap')
    try:
      clear_octo = rospy.ServiceProxy('clear_octomap', Empty)
      clear_octo()
    except rospy.ServiceException as e:
      rospy.logerr('Failed to clear octomap: {}'.format(e))
      return False
    return True
  def _move_arm_relative(self, ref_pose, ref_header, offset=None, preview_only=False, seed_state=None):
    """ 
      Calculates the coordinate of the goal by adding the offset to the given reference pose, 
      and moves the arm to the goal. If it's only for previewing, returns the goal pose,
      else returns the result of the movement.
    """
    goal_pose = PoseStamped()
    goal_pose.header = ref_header
    if offset is not None:
      # current pose is valid, perform action
      t_mat = self._pose_to_transform(ref_pose)
      # compute the new coordinate
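      # new_trans = T_ref . offset applies the stored offset in the reference
      # pose's own frame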
      new_trans = np.dot(t_mat, offset)
      pose = self._transform_to_pose(new_trans)
      goal_pose.pose = pose
    else:
      goal_pose.pose = ref_pose
    
    if preview_only:
      return goal_pose
    else:
      # move to the goal position while avoiding unreasonable trajectories!
      # close SAKE gripper again to ensure the limb is grasped
      self.do_sake_gripper_action(self._grasp_type)
      # visualize goal pose
      self.highlight_waypoint(goal_pose, ColorRGBA(1.0, 1.0, 0.0, 0.8))
      return goal_pose if self._move_arm(goal_pose, final_state=True, seed_state=seed_state) else None
  def _move_arm(self, goal_pose, trajectory_waypoint=[], final_state=False, seed_state=None):
    """ 
      Moves the arm to the specified goal_pose. Returns true on success, false otherwise.
    """
    error = None
    if not final_state:
      # simply go to the goal_pose
      error = self._arm.move_to_pose_with_seed(goal_pose, seed_state, [], **self._kwargs)
      # record the current pose because we still have the "do action" step to do
      self._current_pose = goal_pose
    else:
      # go to goal_pose while avoiding unreasonable trajectories!
      if trajectory_waypoint:
        # create an array of waypoints
        waypoints = []
        for tw in trajectory_waypoint:
          waypoints.append(tw.pose)
        # using trajectory waypoints to perform a smooth motion
        plan = self._arm.get_cartesian_path(self._moveit_group, seed_state, waypoints)
        if plan:
          error = self._arm.execute_trajectory(self._moveit_group, plan)
        else:
          error = 'PLANNING_FAILED'
      else:
        # using seed
        error = self._arm.move_to_pose_with_seed(goal_pose, seed_state, [], **self._kwargs)
        if error is not None:
          # planning with seed failed, try without seed 
          # moveit: move group commander
          # check if the pose can be reached in a straight line motion
          plan = self._arm.straight_move_to_pose_check(self._moveit_group, goal_pose)
          if plan:
            error = self._arm.straight_move_to_pose(self._moveit_group, plan)
          else:
            error = 'PLANNING_FAILED'
      # reset current pose to none
      self._current_pose = None
    
    if error is not None:
      self._arm.cancel_all_goals()
      rospy.logerr("Fail to move: {}".format(error))
      return False
    # succeed
    return True
  def _transform_to_pose(self, matrix):
    """ Matrix to pose """
    pose = Pose()
    trans_vector = tft.translation_from_matrix(matrix)
    pose.position = Point(trans_vector[0], trans_vector[1], trans_vector[2])
    quartern = tft.quaternion_from_matrix(matrix)
    pose.orientation = Quaternion(quartern[0], quartern[1], quartern[2], quartern[3])
    return pose
  def _pose_to_transform(self, pose):
    """ Pose to matrix """
    q = pose.orientation
    matrix = tft.quaternion_matrix([q.x, q.y, q.z, q.w])
    matrix[0, 3] = pose.position.x
    matrix[1, 3] = pose.position.y
    matrix[2, 3] = pose.position.z
    return matrix
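  # Note: _transform_to_pose and _pose_to_transform are inverses of each other;
  # round-tripping a pose through both reproduces it (up to quaternion sign).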
  def _get_tag_with_id(self, id_num):
    """ Returns the AR tag with the given id, returns None if id not found """
    tag_pose = self._reader.get_tag(id_num)
    if tag_pose is None:
      rospy.logerr("AR tag lookup error: Invalid ID# " + str(id_num))
    return tag_pose
  def _tf_lookup(self):
    """ 
      Looks up the transformation between "base_link" and "wrist_roll_link" (retrying up to TRANSFROM_LOOKUP_RETRY times),
      and returns the result.
    """
    (position, quaternion) = (None, None)
    count = 0
    while True:
      if (position, quaternion) != (None, None):  # lookup succeeds
        return (position, quaternion)
      elif count >= TRANSFROM_LOOKUP_RETRY:  # exceeds maximum retry times
        rospy.logerr("Fail to lookup transfrom information between 'base_link' and 'wrist_roll_link'")
        return (None, None)
      else: # try to lookup transform information
        try:
          (position, quaternion) = self._tf_listener.lookupTransform('base_link', 'wrist_roll_link', rospy.Time(0))
        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
          count += 1
          continue
  def _get_goto_pose(self, ar_pose):
    """
      Calculates the grasp pose of gripper given the AR tag pose, returns the gripper pose.
    """
    grasp_offset = self._db.get("GRASP")
    goal_pose = self._move_arm_relative(ar_pose.pose.pose, ar_pose.header, offset=grasp_offset, preview_only=True)
    self.highlight_waypoint(goal_pose, WAYPOINT_HIGHLIGHT_COLOR)
    return goal_pose
  def _prepare_action(self, abbr, id_num):
    """
      Previews the action trajectory, moves robot arm to the starting position of the action and
      grasps the limb. Returns true if succeeds, false otherwise.
    """
    # preview action
    self.preview_action_with_abbr(abbr, id_num)
    # freeze arm for moveit
    self.freeze_arm()
    # get AR tag information
    tag_pose = self._get_tag_with_id(id_num)
    if tag_pose is None or self._current_pose is None:
      return False
    # move arm to the starting position relative to AR tag
    if not self._preview_traj or not self.goto_part_with_id(id_num):
      rospy.logerr("Fail to move to the starting position for action: " + abbr)
      return False
    return True
  def _follow_traj_step_by_step(self, start_id):
    """
      Follows the current trajectory step by step. Returns true if at least one waypoint is reached, false otherwise.
    """
    succeed = False
    for i in range(start_id, len(self._preview_traj) - 1):
      goal_pose = self._preview_traj[i + 1]
      # move arm relative to the previous pose (use seed), skip the current waypoint if the current action fails
      action_result = self._move_arm_relative(goal_pose.pose, goal_pose.header, seed_state=self._get_seed_state())
      # record the current waypoint id
      self._current_waypoint_id = i + 1
      # check action result
      if action_result is not None:
        succeed = True  # the whole action succeeds if at least one pose is reached
      else:
        rospy.logerr("Fail to reach waypoint " + str(i + 1))
    # action finished, reset the current waypoint
    self._current_waypoint_id = -1
    return succeed
  def _goto_waypoint_on_traj_with_id(self, waypoint_id):
    """
      Goes to the specified waypoint on the trajectory. Returns true on success, false otherwise.
    """
    succeed = False
    if -1 < waypoint_id < len(self._preview_traj):
      goal_pose = self._preview_traj[waypoint_id]
      action_result = self._move_arm_relative(goal_pose.pose, goal_pose.header, seed_state=self._get_seed_state())
      if action_result is not None:
        succeed = True
      else:
        rospy.logerr("Fail to reach waypoint " + str(i + 1))
    return succeed
  def _save_traj_to_db(self, abbr, id_num):
    """
      Checks the bag file for the trajectory with the given abbr, calculates waypoints and saves them to the database.
      Returns the calculated waypoints on success, None otherwise.
    """
    # check bag files for the trajectory
    bag_file_path = os.path.join(self._bag_file_dir, abbr.lower() + '.bag')
    bag = rosbag.Bag(bag_file_path)
    waypoints = []
    prev_msg = []
    # get the trajectory from bag file
    for topic, msg, t in bag.read_messages(topics=['/joint_states']):
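      # positions 6:13 are assumed to be the 7 arm joints in the recorded
      # /joint_states messages (other joints are published before them)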
      joint_state = list(msg.position[6:13])
      if len(joint_state) != 0 and (len(prev_msg) == 0 or np.abs(np.sum(np.subtract(joint_state, prev_msg))) > rospy.get_param("arm_traj_threshold")):
        prev_msg = joint_state
        # use forward kinematics to find the wrist position
        point = self._arm.compute_fk(msg)
        if point:
          # add the result position
          waypoints.append(point[0])
    bag.close()
    if len(waypoints) < 2:
      # this trajectory is empty because it only contains the starting point
      rospy.logerr("Empty trajectory for action: " + abbr)
      return None
    
    # add the result position to database
    self._db.add(abbr, waypoints)
    self._db.save()
    return waypoints
  def _publish_server_response(self, type="", status=False, args=[], msg=""):
    """ Publishes the server response message, and prints the message in console if needed. """
    if rospy.get_param("console_output"):
      print(msg)
    self._web_app_response_pub.publish(WebAppResponse(type=type, status=status, args=args, msg=msg))
  def _compute_pose_by_delta(self, current_pose, delta_x, delta_y, camera):
    """ 
      Computes and returns the new pose with respect to base_link after applying 
      delta_x and delta_y to the current pose in the specified camera view.
    """
    x_distance, y_distance = dpx_to_distance(delta_x, delta_y, camera, current_pose, True)
    return delta_modified_stamped_pose(x_distance, y_distance, camera, current_pose)
  def _get_seed_state(self):
    """ Returns the current arm joint state as the seed used in motion planning. """
    seed_state = JointState()
    seed_state.name = self._arm_joints.names()
    seed_state.position = self._arm_joints.values()
    return seed_state
  def _set_sake_gripper_action_status(self, msg):
    """ This is the callback of sake gripper status. """
    self._sake_gripper_action_finished = True
 | 
	mit | -8,208,984,794,664,362,000 | 40.827053 | 150 | 0.642281 | false | 
| 
	mmktomato/zenmai-bts | 
	test/test_zenmai.py | 
	1 | 
	15025 | 
	"""Unit test"""
import unittest
import re
import io
from datetime import datetime, timedelta
from flask import request, session
from . import ctx
from .zenmai_test_utils import create_issue, create_comment, create_attached_file, create_user, \
                                login, logout, delete_all_issues
from web.models.issue import Issue
from web.models.user import User
class ZenmaiTestCase(unittest.TestCase):
    """TestCase class"""
    def _assert_403(self, data):
        """Helper method of 403 assertion."""
        self.assertIn('403', data)
        self.assertIn('Forbidden', data)
    def _assert_issue_detail(self, data, subject, body, pub_date, state_name, attached_file_name):
        """Helper method of issue detail page assertion.
        Args:
            data (string): HTTP Response body.
            subject (string): Regex string of subject.
            body (string): Regex string of body.
            pub_date (datetime): pub_date.
            state_name (string): Regex string of state name.
            attached_file_name (string): Regex string of attached file name.
        """
        # subject
        subject_regex = re.compile('<h1>{}</h1>'.format(subject))
        self.assertRegex(data, subject_regex)
        # body
        body_regex = re.compile(r'<div class="panel-body">.*<p class="zen-comment-body">{}</p>.*</div>'.format(body), re.DOTALL)
        self.assertRegex(data, body_regex)
        # pub_date
        pub_date_regex = re.compile('<div class="panel-heading">.*{0:%Y-%m-%d %H:%M:%S}.*</div>'.format(pub_date), re.DOTALL)
        self.assertRegex(data, pub_date_regex)
        # state_name
        state_name_regex = re.compile('<span class="label.*">{}</span>'.format(state_name))
        self.assertRegex(data, state_name_regex)
        # attached_file_name
        attached_file_name_regex = re.compile('<div class="panel-footer">.*download: <a href="/download/\d+/">{}</a>.*</div>'.format(attached_file_name), re.DOTALL)
        self.assertRegex(data, attached_file_name_regex)
    def test_get_issue_list(self):
        """Test case of issue list. (HTTP GET)"""
        issue = create_issue(subject='test subject.test_get_issue_list.')
        issue.add()
        res = ctx['TEST_APP'].get('/')
        data = res.data.decode('utf-8')
        subject_regex = re.compile(r'<a href="/{}/">.*test subject\.test_get_issue_list\..*</a>'.format(issue.id))
        self.assertRegex(data, subject_regex)
    def test_get_empty_issue_list(self):
        """Test case of no issues. (HTTP GET)"""
        delete_all_issues()
        res = ctx['TEST_APP'].get('/')
        data = res.data.decode('utf-8')
        self.assertIn('No issues.', data)
    def test_get_issue_detail(self):
        """Test case of issue detail. (HTTP GET)"""
        pub_date = datetime.utcnow() + timedelta(days=1) # tomorrow
        attached_file = create_attached_file(name='test.txt')
        comment = create_comment(body='test body.test_get_issue_detail.', pub_date=pub_date, attached_files=[attached_file])
        issue = create_issue(subject='test subject.test_get_issue_detail.', comments=[comment], state_id=2)
        issue.add()
        res = ctx['TEST_APP'].get('/{}/'.format(issue.id))
        data = res.data.decode('utf-8')
        self._assert_issue_detail(
                data=data,
                subject='test subject\.test_get_issue_detail\.',
                body='test body\.test_get_issue_detail\.',
                pub_date=pub_date,
                state_name=issue.state.name,
                attached_file_name='test\.txt')
    def test_post_issue_detail(self):
        """Test case of issue detail. (HTTP POST)"""
        issue = create_issue()
        issue.add()
        # without authentication.
        res = ctx['TEST_APP'].post('/{}/'.format(issue.id), data={
            'csrf_token': ctx['CSRF_TOKEN']
        }, follow_redirects=True)
        self._assert_403(res.data.decode('utf-8'))
        # with authentication.
        with login() as (user, _):
            res = ctx['TEST_APP'].post('/{}/'.format(issue.id), data={
                'csrf_token': ctx['CSRF_TOKEN'],
                'new_body': 'test body.test_post_issue_detail',
                'new_state': 1,
                'file': (io.BytesIO(b'test attached file content.test_post_issue_detail.'), 'test.txt')
            }, follow_redirects=True)
            data = res.data.decode('utf-8')
            self._assert_issue_detail(
                    data=data,
                    subject=issue.subject,
                    body=issue.comments[0].body,
                    pub_date=issue.comments[0].pub_date,
                    state_name=issue.state.name,
                    attached_file_name='test\.txt')
    def test_get_no_issue_detail(self):
        """Test case of no issue detail. (HTTP GET)"""
        issue = create_issue()
        issue.add()
        res = ctx['TEST_APP'].get('/{}/'.format(issue.id + 1))
        self.assertEqual(res.status_code, 404)
    def test_get_new_issue(self):
        """Test case of new issue page. (HTTP GET)"""
        # without authentication.
        res = ctx['TEST_APP'].get('/new/', follow_redirects=True)
        data = res.data.decode('utf-8')
        self.assertIn('<title>Zenmai - login</title>', data)
        self.assertIn('you need to login.', data)
        # with authentication.
        with login() as (_, _):
            res = ctx['TEST_APP'].get('/new/')
            data = res.data.decode('utf-8')
            self.assertIn('<title>Zenmai - new issue</title>', data)
            self.assertIn('Add new issue', data)
    def test_post_new_issue(self):
        """Test case of new issue page. (HTTP POST)"""
        # without authentication.
        res = ctx['TEST_APP'].post('/new/', data={
            'csrf_token': ctx['CSRF_TOKEN']
        }, follow_redirects=True)
        self._assert_403(res.data.decode('utf-8'))
        # with authentication.
        with login() as (user, _):
            res = ctx['TEST_APP'].post('/new/', data={
                'csrf_token': ctx['CSRF_TOKEN'],
                'new_subject': 'test subject.test_post_new_issue.',
                'new_body': 'test body.test_post_new_issue.',
                'new_state': 1,
                'file': (io.BytesIO(b'test attached file content.test_post_new_issue.'), 'test.txt')
            }, follow_redirects=True)
            data = res.data.decode('utf-8')
            issue = Issue.query \
                .filter_by(subject='test subject.test_post_new_issue.') \
                .first()
            self._assert_issue_detail(
                    data=data,
                    subject='test subject\.test_post_new_issue\.',
                    body='test body\.test_post_new_issue\.',
                    pub_date=issue.comments[0].pub_date,
                    state_name='Open',
                    attached_file_name='test\.txt')
    def test_post_large_attached_file(self):
        """Test case of post request with too large attached file."""
        large_buf = bytes(ctx['APP'].config['MAX_CONTENT_LENGTH'] + 1)
        res = ctx['TEST_APP'].post('/new/', data={
            'new_subject': 'test subject.test_post_new_issue.',
            'new_body': 'test body.test_post_new_issue.',
            'new_state': 1,
            'file': (io.BytesIO(large_buf), 'test.txt')
        }, follow_redirects=True)
        self.assertEqual(res.status_code, 413)
    def test_get_download_attached_file(self):
        """Test case of downloading attached file. (HTTP GET)"""
        attached_file = create_attached_file(data=b'test content of attached file.test_get_download_attached_file.')
        comment = create_comment(attached_files=[attached_file])
        issue = create_issue(comments=[comment])
        issue.add()
        res = ctx['TEST_APP'].get('/download/{}/'.format(attached_file.id))
        data = res.data.decode('utf-8')
        self.assertEqual(data, 'test content of attached file.test_get_download_attached_file.')
    def test_get_login_page(self):
        """Test case of login page. (HTTP GET)"""
        res = ctx['TEST_APP'].get('/user/login/')
        data = res.data.decode('utf-8')
        self.assertIn('<title>Zenmai - login</title>', data)
        self.assertEqual(res.status_code, 200)
    def test_post_login_page(self):
        """Test case of login. (HTTP POST)"""
        user = create_user( \
                id='testid.test_post_login_page', \
                name='testname.test_post_login_page', \
                password='testpassword.test_post_login_page')
        with login(user, 'testpassword.test_post_login_page') as (_, res):
            data = res.data.decode('utf-8')
            self.assertEqual(res.status_code, 200)
            self.assertIn('<li><a href="/user/">{}(id:{})</a></li>'.format(user.name, user.id), data)
    def test_get_logout_page(self):
        """Test case of logout. (HTTP GET)"""
        user = create_user( \
                id='testid.test_get_logout_page', \
                name='testname.test_post_logout_page', \
                password='testpassword.test_post_logout_page')
        with login(user, 'testpassword.test_post_logout_page', do_logout=False):
            pass
        res = logout()
        data = res.data.decode('utf-8')
        self.assertEqual(res.status_code, 200)
        self.assertIn('<title>Zenmai - issues</title>', data)
    def test_get_user_register_page(self):
        """Test case of user register page. (HTTP GET)"""
        res = ctx['TEST_APP'].get('/user/new/')
        data = res.data.decode('utf-8')
        self.assertIn('<title>Zenmai - register</title>', data)
        self.assertEqual(res.status_code, 200)
    def test_post_register_valid_user(self):
        """Test case of valid user registration. (HTTP POST)"""
        res = ctx['TEST_APP'].post('/user/new/', data={
            'csrf_token': ctx['CSRF_TOKEN'],
            'user_id': 'testid.test_post_register_valid_user',
            'user_name': 'testname.test_post_register_valid_user',
            'password': 'testpassword.test_post_register_valid_user',
            'password_retype': 'testpassword.test_post_register_valid_user'
        }, follow_redirects=True)
        data = res.data.decode('utf-8')
        self.assertIn('<title>Zenmai - login</title>', data)
        self.assertEqual(res.status_code, 200)
    def test_post_register_invalid_user(self):
        """Test case of invalid user registration. (HTTP POST)"""
        # password is not matched.
        res = ctx['TEST_APP'].post('/user/new/', data={
            'csrf_token': ctx['CSRF_TOKEN'],
            'user_id': 'testid.test_post_register_invalid_user',
            'user_name': 'testname.test_post_register_invalid_user',
            'password': 'testpassword.test_post_register_invalid_user',
            'password_retype': 'invalid password'
        }, follow_redirects=True)
        data = res.data.decode('utf-8')
        self.assertIn('<title>Zenmai - register</title>', data)
        self.assertIn('password is not matched.', data)
        self.assertEqual(res.status_code, 200)
        # already exist.
        ctx['TEST_APP'].post('/user/new/', data={
            'csrf_token': ctx['CSRF_TOKEN'],
            'user_id': 'testid.test_post_register_invalid_user',
            'user_name': 'testname.test_post_register_invalid_user',
            'password': 'testpassword.test_post_register_invalid_user',
            'password_retype': 'testpassword.test_post_register_invalid_user'
        }, follow_redirects=True)
        res = ctx['TEST_APP'].post('/user/new/', data={
            'csrf_token': ctx['CSRF_TOKEN'],
            'user_id': 'testid.test_post_register_invalid_user',
            'user_name': 'testname.test_post_register_invalid_user',
            'password': 'testpassword.test_post_register_invalid_user',
            'password_retype': 'testpassword.test_post_register_invalid_user'
        }, follow_redirects=True)
        data = res.data.decode('utf-8')
        self.assertIn('<title>Zenmai - register</title>', data)
        self.assertIn("id '{}' is already exists.".format('testid.test_post_register_invalid_user'), data)
        self.assertEqual(res.status_code, 200)
    def test_get_user_page(self):
        """Test case of user page. (HTTP GET)"""
        # without authentication.
        res = ctx['TEST_APP'].get('/user/', follow_redirects=True)
        data = res.data.decode('utf-8')
        self.assertIn('<title>Zenmai - login</title>', data)
        self.assertEqual(res.status_code, 200)
        # with authentication.
        with login() as (user, _):
            res = ctx['TEST_APP'].get('/user/')
            data = res.data.decode('utf-8')
            self.assertIn('<td>{}</td>'.format(user.id), data)
            self.assertIn('<td>{}</td>'.format(user.name), data)
            self.assertEqual(res.status_code, 200)
    def test_get_user_edit_page(self):
        """Test case of user edit page. (HTTP GET)"""
        # without authentication.
        res = ctx['TEST_APP'].get('/user/edit/')
        data = res.data.decode('utf-8')
        self._assert_403(res.data.decode('utf-8'))
        # with authentication.
        with login() as (user, _):
            res = ctx['TEST_APP'].get('/user/edit/')
            data = res.data.decode('utf-8')
            self.assertIn('<title>Zenmai - edit user</title>', data)
            self.assertIn('value="{}" readonly'.format(user.id), data)
            self.assertIn('value="{}"'.format(user.name), data)
            self.assertIn("leave empty if you don't want to change password.", data)
            self.assertEqual(res.status_code, 200)
    def test_post_user_edit_page(self):
        """Test case of edit user. (HTTP POST)"""
        # without authentication.
        res = ctx['TEST_APP'].post('/user/edit/', data={
            'csrf_token': ctx['CSRF_TOKEN'],
            'user_id': 'testid.test_post_user_edit_page',
            'user_name': 'testname.test_post_user_edit_page',
            'password': 'testpassword.test_post_user_edit_page',
            'password_retype': 'testpassword.test_post_user_edit_page'
        }, follow_redirects=True)
        self._assert_403(res.data.decode('utf-8'))
        # with authentication.
        with login() as (user, _):
            res = ctx['TEST_APP'].post('/user/edit/', data={
                'csrf_token': ctx['CSRF_TOKEN'],
                'user_id': user.id,
                'user_name': 'new testname.test_post_user_edit_page',
                'password': 'new testpassword.test_post_user_edit_page',
                'password_retype': 'new testpassword.test_post_user_edit_page'
            }, follow_redirects=True)
            data = res.data.decode('utf-8')
            self.assertIn('<td>{}</td>'.format(user.id), data)
            self.assertIn('<td>{}</td>'.format('new testname.test_post_user_edit_page'), data)
            self.assertEqual(res.status_code, 200)
 | 
	mit | 1,231,649,006,724,835,000 | 40.969274 | 164 | 0.568053 | false | 
| 
	MrBhendel/Cirrus | 
	ModulusList.py | 
	1 | 
	1374 | 
	import sys
from abc import ABCMeta, abstractmethod
class ModulusList:
	'''
	Maintains a list of (host, modulus, e) tuples.
	'''
	__metaclass__ = ABCMeta
	def __init__(self):
		self._modulusList = []
	def addModulusList(self, other):
		for i in range(0, other.length()):
			item = other[i]
			self.add(item[0], item[1], item[2])
	@abstractmethod
	def add(self, host, modulus, e):
		pass
	@abstractmethod
	def length(self):
		pass
	@abstractmethod
	def __getitem__(self, index):
		pass
	@abstractmethod
	def saveListToFile(self, fileName):
		pass
	@abstractmethod
	def loadListFromFile(self, fileName):
		pass
class ModulusListImpl(ModulusList):
	def add(self, host, modulus, e):
		self._modulusList.append((host, modulus, e))
	def length(self):
		return len(self._modulusList)
	def __getitem__(self, index):
		return self._modulusList[index]
	def saveListToFile(self, fileName):
		saveFile = open(fileName, 'w')
		for record in self._modulusList:
			saveFile.write(str(record[0]) + '\n')
			saveFile.write(str(record[1]) + '\n')
			saveFile.write(str(record[2]) + '\n')
		saveFile.close()
	def loadListFromFile(self, fileName):
		loadFile = open(fileName, 'r')
		while True:
			host = loadFile.readline().rstrip()
			n = loadFile.readline().rstrip()
			e = loadFile.readline().rstrip()
			if not e: break
			self._modulusList.append((host, long(n), long(e)))
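# Example usage (a sketch; host and numeric values below are illustrative only):
#   moduli = ModulusListImpl()
#   moduli.add('example.com', long(1234567891011121314151617181920), long(65537))
#   moduli.saveListToFile('moduli.txt')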
 | 
	mit | 1,942,632,575,379,380,700 | 19.818182 | 53 | 0.6754 | false | 
| 
	kshimasaki/ML-fMRI-Pipeline | 
	Python_Code/dendrogram_specific_file.py | 
	1 | 
	6075 | 
	import scipy
import scipy.cluster.hierarchy as sch
import matplotlib.pylab as plt
import matplotlib.axes
import scipy.io as scio
import os
import numpy
import sklearn.metrics.pairwise
import scipy.spatial.distance
import sys
def cluster(network, subjects_or_features, gt_append , method_name):
	os.chdir('/Volumes/TITAN/Workingdir/Studies/Space_Fortress/Inputs/Neurodata/Features/Learn_Features/Pre/'+network+'_from_'+network+'/')
	root = '/Volumes/TITAN/Workingdir/Studies/Space_Fortress/Inputs/Neurodata/Features/Learn_Features/Pre/'+network+'_from_'+network+'/'
	 #for root, dirs, files in os.walk('/Volumes/TITAN/Workingdir/Studies/Space_Fortress/Inputs/Neurodata/Features/Learn_Features/Sub/'+network+'_from_'+network+'/Uncontrolled/No_GT/'+gt_append, topdown=False):
	 	#for matrix in files:
	 		#if  matrix.endswith("_Features.mat"):	 			
	matrix =  "All_Features.mat"
	current = os.path.join(root, matrix)
	data = scipy.io.loadmat(current)
	data = data['learnPATS'] #features
	if subjects_or_features == 0:
		data = numpy.rot90(data) #subjects
	n = len(data)
	p = len(data[0])
	sqeuclid = numpy.zeros((n, n))
	for i in range(0, n):
		for j in range(0, n):
			# reset the accumulator once per (i, j) pair; the original reset it
			# inside the k loop, which kept only the last feature's term
			sqeuclid[i][j] = 0
			for k in range(0, p):
				sqeuclid[i][j] = sqeuclid[i][j] + (data[i][k] - data[j][k])**2
	# compute the linkage once, outside the loop (it was needlessly recomputed per i)
	#d = sch.distance.pdist(data, 'euclidean')
	d = sch.distance.pdist(data)
	#numpy.savetxt("sqeuclid.csv", sqeuclid, delimiter=",")
	#proximity_features = scipy.spatial.distance.squareform(sqeuclid)
	Z = sch.linkage(sqeuclid, method='single')
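	# Vectorized alternative (a sketch): scipy.spatial.distance.cdist(data, data,
	# 'sqeuclidean') computes the same n x n squared-Euclidean matrix without loops.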
	P= sch.dendrogram(Z,truncate_mode='none')
	fig = plt.gcf()
	# fig.set_size_inches(125,60)
	if subjects_or_features == 1:
		matplotlib.rcParams.update({'font.size': 16})
		fig.set_size_inches(40,10)
	elif subjects_or_features == 0:
		matplotlib.rcParams.update({'font.size': 10})
		fig.set_size_inches(14,9)
	fig.set_size_inches(9000,100)
	fig.autofmt_xdate()
	plt.xticks(rotation=90)
	temp = '/Volumes/TITAN/Workingdir/Studies/Space_Fortress/Inputs/Neurodata/Features/Learn_Features/Pre/'+network+'_from_'+network+'/'
	#temp = temp.replace("Uncontrolled/No_GT/LOSO_Standard_with_MS/", "")
	#temp = temp.replace("Uncontrolled/No_GT/LOSO_Standard_No_MS/", "")
	#print temp
	name_matrix=matrix.replace('.mat','')
	fig.savefig(temp+name_matrix+"_"+method_name+"_Dendrogram.pdf")
	fig.clf()
	#print temp+name_matrix+"_"+method_name+"_Dendrogram.pdf"
	#Mlab_linkage= sch.to_mlab_linkage(Z)
	#scio.savemat(temp+"/"+name_matrix+"linkage"+".mat",{'Mlab_linkage': Mlab_linkage})
	#scio.savemat(temp+"/"+name_matrix+"dendro"+".mat",{'P': P})
	#R=sch.inconsistent(Z,d=2)
	#MR=sch.maxRstat(Z,R,3)
	#MI=sch.maxinconsts(Z,R)
	#final_clust_maxmono_5= sch.fcluster(Z,t=5,criterion='maxclust_monocrit',monocrit=MI)
	#scio.savemat(temp+"/"+name_matrix+"_maxmono_5_clusters"+".mat",{'final_clust_maxmono_5': final_clust_maxmono_5})
	#final_clust_mono_1= sch.fcluster(Z,t=1,criterion='monocrit',monocrit=MR)
	#scio.savemat(temp+"/"+name_matrix+"_mono_1_clusters"+".mat",{'final_clust_mono_1': final_clust_mono_1})
	#final_clust_dist_1= sch.fcluster(Z,t=1,criterion='distance')
	#scio.savemat(temp+"/"+name_matrix+"_dist_1_clusters"+".mat",{'final_clust_dist_1': final_clust_dist_1})
	increment = 0
	while increment <= 1:
		final_clust_incon_1=sch.fclusterdata(sqeuclid, increment)
		import csv
		table = [[]]
		temp_row = ["Cluster #", "Data id"]
		table.append(temp_row)
		#print sch.leaders(Z,final_clust_incon_1)
		print final_clust_incon_1
		for j in range(1, max(final_clust_incon_1) + 1):  # include the last cluster id
			print "cluster: ", j
			for i in range(0, len(final_clust_incon_1)):
				if final_clust_incon_1[i] == j:
					temp_row = [j, i]
					table.append(temp_row)
		#plot_tree(P, range(10))
		if not os.path.exists(temp+name_matrix+"_"+method_name+"_clusters"):
			os.makedirs(temp+name_matrix+"_"+method_name+"_clusters")
		with open(temp+name_matrix+"_"+method_name+"_clusters/"+str(increment)+".csv", 'w') as f:
			wrtr = csv.writer(f)
			wrtr.writerows(table)
		increment = increment + 0.25
	#numpy.savetxt(temp+name_matrix+"_"+method_name+"_test.csv", table, delimiter=",", fmt="%s")
	scio.savemat(temp+name_matrix+"_"+method_name+"_incon_1_clusters"+".mat",{'final_clust_incon_1': final_clust_incon_1})
	#final_clust_max_5=sch.fcluster(Z,t=5)
	#scio.savemat(temp+"/"+name_matrix+"_max_5_clusters"+".mat",{'final_clust_max_5': final_clust_max_5})
	#clustered_data_inc_1=sch.fclusterdata(Z,t=1)	
	#scio.savemat(temp+"/"+name_matrix+"_data_inc_1_clusters"+".mat",{'clustered_data_inc_1': clustered_data_inc_1})
	# os.chdir('/Volumes/Users/Aki_Undergrads/Workingdir/Studies/Space_Fortress/Inputs/Neurodata/Partial_Correlation_Matrices/ML_GT_RS_ProcessedSplit/Pre/Black_from_Black/Uncontrolled/')
	# mat = scipy.io.loadmat('0018001_matrix.mat')
	# X = scipy.randn(10,2)
	# X = mat['Matrix_0018001']
	# d = sch.distance.pdist(X)
	 
		
		# create all graphs at one time
		
def plot_tree( P, pos=None ):
    icoord = scipy.array( P['icoord'] )
    dcoord = scipy.array( P['dcoord'] )
    color_list = scipy.array( P['color_list'] )
    xmin, xmax = icoord.min(), icoord.max()
    ymin, ymax = dcoord.min(), dcoord.max()
    if pos:
        icoord = icoord[pos]
        dcoord = dcoord[pos]
        color_list = color_list[pos]
    for xs, ys, color in zip(icoord, dcoord, color_list):
        plt.plot(xs, ys,  color)
    plt.xlim( xmin-10, xmax + 0.1*abs(xmax) )
    plt.ylim( ymin, ymax + 0.1*abs(ymax) )
    plt.show()
if __name__ == '__main__':
	testVAR = sys.argv[1]
	testVAR2 = sys.argv[2]
	testVAR3 = sys.argv[3]
	method_name = sys.argv[4]
	# testVAR=raw_input("What network should be used? ")
	# testVAR2=raw_input("What analysis should be used? ")
	# testVAR3=raw_input("subjects or features?")
	while(1):
		if testVAR3 == "subjects" or testVAR3 == "Subjects":
			testVAR3 = 0
			break
		elif testVAR3 == "features" or testVAR3 == "Features":
			testVAR3 = 1
			break
		else:
			print "Please type either \"subjects\" or \"features\""
	
	# method_name=raw_input("What should I name this? ")
	cluster(testVAR, testVAR3, testVAR2, method_name)
 | 
	gpl-2.0 | 5,425,072,207,551,465,000 | 33.913793 | 207 | 0.677695 | false | 
| 
	pf4d/dolfin-adjoint | 
	dolfin_adjoint/adjrhs.py | 
	1 | 
	10667 | 
	import libadjoint
import backend
import ufl
import ufl.algorithms
import adjglobals
import adjlinalg
import utils
import hashlib  # used by RHS.__str__
def find_previous_variable(var):
    ''' Returns the previous instance of the given variable. '''
    for timestep in range(var.timestep, -1, -1):
        prev_var = libadjoint.Variable(var.name, timestep, 0)
        if adjglobals.adjointer.variable_known(prev_var):
            prev_var.var.iteration = prev_var.iteration_count(adjglobals.adjointer) - 1
            return prev_var
    raise libadjoint.exceptions.LibadjointErrorInvalidInputs('No previous variable found')
def _extract_function_coeffs(form):
    for c in ufl.algorithms.extract_coefficients(form):
        if isinstance(c, backend.Function):
            yield c
class RHS(libadjoint.RHS):
    '''This class implements the libadjoint.RHS abstract base class for the Dolfin adjoint.
    It takes in a form, and implements the necessary routines such as calling the right-hand side
    and taking its derivative.'''
    def __init__(self, form):
        self.form=form
        if isinstance(self.form, ufl.form.Form):
            self.deps = [adjglobals.adj_variables[coeff] for coeff in _extract_function_coeffs(self.form)]
        else:
            self.deps = []
        if isinstance(self.form, ufl.form.Form):
            self.coeffs = [coeff for coeff in _extract_function_coeffs(self.form)]
        else:
            self.coeffs = []
    def __call__(self, dependencies, values):
        if isinstance(self.form, ufl.form.Form):
            dolfin_dependencies=[dep for dep in _extract_function_coeffs(self.form)]
            dolfin_values=[val.data for val in values]
            return adjlinalg.Vector(backend.replace(self.form, dict(zip(dolfin_dependencies, dolfin_values))))
        else:
            # RHS is an adjlinalg.Vector.
            assert isinstance(self.form, adjlinalg.Vector)
            return self.form
    def derivative_action(self, dependencies, values, variable, contraction_vector, hermitian):
        if contraction_vector.data is None:
            return adjlinalg.Vector(None)
        if isinstance(self.form, ufl.form.Form):
            # Find the dolfin Function corresponding to variable.
            dolfin_variable = values[dependencies.index(variable)].data
            dolfin_dependencies = [dep for dep in _extract_function_coeffs(self.form)]
            dolfin_values = [val.data for val in values]
            current_form = backend.replace(self.form, dict(zip(dolfin_dependencies, dolfin_values)))
            trial = backend.TrialFunction(dolfin_variable.function_space())
            d_rhs = backend.derivative(current_form, dolfin_variable, trial)
            if hermitian:
                action = backend.action(backend.adjoint(d_rhs), contraction_vector.data)
            else:
                action = backend.action(d_rhs, contraction_vector.data)
            return adjlinalg.Vector(action)
        else:
            # RHS is an adjlinalg.Vector. Its derivative is therefore zero.
            return adjlinalg.Vector(None)
    def second_derivative_action(self, dependencies, values, inner_variable, inner_contraction_vector, outer_variable, hermitian, action_vector):
        if isinstance(self.form, ufl.form.Form):
            # Find the dolfin Function corresponding to variable.
            dolfin_inner_variable = values[dependencies.index(inner_variable)].data
            dolfin_outer_variable = values[dependencies.index(outer_variable)].data
            dolfin_dependencies = [dep for dep in _extract_function_coeffs(self.form)]
            dolfin_values = [val.data for val in values]
            current_form = backend.replace(self.form, dict(zip(dolfin_dependencies, dolfin_values)))
            trial = backend.TrialFunction(dolfin_outer_variable.function_space())
            d_rhs = backend.derivative(current_form, dolfin_inner_variable, inner_contraction_vector.data)
            d_rhs = ufl.algorithms.expand_derivatives(d_rhs)
            if len(d_rhs.integrals()) == 0:
                return None
            d_rhs = backend.derivative(d_rhs, dolfin_outer_variable, trial)
            d_rhs = ufl.algorithms.expand_derivatives(d_rhs)
            if len(d_rhs.integrals()) == 0:
                return None
            if hermitian:
                action = backend.action(backend.adjoint(d_rhs), action_vector.data)
            else:
                action = backend.action(d_rhs, action_vector.data)
            return adjlinalg.Vector(action)
        else:
            # RHS is an adjlinalg.Vector. Its derivative is therefore zero.
            raise libadjoint.exceptions.LibadjointErrorNotImplemented("No derivative method for constant RHS.")
    def dependencies(self):
        return self.deps
    def coefficients(self):
        return self.coeffs
    def __str__(self):
        return hashlib.md5(str(self.form)).hexdigest()
class NonlinearRHS(RHS):
    '''For nonlinear problems, the source term isn't assembled in the usual way.
    If the nonlinear problem is given as
    F(u) = 0,
    we annotate it as
    M.u = M.u - F(u) .
    So in order to actually assemble the right-hand side term,
    we first need to solve F(u) = 0 to find the specific u,
    and then multiply that by the mass matrix.'''
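    # Hedged illustration (not from the original source): for a toy problem
    # F(u) = u - g with mass matrix M, the annotation above reads
    #   M.u = M.u - (u - g),
    # so "assembling the right-hand side" means first solving F(u) = 0 for u
    # (here trivially u = g) and then applying M to that solution.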
    def __init__(self, form, F, u, bcs, mass, solver_parameters, J):
        '''form is M.u - F(u). F is the nonlinear equation, F(u) := 0.'''
        RHS.__init__(self, form)
        self.F = F
        self.u = u
        self.bcs = bcs
        self.mass = mass
        self.solver_parameters = solver_parameters
        self.J = J or backend.derivative(F, u)
        # We want to mark that the RHS term /also/ depends on
        # the previous value of u, as that's what we need to initialise
        # the nonlinear solver.
        var = adjglobals.adj_variables[self.u]
        self.ic_var = None
        if backend.parameters["adjoint"]["fussy_replay"]:
            can_depend = True
            try:
                prev_var = find_previous_variable(var)
            except:
                can_depend = False
            if can_depend:
                self.ic_var = prev_var
                self.deps += [self.ic_var]
                self.coeffs += [u]
            else:
                self.ic_copy = backend.Function(u)
                self.ic_var = None
    def __call__(self, dependencies, values):
        assert isinstance(self.form, ufl.form.Form)
        ic = self.u.function_space() # by default, initialise with a blank function in the solution FunctionSpace
        if hasattr(self, "ic_copy"):
            ic = self.ic_copy
        replace_map = {}
        for i in range(len(self.deps)):
            if self.deps[i] in dependencies:
                j = dependencies.index(self.deps[i])
                if self.deps[i] == self.ic_var:
                    ic = values[j].data # ahah, we have found an initial condition!
                else:
                    replace_map[self.coeffs[i]] = values[j].data
        current_F    = backend.replace(self.F, replace_map)
        current_J    = backend.replace(self.J, replace_map)
        u = backend.Function(ic)
        current_F    = backend.replace(current_F, {self.u: u})
        current_J    = backend.replace(current_J, {self.u: u})
        vec = adjlinalg.Vector(None)
        vec.nonlinear_form = current_F
        vec.nonlinear_u = u
        vec.nonlinear_bcs = self.bcs
        vec.nonlinear_J = current_J
        return vec
    def derivative_action(self, dependencies, values, variable, contraction_vector, hermitian):
        '''If variable is the variable for the initial condition, we want to ignore it,
        and set the derivative to zero. Assuming the solver converges, the sensitivity of
        the solution to the initial condition should be extremely small, and computing it
        is very difficult (one would have to do a little adjoint solve to compute it).
        Even I'm not that fussy.'''
        if variable == self.ic_var:
            deriv_value = values[dependencies.index(variable)].data
            return adjlinalg.Vector(None, fn_space=deriv_value.function_space())
        else:
            return RHS.derivative_action(self, dependencies, values, variable, contraction_vector, hermitian)
    def second_derivative_action(self, dependencies, values, inner_variable, inner_contraction_vector, outer_variable, hermitian, action):
        '''If variable is the variable for the initial condition, we want to ignore it,
        and set the derivative to zero. Assuming the solver converges, the sensitivity of
        the solution to the initial condition should be extremely small, and computing it
        is very difficult (one would have to do a little adjoint solve to compute it).
        Even I'm not that fussy.'''
        if inner_variable == self.ic_var or outer_variable == self.ic_var:
            deriv_value = values[dependencies.index(outer_variable)].data
            return adjlinalg.Vector(None, fn_space=deriv_value.function_space())
        else:
            return RHS.second_derivative_action(self, dependencies, values, inner_variable, inner_contraction_vector, outer_variable, hermitian, action)
    def derivative_assembly(self, dependencies, values, variable, hermitian):
        replace_map = {}
        for i in range(len(self.deps)):
            if self.deps[i] == self.ic_var: continue
            j = dependencies.index(self.deps[i])
            replace_map[self.coeffs[i]] = values[j].data
        diff_var = values[dependencies.index(variable)].data
        current_form = backend.replace(self.form, replace_map)
        deriv = backend.derivative(current_form, diff_var)
        if hermitian:
            deriv = backend.adjoint(deriv)
            bcs = [utils.homogenize(bc) for bc in self.bcs if isinstance(bc, backend.DirichletBC)] + [bc for bc in self.bcs if not isinstance(bc, backend.DirichletBC)]
        else:
            bcs = self.bcs
        return adjlinalg.Matrix(deriv, bcs=bcs)
def adj_get_forward_equation(i):
    (fwd_var, lhs, rhs) = adjglobals.adjointer.get_forward_equation(i)
    # We needed to cheat the annotation when we registered a nonlinear solve.
    # However, if we want to actually differentiate the form (e.g. to compute
    # the dependency of the form on a ScalarParameter) we're going to need
    # the real F(u) = 0 back again. So let's fetch it here:
    if hasattr(rhs, 'nonlinear_form'):
        lhs = rhs.nonlinear_form
        fwd_var.nonlinear_u = rhs.nonlinear_u
        rhs = 0
    else:
        lhs = lhs.data
        rhs = rhs.data
    return (fwd_var, lhs, rhs)
 | 
	lgpl-3.0 | -4,020,333,220,749,036,500 | 38.507407 | 167 | 0.630355 | false | 
| 
	loadimpact/http2-test | 
	fabfile.py | 
	1 | 
	11989 | 
	"""
We need two special hostnames to connect to: 
StationA
and 
StationB
"""
from __future__ import with_statement, print_function
from fabric.api import local, settings, abort, run, sudo, cd, hosts, env, execute
from fabric.contrib.console import confirm
from fabric.operations import put, get
from fabric.contrib.project import rsync_project
import re
import subprocess as sp 
import os.path 
from   StringIO import StringIO
current_dir = os.path.dirname(os.path.realpath(__file__))
VNC_LICENSE = [
    "xxxxx-xxxxx-xxxxx-xxxxx-xxxxx"
]
# TODO: Put a proper deployment mechanism here.
env.key_filename = '/home/alcides/.ssh/zunzun_ec2_keypair_0.pem'
StationA_H = '[email protected]'
StationB_H = '[email protected]'
Beefy_H    = '[email protected]'
# TODO: Make the IP number below deployment-specific...
Beefy_InternalIP = '192.168.112.131'
StationA_InternalIP = '192.168.112.129'
StationB_InternalIP = '192.168.112.130'
HomeDir_Name = "ubuntu"
@hosts(StationA_H)
def StationA():
    """
    Copies code to StationA 
    """
    rsync_project(
        local_dir = "scripts/StationA",
        remote_dir = ("/home/{HomeDir_Name}/".format(HomeDir_Name=HomeDir_Name))
        )
    run("ln -sf /home/{HomeDir_Name}/StationA/onstartup.py /home/{HomeDir_Name}/onstartup.py".format(HomeDir_Name=HomeDir_Name))
@hosts(StationB_H)
def setup_dns_masq():
    sudo("apt-get install -y dnsmasq")
    put(
        local_path  = StringIO("addn-hosts=/home/{HomeDir_Name}/dnsmasq_more.conf\n".format(HomeDir_Name=HomeDir_Name)),
        remote_path = "/etc/dnsmasq.conf",
        use_sudo=True)
@hosts(StationB_H)
def StationB():
    """
    Copies both the chrome plugin and the DNSMasq watcher 
    """
    rsync_project(
        local_dir = "scripts/StationB",
        remote_dir = ("/home/{HomeDir_Name}/".format(HomeDir_Name=HomeDir_Name))
        )
    rsync_project(
        local_dir = "scripts/StationA/chrome_captures_hars",
        remote_dir = ("/home/{HomeDir_Name}/StationB/".format(HomeDir_Name=HomeDir_Name))
        )
    run("ln -sf /home/{HomeDir_Name}/StationB/onstartup.py /home/{HomeDir_Name}/onstartup.py".format(HomeDir_Name=HomeDir_Name))
@hosts(StationB_H)
def install_updatednsmasq_service():
    with settings(warn_only=True):
        sudo("service updatednsmasq stop")
    put(
        local_path = "scripts/StationB/configure_dnsmasq.py",
        remote_path = "/home/{HomeDir_Name}/StationB/configure_dnsmasq.py".format(HomeDir_Name=HomeDir_Name) ,
        use_sudo = True
        )
    put(
        local_path = StringIO("""
description "Update dnsmasq"
start on runlevel [2345]
stop on runlevel [!2345]
umask 022
console log
env PATH=/opt/openssl-1.0.2/bin/:/usr/bin:/usr/local/bin:/usr/sbin:/bin 
export PATH
env LD_LIBRARY_PATH=/opt/openssl-1.0.2/lib
export LD_LIBRARY_PATH
env USER={HomeDir_Name}
export USER
script 
    exec /usr/bin/python /home/{HomeDir_Name}/StationB/configure_dnsmasq.py
end script
""".format(HomeDir_Name=HomeDir_Name)),
        remote_path = "/etc/init/updatednsmasq.conf",
        use_sudo=True )
    sudo("service updatednsmasq start")
@hosts(Beefy_H)
def Beefy():
    sudo("apt-get update")
    sudo("apt-get -y install libgmp-dev")
@hosts(Beefy_H)
def BeefyRehMimic():
    with settings(warn_only=True):
        sudo("service mimic stop")
    put(
        local_path  = "dist/build/reh-mimic/reh-mimic",
        remote_path = "/home/{HomeDir_Name}/reh-mimic".format(HomeDir_Name=HomeDir_Name)
        )
    run("chmod ugo+x /home/{HomeDir_Name}/reh-mimic".format(HomeDir_Name=HomeDir_Name))
    sudo("rm /home/{HomeDir_Name}/mimic -rf".format(HomeDir_Name=HomeDir_Name) )
    rsync_project(
        local_dir  = "mimic",
        remote_dir = "/home/{HomeDir_Name}/".format(HomeDir_Name=HomeDir_Name),
        )
    put(
        local_path  = "scripts/mimic.conf",
        remote_path = "/etc/init/mimic.conf",
        use_sudo    = True
        )
    sudo("touch /root/.rnd")
    sudo("service mimic start")
@hosts(Beefy_H, StationA_H, StationB_H )
def configure_logging():
    if env.host_string == StationA_H:
        put(
            local_path = StringIO("""$template Logentries,"199fb2e1-8227-4f73-9150-70a34a5d5e0c %HOSTNAME% %syslogtag%%msg%\\n"
*.* @@api.logentries.com:10000;Logentries"""),
            remote_path = "/etc/rsyslog.d/70-logentries.conf",
            use_sudo = True )
    elif env.host_string == StationB_H:
        put(
            local_path = StringIO("""$template Logentries,"3d2fd756-407a-4764-b130-1dd6f22a1b62 %HOSTNAME% %syslogtag%%msg%\\n"
*.* @@api.logentries.com:10000;Logentries"""),
            remote_path = "/etc/rsyslog.d/70-logentries.conf",
            use_sudo = True )
    else:
        put(
            local_path = StringIO("""$template Logentries,"7551d4e0-fa76-466f-8547-8c9a347a9363 %HOSTNAME% %syslogtag%%msg%\\n"
*.* @@api.logentries.com:10000;Logentries"""),
            remote_path = "/etc/rsyslog.d/70-logentries.conf",
            use_sudo = True )
        
    sudo("service rsyslog restart")
    # Check logging works...
    sudo("logger -t test Hello there Logentries")
@hosts (StationA_H, StationB_H)
def deploy_specific():
    if env.host_string == StationA_H:
        print("StationA deploy")
        StationA()
    elif env.host_string == StationB_H:
        print("StationB deploy")
        StationB()
    else: 
        print("Beefy station deploy")
        Beefy()
@hosts(StationA_H, StationB_H)
def apt_stations():
    sudo("apt-get update")
    sudo("apt-get install -y xutils xbase-clients xfonts-base xfonts-75dpi xfonts-100dpi")
    sudo("apt-get install -y python-pip")
    sudo("apt-get install -y xdotool")
    sudo("apt-get install -y xfwm4") 
@hosts(StationA_H, StationB_H)
def pythonlibs():
    sudo("pip install python-daemon>=2.0")
    sudo("pip install raven")
@hosts(Beefy_H, StationA_H, StationB_H)
def ssl():
    """
    Copies Openssl and curl to the target hosts...
    """
    sudo("mkdir -p /opt/openssl-1.0.2/")
    sudo(("chown {HomeDir_Name} /opt/openssl-1.0.2/".format(HomeDir_Name=HomeDir_Name)))
    rsync_project(
        local_dir = "/opt/openssl-1.0.2",
        remote_dir = "/opt/",
        extra_opts="-avz"
        )
    put(
        local_path = "scripts/ca-certificates.crt",
        remote_path = "/etc/ssl/certs/ca-certificates.crt",
        use_sudo = True
        )
@hosts(Beefy_H, StationA_H, StationB_H)
def ca():
    """
    Copies the ca certificate to the home...
    """
    put(
        local_path = "mimic-here/config/ca/cacert.pem",
        remote_path = ("/home/{HomeDir_Name}/cacert.pem".format(HomeDir_Name=HomeDir_Name)),
        use_sudo = True
        )
@hosts(StationA_H, StationB_H)
def install_vnc():
    """
    
    """
    # run("curl -L -o VNC.tar.gz https://www.realvnc.com/download/binary/1720/")
    # run("tar xvf VNC-5.2.3-Linux-x64-ANY.tar.gz")
    put(
        local_path = "scripts/VNC-5.2.3-Linux-x64-ANY.tar.gz",
        remote_path = ("/home/{HomeDir_Name}/VNC-5.2.3-Linux-x64-ANY.tar.gz".format(HomeDir_Name=HomeDir_Name)),
        use_sudo = True
        )
    run(("tar -xzf /home/{HomeDir_Name}/VNC-5.2.3-Linux-x64-ANY.tar.gz".format(HomeDir_Name=HomeDir_Name)))
    # Get a handier name.... 
    run("rm -rf vnc")
    run(("mv /home/{HomeDir_Name}/VNC-5.2.3-Linux-x64 /home/{HomeDir_Name}/vnc".format(HomeDir_Name=HomeDir_Name)))
    sudo(("/home/{HomeDir_Name}/vnc/vnclicense -add {VncLicense}".format(
        HomeDir_Name= HomeDir_Name,
        VncLicense = VNC_LICENSE[0]
    )))
    # Will demand some form of interactive input...
    run(("mkdir -p /home/{HomeDir_Name}/.vnc/".format(HomeDir_Name=HomeDir_Name)))
    run(("mkdir -p /home/{HomeDir_Name}/.vnc/config.d/".format(HomeDir_Name=HomeDir_Name)))
    sudo(("/home/{HomeDir_Name}/vnc/vncpasswd /home/{HomeDir_Name}/.vnc/config.d/Xvnc".format(HomeDir_Name=HomeDir_Name)))
    vnc_fix_permissions()
@hosts(StationA_H, StationB_H)
def vnc_fix_permissions():
    sudo(("chown {HomeDir_Name} /home/{HomeDir_Name}/.vnc -R").format(HomeDir_Name=HomeDir_Name))
@hosts(StationA_H, StationB_H)
def install_vnc_xstartup():
    run(("mkdir -p /home/{HomeDir_Name}/.vnc/".format(HomeDir_Name=HomeDir_Name)))
    run(("mkdir -p /home/{HomeDir_Name}/.vnc/config.d/".format(HomeDir_Name=HomeDir_Name)))
    put(
        local_path = "scripts/vnc-xstartup",
        remote_path = ("/home/{HomeDir_Name}/.vnc/xstartup".format(HomeDir_Name=HomeDir_Name))
        )
    run("chmod ugo+x /home/{HomeDir_Name}/.vnc/xstartup".format(HomeDir_Name=HomeDir_Name))
    put(
        local_path = "scripts/xvncfontpath",
        remote_path = ("/home/{HomeDir_Name}/.vnc/config.d/xvncfontpath".format(HomeDir_Name=HomeDir_Name))
        )
@hosts(StationA_H, StationB_H)
def setup_google_chrome():
    put(
        local_path = "scripts/google-chrome-stable_current_amd64.deb",
        remote_path = ("/home/{HomeDir_Name}/google-chrome-stable_current_amd64.deb".format(HomeDir_Name=HomeDir_Name)),
        use_sudo = True
        )
    really_setup_google_chrome()
@hosts(Beefy_H, StationA_H, StationB_H)
def ensure_local_hosts():
    # Get the contents of /etc/hosts
    local_file = StringIO()
    get(
        local_path = local_file,
        remote_path = "/etc/hosts",
        use_sudo = True 
        )
    hosts_file = local_file.getvalue()
    snippet =  """# DO NOT EDIT BELOW BY HAND
{Beefy_InternalIP} instr.httpdos.com
192.168.112.129 ip-192-168-112-129
192.168.112.130 ip-192-168-112-130
192.168.112.131 ip-192-168-112-131
# END DO NOT EDIT BELOW""".format(
    StationA_InternalIP = StationA_InternalIP,
    Beefy_InternalIP    = Beefy_InternalIP
    )
    mo = re.search(r"# DO NOT EDIT BELOW BY HAND\n(.*?)\n# END DO NOT EDIT BELOW", hosts_file, re.DOTALL)  # DOTALL so (.*?) can span the multi-line block
    if mo:
        part_before = hosts_file[:mo.start(0)]
        part_after = hosts_file[mo.end(0):]
        hosts_file = part_before + snippet + part_after
    else:
        hosts_file += "\n" + snippet
    put(
        local_path = StringIO(hosts_file),
        remote_path = "/etc/hosts",
        use_sudo = True
        )
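# A minimal, testable sketch of the marker-splice pattern used in
# ensure_local_hosts above; the helper name is ours, not part of the
# original fabfile.  re.DOTALL lets (.*?) span the multi-line block, and the
# snippet is expected to carry its own begin/end markers so the edit stays
# idempotent across runs.
def splice_marked_block(contents, snippet,
                        begin="# DO NOT EDIT BELOW BY HAND",
                        end="# END DO NOT EDIT BELOW"):
    """Replace the begin/end-marked block in contents, or append snippet."""
    mo = re.search(re.escape(begin) + r"\n(.*?)\n" + re.escape(end),
                   contents, re.DOTALL)
    if mo:
        return contents[:mo.start(0)] + snippet + contents[mo.end(0):]
    return contents + "\n" + snippet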
@hosts(StationA_H, StationB_H)
def really_setup_google_chrome():
    sudo("apt-get update")
    sudo(("apt-get -f install -y".format(HomeDir_Name=HomeDir_Name)))
    sudo("apt-get install -y --fix-missing xdg-utils")
    sudo(("dpkg -i --force-depends /home/{HomeDir_Name}/google-chrome-stable_current_amd64.deb".format(HomeDir_Name=HomeDir_Name)))
    sudo(("apt-get -f install -y".format(HomeDir_Name=HomeDir_Name)))
@hosts(StationA_H, StationB_H)
def setup_vnc_service():
    put(
        local_path = "scripts/vncserv-{HomeDir_Name}.conf".format(HomeDir_Name=HomeDir_Name),
        remote_path = "/etc/init/vncserv.conf",
        use_sudo = True
        )
    put(
        local_path = "scripts/undaemon.py",
        remote_path = "/home/{HomeDir_Name}/undaemon.py".format(HomeDir_Name=HomeDir_Name)
        )
    run("chmod ugo+x /home/{HomeDir_Name}/undaemon.py".format(HomeDir_Name=HomeDir_Name))
    with settings(warn_only=True):
        sudo(
            "service vncserv start"
            )
@hosts(StationA_H, StationB_H)
def disable_lightdm():
    contents = StringIO("manual")
    put(
        local_path = contents, 
        remote_path = "/etc/init/lightdm.override",
        use_sudo=True
        )
@hosts(StationA_H, StationB_H)
def touch_xauthority():
    run("touch $HOME/.Xauthority")
@hosts(StationA_H, StationB_H)
def deploy():
    execute(apt_stations)
    execute(setup_dns_masq)
    execute(setup_google_chrome)
    execute(deploy_specific)
    execute(touch_xauthority)
    execute(disable_lightdm)
    execute(StationA)
    execute(StationB)
    execute(Beefy)
    execute(ca)
    execute(ssl)
    execute(install_vnc)
    execute(install_vnc_xstartup)
    execute(ensure_local_hosts)
    execute(setup_vnc_service)
    execute(pythonlibs)
    execute(BeefyRehMimic)
    execute(install_updatednsmasq_service)    
 | 
	bsd-3-clause | -2,658,566,617,386,937,000 | 29.820051 | 131 | 0.63967 | false | 
| 
	iceflow/aws-demo | 
	s3/s3-bucket-copy/existing-bucket-copy/solutions/2-different-account-same-region/check_sqs_list.py | 
	1 | 
	1564 | 
	#!/usr/bin/python
# -*- coding: utf8 -*-
from pprint import pprint
import sys, os
import random
import json
import gzip
import boto3
s3 = boto3.resource('s3')
client = boto3.client('sqs')
QUEUE_ENDPOINT='https://eu-west-1.queue.amazonaws.com/888250974927/s3-copy-list'
DST_BUCKET='ireland-leo-test'
def check_queue_status(qurl):
    #print('check_queue_status(%s)'%(qurl))
    #return {'number':0}
    response = client.get_queue_attributes(
        QueueUrl=qurl,
        AttributeNames=[
            'All'
        ]
    )
    #pprint(response)
    #{u'Attributes': {'ApproximateNumberOfMessages': '1',
    message_number=0
    if 'Attributes' in response:
        if 'ApproximateNumberOfMessages' in response['Attributes'] and 'ApproximateNumberOfMessagesNotVisible' in response['Attributes']:
            message_number=int(response['Attributes']['ApproximateNumberOfMessages'])
            not_visiable_message_number=int(response['Attributes']['ApproximateNumberOfMessagesNotVisible'])
            if message_number>0 or not_visiable_message_number>0:
                #print('%04d/%04d : %s'%(message_number, not_visiable_message_number, qurl))
                pass
                
    return {'number':message_number}
if __name__ == '__main__':
    qurl_endpoint=sys.argv[1]
    q_number=int(sys.argv[2])
    total_number=0
    for pos in xrange(q_number):
        response = check_queue_status('{0}-{1}'.format(qurl_endpoint, pos+1))
        total_number+=response['number']
        
    print total_number*100
    sys.exit(0)
 | 
	gpl-3.0 | 2,473,053,559,471,726,000 | 25.066667 | 137 | 0.641944 | false | 
| 
	burunduk3/testsys | 
	console.py | 
	1 | 
	19930 | 
	#!/usr/bin/env python3
import argparse, select, socket, sys, termios, time
from dts import config
from dts.protocol import Packet, PacketParser
class Console:
    NORMAL, LOCK, WAIT = range(3)
    def __init__( self, tab, command ):
        self.state = Console.NORMAL
        self.value = None
        self.__line = ''
        self.__message = ''
        self.__buffer = ''
        self.__tab = tab
        self.__command = command
        self.__start = 0
        self.__cursor = 0
        self.__width = 80
        # TODO: calculate width using termios and renew it using SIGWINCH
        self.__history = []
        self.__current = [x for x in self.__history] + [self.__line]
        self.__position = len(self.__history)
        self.__input = None
    def __enter( self ):
        if not self.__line:
            return
        self.__command(self.__line)
        if self.__line in self.__history:
            self.__history.remove(self.__line)
        self.__history.append(self.__line)
        if len(self.__history) > 100:
            del self.__history[0]
        self.__line = ''
        self.__cursor = 0
        self.__current = [x for x in self.__history] + [self.__line]
        self.__position = len(self.__history)
    def lock( self, message, value ):
        self.value = value
        self.state = Console.LOCK
        self.__message = message
        print('\r\033[K' + self.__message, end='')
    def unlock( self, message, wait=True ):
        self.state = Console.WAIT if wait else Console.NORMAL
        self.__message = message
        print('\r\033[K' + self.__message, end='')
        if not wait:
            self.write(self.__buffer)
            self.__buffer = ''
            self.redraw ()
    def write( self, text, force = False ):
        if len(text) != 0 and text[-1] != '\n':
            text += '%\n' # zsh style
        if self.state is Console.LOCK or force:
            print('\r\033[K' + text + self.__message, end='')
        elif self.state is Console.WAIT:
            self.__buffer += text
        else:
            print('\r\033[K' + text, end='')
            self.redraw()
        sys.stdout.flush()
    def redraw( self ):
        assert self.state is Console.NORMAL
        if self.__cursor < self.__start:
            self.__start = self.__cursor
        if self.__cursor - self.__start + 2 >= self.__width:
            self.__start = self.__cursor + 3 - self.__width
        if self.__cursor != len(self.__line) and self.__cursor - self.__start + 3 == self.__width:
            self.__start += 1
        start = '\033[1m<\033[0m' if self.__start != 0 else ' '
        finish = '\033[1m>\033[0m' if self.__start + self.__width - 2 < max(len(self.__line), self.__cursor + 1) else ''
        visual = self.__line[self.__start:self.__start + self.__width - (3 if finish != '' else 2)]
        move = self.__start + len(visual) + (1 if finish != '' else 0) - self.__cursor
        print('\r\033[K>' + start + visual + finish + ('\033[%dD' % move if move != 0 else ''), end='')
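    # Worked example of the windowing math above (illustrative numbers):
    # with __width = 10 the view shows width-2 = 8 cells; for a 20-character
    # line with the cursor at column 15, __start becomes 15 + 3 - 10 = 8,
    # then shifts once more because the cursor is not at the end of the
    # line, and the bold '<'/'>' markers flag clipped text on either side.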
    def add( self, text ):
        if self.state is Console.LOCK:
            return
        for key in text:
            # TODO: setup keys for different terminals
            if self.__input is not None:
                self.__input += key
                if self.__input == "[A":
                    if self.__position != 0:
                        self.__current[self.__position] = self.__line
                        self.__position -= 1
                        self.__line = self.__current[self.__position]
                        self.__cursor = len(self.__line)
                        self.__start = 0
                elif self.__input == "[B":
                    if self.__position != len(self.__history):
                        self.__current[self.__position] = self.__line
                        self.__position += 1
                        self.__line = self.__current[self.__position]
                        self.__cursor = len(self.__line)
                        self.__start = 0
                elif self.__input == "[C":
                    self.__cursor = min(len(self.__line), self.__cursor + 1)
                elif self.__input == "[D":
                    self.__cursor = max(0, self.__cursor - 1)
                elif self.__input == "[1;5C":
                    pass # Ctrl+←
                elif self.__input == "[1;5D":
                    pass # Ctrl+→
                elif self.__input == "[1~":
                    self.__cursor = 0
                elif self.__input == "[3~":
                    self.__line = self.__line[0:self.__cursor] + self.__line[self.__cursor + 1:]
                elif self.__input == "[4~":
                    self.__cursor = len(self.__line)
                elif len(self.__input) > 5:
                    self.write("[debug] unknown escape sequence: \e%s\n" % self.__input)
                else:
                    continue
                self.__input = None
                continue
            if self.state is Console.WAIT:
                self.state = Console.NORMAL
                self.write(self.__buffer)
                self.__buffer = ''
                continue
            if ord(key) >= 0x20 and ord(key) != 0x7f:
                self.__line = self.__line[0:self.__cursor] + key + self.__line[self.__cursor:]
                self.__cursor += 1
            elif ord(key) == 0x09:
                bonus, result = self.__tab(self.__line[0:self.__cursor])
                if bonus:
                    result += ' '
                if result is not None:
                    self.__line = result + self.__line[self.__cursor:]
                    self.__cursor = len(result)
            elif ord(key) == 0x0a:
                self.__enter()
            elif ord(key) == 0x17:
                pass # Ctrl+W (0x23 is '#', already consumed by the printable branch)
            elif ord(key) == 0x7f:
                if self.__cursor == 0:
                    continue
                self.__cursor -= 1
                self.__line = self.__line[0:self.__cursor] + self.__line[self.__cursor + 1:]
            elif ord(key) == 0x1b:
                self.__input = ''
            else:
                global count
                count += 1
                self.write("[debug] count = %d, key=%d\n" % (count, ord(key)), force=True)
        self.redraw()
def tab( line ):
    # TODO: optimize with prefix tree
    commands = config.commands
    targets = sorted([x for x in commands if x.startswith(line)])
    if len(targets) == 0:
        return (False, None)
    if len(targets) == 1:
        return (True, targets[0])
    index = 0
    while index < len(targets[0]) and index < len(targets[1]) and targets[0][index] == targets[1][index]:
        index += 1
    if index > len(line):
        return (False, targets[0][0:index])
    console.write(' '.join(targets) + '\n')
    return (False, None)
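# Worked example of the completion rule above (illustrative values): with
# config.commands = ["status", "stop", "submit"], tab("st") sorts the matches
# to ["status", "stop"], finds the shared prefix "st" (no longer than the
# input), prints both candidates and returns (False, None); tab("sta")
# returns (True, "status") and the caller appends a trailing space.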
def console_command( line ):
    queue.append((command, line))
def command( line ):
    global command_id
    packet_id = "id_%08d" % command_id
    command_id += 1
    console.lock("*** waiting for testsys answer ***", packet_id)
    s.send(Packet({'ID': packet_id, 'Command': line})())
parser = argparse.ArgumentParser(description="Text console for testsys.")
parser.add_argument('--password-file', '-p', action='store', dest='key_file', required=True)
parser.add_argument('--name', '-n', action='store', dest='name', help='Displayed name.')
parser.add_argument('--contest', '-c', action='store', dest='contest', help='Select contest.')
parser.add_argument('--msglevel', '-m', action='store', dest='msglevel', help='Initial testsys verbosity level.')
parser.add_argument('server', metavar='<host>:<port>', help='Address of testsys server.')
args = parser.parse_args()
fd = sys.stdin.fileno()
tty_attr_old = termios.tcgetattr(fd)
attributes = termios.tcgetattr(fd)
attributes[3] &= ~(termios.ECHO | termios.ICANON)
attributes[6][termios.VMIN] = 0
termios.tcsetattr(fd, termios.TCSADRAIN, attributes)
console = Console(tab, console_command)
console.lock('*** connecting to testsys ***', "id_pre0")
command_id = 0
reconnect_id = 0
with open(args.key_file, 'rb') as key_file:
    key = key_file.read().decode(config.encoding)
    assert len(key) == 256, "invalid key: should be 256 bytes length"
# TODO: default port 17240, or not
host, port = args.server.split(':')
port = int(port)
poll = select.epoll()
poll.register(sys.stdin, select.EPOLLIN)
try:
    s = socket.socket()
    poll.register(s, select.EPOLLIN)
    s.connect((host, port))
    s.send(Packet({'Password': key, 'Command': "ver", 'ID': "id_pre0"})())
    if args.msglevel is not None:
        console.value = "id_pre1"
        s.send(Packet({'Command': "msg_level " + args.msglevel, 'ID': "id_pre1"})())
    if args.name is not None:
        console.value = "id_pre2"
        s.send(Packet({'Command': "name " + args.name, 'ID': "id_pre2"})())
    if args.contest is not None:
        console.value = "id_pre3"
        s.send(Packet({'Command': "select_contest " + args.contest, 'ID': "id_pre3"})())
except KeyboardInterrupt:
    console.lock("terminated by KeyboardInterrupt", "never")
    print("");
    termios.tcsetattr(fd, termios.TCSADRAIN, tty_attr_old)
    sys.exit(1)
def reconnect():
    global reconnect_id, is_reconnect, s
    is_reconnect = True
    packet_id = "reconnect%d" % reconnect_id
    console.lock("*** reconnecting ***", packet_id + "_2")
    reconnect_id += 1
    del action[s.fileno()]
    try:
        poll.unregister(s)
        s.close()
        time.sleep(1)
        s = socket.socket()
        poll.register(s, select.EPOLLIN)
        action[s.fileno()] = handle_socket
        console.write("", force=True)
        s.connect((host, port))
        s.send(Packet({'Password': key, 'Command': "ver", 'ID': packet_id + "_0"})())
        if args.msglevel is not None:
            s.send(Packet({'Command': "msg_level " + args.msglevel, 'ID': packet_id + "_1"})())
        if args.name is not None:
            s.send(Packet({'Command': "name " + args.name, 'ID': packet_id + "_2"})())
        if args.contest is not None:
            s.send(Packet({'Command': "select_contest " + args.contest, 'ID': packet_id + "_3"})())
    except IOError as e:
        console.write("\033[31;1mexception while reconnecting: " + str(e) + "\033[0m\n")
        queue.append((reconnect, ()))
def handle_socket( handle, events ):
    global count  # module-level error counter checked by the main loop
    if events & select.EPOLLIN:
        events &= ~select.EPOLLIN
        try:
            parser.add(s.recv(4096, socket.MSG_DONTWAIT))
            queue.append((handle_parser, ()))
        except IOError as e:
            console.write("\033[31;1mlost connection to testsys: recv: " + str(e) + "\033[0m\n", force=True)
            queue.append((reconnect, ()))
    if events & select.EPOLLERR:
        events &= ~select.EPOLLERR
        console.write("\033[31;1mlost connection to testsys: err\033[0m\n", force=True)
        queue.append((reconnect, ()))
    if events & select.EPOLLHUP:
        events &= ~select.EPOLLHUP
        console.write("\033[31;1mlost connection to testsys: hup\033[0m\n", force=True)
        queue.append((reconnect, ()))
    if events != 0:
        console.write("ERROR: cannot handle event %d (h=%d)\n" % (events, handle), force=True)
        count = 200
def handle_stdin( handle, events ):
    global count  # same module-level error counter as in handle_socket
    if events & select.EPOLLIN:
        events &= ~select.EPOLLIN
        console.add(sys.stdin.read(4096))
    if events != 0:
        console.write("ERROR: cannot handle event %d (h=%d)\n" % (events, handle), force=True)
        count = 4
def handle_parser():
    global is_reconnect
    for packet in parser():
        # console.write("[debug] work with packet %s\n" % str(packet))
        if 'Log' in packet:
            console.write(packet['Log'] + '\n')
        if 'Message' in packet:
            if packet['Message'] is not None:
                message = packet['Message']
                if message[-1] != '\n': message += '\n'
                console.write(message)
            else:
                console.write('\033[31;1mERROR: “Message” field exists in packet but is None.\033[0m\n')
        if 'Chat' in packet:
            console.write('\033[1m' + packet['Chat'] + '\033[0m\n')
        if 'ID' in packet:
            if console.state is Console.LOCK and console.value == packet['ID']:
                if is_reconnect:
                    console.unlock('', wait=False)
                    is_reconnect = False
                else:
                    console.unlock('... press any key ...')
        # TODO: check for ignored keys
is_reconnect = False
action = {s.fileno() : handle_socket, sys.stdin.fileno(): handle_stdin}
parser = PacketParser()
count = 0
while True:
    queue = []
    # console.write("[debug] ready to poll\n", force=True)
    try:
        for handle, events in poll.poll():
            # console.write("[debug] poll: handle=%d, event_mask=%d (IN=%d,OUT=%d,ERR=%d)\n" % (handle, events, select.EPOLLIN, select.EPOLLOUT, select.EPOLLERR), force=True)
            if handle in action:
                queue.append((action[handle], (handle, events)))
            else:
                console.write("ERROR: cannot handle %d\n" % handle, force=True)
    except IOError as e:
        console.write("\033[31;1mERROR: " + str(e) + "\033[0m\n")
        queue.append((reconnect, ()))
    except KeyboardInterrupt:
        console.lock("terminated by KeyboardInterrupt", "never")
        print("");
        break
    except Exception as e:
        console.write("\033[31;1mERROR: " + str(e) + "\033[0m\n")
        break
    for f, p in queue:
        # console.write("[debug] next action\n")
        if isinstance(p, tuple):
            f(*p)
        else:
            f(p)
    # console.write("[debug] out of actions\n")
    if count > 10:
        break
termios.tcsetattr(fd, termios.TCSADRAIN, tty_attr_old)
#estSysConsoleTerminal::TestSysConsoleTerminal() : in(), console(), debug(), socket(), mutex(0), lock(), waitID(""), history(), counter(0)
#oid TestSysConsoleTerminal::exec( const String &name, String const &msglevel, const String &server, const String &keyFileName )
#
# File debugFile = File::openWrite("console_term.log");
# TerminalDevice debugConsole = TerminalDevice(console);
# debug.addOutput(debugFile, DEBUG_FULLLOGGING);
# debug.addOutput(debugConsole, DEBUG_EXTRADEBUG);
#
# bool exitFlag = false;
#
# tamias::MethodThread<TestSysConsoleTerminal> thread = tamias::MethodThread<TestSysConsoleTerminal>(this, &TestSysConsoleTerminal::secondLoop);
# while (true) {
#   try {
#     thread.create();
#     dts::PacketParser parser;
#     while (true) {
#       while (parser.packetReady())
#       {
#         Packet packet = parser.nextPacket();
#         for (sizetype i = 0; i < packet.values().size(); i++)
#         {
#           String packetName = escapeString(String::fromUtf8(Packet::ibm8662utf8(packet.values()[i].first)));
#           String packetValue = escapeString(String::fromUtf8(Packet::ibm8662utf8(packet.values()[i].second)));
#           debug.output(DEBUG_FULLLOGGING, "  “%s” = “%s”") << packetName << packetValue;
#         }
#         for (sizetype i = 0; i < packet.values().size(); i++)
#         {
#           String packetName = String::fromUtf8(Packet::ibm8662utf8(packet.values()[i].first));
#           String packetValue = String::fromUtf8(Packet::ibm8662utf8(packet.values()[i].second));
#           else if (packetName != "")
#           {
#             // lock.lock();
#             debug.output(DEBUG_DIAGNOSTIC, "unknow field in packet: “%s” -> “%s”") << packetName << packetValue; // TODO: semaphore!!!
#             // lock.unlock();
#           }
#         }
#       }
#     }
#     // lock.lock();
#     thread.cancel();
#     // lock.unlock();
#     console.setInput(" *** disconnected ***", 0);
#     socket.disconnect();
#     if (exitFlag)
#       break;
#     tamias::Thread::sleep(1);
#   } catch (tamias::Exception &e ) {
#     debug.output(DEBUG_INFORMATION, "exception!");
#     socket.disconnect();
#   }
# }
#
#
#tring TestSysConsoleTerminal::readCommand()
#
# Vector <Pair <String, sizetype> > currentHistory;
# for (sizetype i = 0; i < history.size(); i++)
#   currentHistory.pushBack(makePair(history[i], history[i].length()));
# sizetype historyIndex = currentHistory.size();
# currentHistory.pushBack(makePair(String(""), 0));
#/  String command = "";
#/  sizetype cursor = 0;
# while (true)
# {
#   String &command = currentHistory[historyIndex].first;
#   sizetype &cursor = currentHistory[historyIndex].second;
#   // lock.lock();
#   console.setInput("> " + command, cursor + 2);
#   // lock.unlock();
#   int key = in.nextKey();
#   switch (key)
#   {
#     case TerminalReader::KEY_UP:
#       if (historyIndex > 0)
#         historyIndex--;
#       break;
#     case TerminalReader::KEY_DOWN:
#       if (historyIndex < currentHistory.size() - 1)
#         historyIndex++;
#       break;
#     case TerminalReader::KEY_LEFT:
#     case TerminalReader::KEY_RIGHT:
#     case TerminalReader::KEY_HOME:
#     case TerminalReader::KEY_END:
#     case TerminalReader::KEY_BACKSPACE:
#     case TerminalReader::KEY_BACKSPACE2:
#     case TerminalReader::KEY_TAB:
#     case TerminalReader::KEY_DELETE:
#     case TerminalReader::KEY_ENTER:
#       // lock.lock();
#       console.setInput("", 0);
#       console.output("> " + command + "\n");
#       // lock.unlock();
#       return command;
#     default:
#   }
# }
#
#
#oid TestSysConsoleTerminal::secondLoop()
#
# while (true)
# {
#   String command = readCommand();
#/    bool was = false;
#/    for (sizetype i = 0; i < history.size(); i++)
#/      if (history[i] == command)
#/        was = true, i = history.size();
#/  TODO: better ignore dups
#   if (history.size() == 0 || history[history.size() - 1] != command)
#     history.pushBack(command);
#/ TODO:     while (history.size() >= 100)
#/    {
#/      for (sizetype i = 1; i < history.size(); i++)
#/        history[i - 1] = history[i];
#/      history.popBack();
#/    }
#   // lock.lock();
#   console.setInput(" --- waiting for testsys outcome ---", 0);
#   // lock.unlock();
#   // TODO: exit by Ctrl+D
#   String id = tamias::Format::intToString(counter++);
#   while (id.length() < 8) id = '0' + id; id = "id_" + id;
#   Packet packet;
#   packet.addValue("Command", Packet::utf82ibm866(command.toUtf8()));
#   packet.addValue("ID", id.toUtf8());
#   // lock.lock();
#   waitID = id;
#   mutex.set(1);
#   socket.write(packet.result());
#   // lock.unlock();
#   mutex.wait(1);
#   // lock.lock();
#   console.setInput(" --- press any key ---", 0);
#   // lock.unlock();
#   in.nextKey();
#   mutex.set(0, true);
# }
#
#
#
#namespace dts {
#  class TestSysConsoleTerminal   {
#    public:
#      TestSysConsoleTerminal();
#      ~TestSysConsoleTerminal();
#      void exec( const tamias::String &name, tamias::String const &msglevel, const tamias::String &server, const tamias::String &keyFileName );
#
#    private:
#      enum DebugLevel {
#        DEBUG_NOLOGGING = 0,
#        DEBUG_FATAL = 1,
#        DEBUG_ERROR = 2,
#        DEBUG_WARNING = 3,
#        DEBUG_INFORMATION = 4,
#        DEBUG_DIAGNOSTIC = 5,
#        DEBUG_DEBUG = 6,
#        DEBUG_EXTRADEBUG = 7,
#        DEBUG_FULLLOGGING = 8
#      };
#
#      burunduk3::TerminalReader in;
#      burunduk3::TerminalConsole console;
#      tamias::wtf::DebugOutput debug;
#      tamias::TcpClientSocket socket;
#      tamias::Mutex mutex;
#      tamias::Semaphore lock;
#      tamias::String waitID;
#      tamias::Vector <tamias::String> history;
#      tamias::Vector <tamias::String> testsysCommands;
#      int counter;
#  
#      tamias::String readCommand();
#      tamias::String tabCompletion( const tamias::String &prefix );
#      void secondLoop();
#  
#      TestSysConsoleTerminal( const TestSysConsoleTerminal &termninal );
#      TestSysConsoleTerminal& operator = ( const TestSysConsoleTerminal &terminal );
#  };
#}
 | 
	gpl-3.0 | 7,367,696,288,431,108,000 | 36.91619 | 174 | 0.550035 | false | 
| 
	deculler/DataScienceTableDemos | 
	CalGrads/timetable.py | 
	1 | 
	7684 | 
	from datascience import Table
import numpy as np
from scipy.interpolate import UnivariateSpline
class TimeTable(Table):
    """Table with a designated column as a sequence of times in the first column."""
    def __init__(self, *args, time_column = 'Year'):
        Table.__init__(self, *args)
        self.time_column = time_column
    
    def clone_bare(self):
        return TimeTable(time_column = self.time_column)
    
    def clone_time(self):
        return self.clone_bare().with_column(self.time_column, self[self.time_column])
            
    @classmethod
    def from_table(cls, tbl, time_col):
        ttbl = cls(time_column = time_col)
        for label in tbl.labels:
            ttbl[label] = tbl[label]
        return ttbl
    
    def __getattr__(self, name):
        def wrapper(*args, **kwargs):
            # Wrap the superclass method to coerce any Table result that
            # still carries the time column back into a TimeTable.
            tbl = getattr(Table, name)(self, *args, **kwargs)
            if isinstance(tbl, Table) and self.time_column in tbl.labels:
                return TimeTable.from_table(tbl, self.time_column)
            else:
                return tbl
        print("Get TimeTable Attr", name)
        if hasattr(Table, name):
            return wrapper
        else:
            raise AttributeError(name)
            
    @classmethod
    def by_time(cls, tbl, time_col, category_col, collect_col, collect=sum):
        """Construct a time table by aggregating rows of each category by year."""
        tbl_by_year = tbl.select([category_col, time_col, collect_col]).pivot(category_col, time_col, 
                                                                              collect_col, collect=collect)
        return cls(tbl_by_year.labels, time_column=time_col).append(tbl_by_year)
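    # Hedged usage sketch for by_time (toy data, assuming the datascience
    # Table API):
    #   raw = Table().with_columns('Year',  [2000, 2000, 2001, 2001],
    #                              'Major', ['EECS', 'Math', 'EECS', 'Math'],
    #                              'Count', [10, 5, 12, 6])
    #   tt = TimeTable.by_time(raw, 'Year', 'Major', 'Count')
    # tt then has one row per Year and one summed Count column per Major.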
    
    @property
    def categories(self):
        return [label for label in self.labels if label != self.time_column]
    
    # TimeTable methods utilizing time_column
    def order_cols(self):
        """Create a TimeTable with categories ordered by the values in last row."""
        def col_key(label):
            return self.row(self.num_rows-1)[self.labels.index(label)]
        order = sorted(self.categories, key=col_key, reverse=True)
        tbl = self.copy()
        for label in order:
            tbl.move_to_end(label)
        return tbl
    
    def oplot(self, **kwargs):
        return self.order_cols().plot(self.time_column, **kwargs)
    
    def top(self, n):
        """Create a new TimeTable containing the n largest columns."""
        ttbl = self.order_cols()
        return ttbl.select(ttbl.labels[0:n+1])
    
    def after(self, timeval):
        return self.where(self[self.time_column] >= timeval)
    
    def sum_rows(self):
        """Sum the rows in a TimeTable besides the time column."""
        tbl = self.drop(self.time_column)
        return [sum(row) for row in tbl.rows]
    
    def apply_cols(self, fun):
        """Apply a function to the non-time columns of TimeTable."""
        return Table().with_columns([(lbl, fun(self[lbl])) for lbl in self.categories])
    
    def apply_all(self, fun):
        ttbl = TimeTable(time_column = self.time_column)
        for lbl in self.labels:
            if lbl == self.time_column:
                ttbl[lbl] = self[self.time_column]
            else:
                ttbl[lbl] = self.apply(fun, lbl)
        return ttbl
   
    def ratio(self, tbl_denom):
        """Create ratio of a TimeTable to a matching one."""
        rtbl = TimeTable(time_column = self.time_column).with_column(self.time_column, self[self.time_column])
        for label in self.categories:
            rtbl[label] = self[label] / tbl_denom[label]
        return rtbl
    
    def normalize(self, col_label):
        """Normalize each column of a timetable by a particular one"""
        rtbl = TimeTable(time_column = self.time_column).with_column(self.time_column, self[self.time_column])
        for label in self.categories:
            rtbl[label] = self[label] / self[col_label]
        return rtbl
    
    def delta(self):
        """Construct a TimeTableable of successive differences down each non-time column."""
        delta_tbl = self.clone_bare()
        delta_tbl[self.time_column] = self[self.time_column][1:]
        for col in self.categories:
            delta_tbl[col] = self[col][1:] - self[col][:-1]
        return delta_tbl
    
    def fill(self, interval=1):
        times = [t for t in np.arange(self[self.time_column][0], self[self.time_column][-1] + interval, interval)]
        ftbl = TimeTable(time_column = self.time_column).with_column(self.time_column, times)
        for col in self.categories:
            spl = UnivariateSpline(self[self.time_column], self[col])
            ftbl[col] = spl(times)
        return ftbl
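    # Hedged note: UnivariateSpline smooths by default (s is not forced to
    # 0), so fill() values need not pass through the observed points;
    # interp() below is the exact piecewise-linear counterpart.  For times
    # [0, 2, 4] and values [0, 4, 8]:
    #   np.interp([0, 1, 2, 3, 4], [0, 2, 4], [0, 4, 8]) -> [0, 2, 4, 6, 8]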
    
    def interp(self, interval=1):
        times = [t for t in np.arange(self[self.time_column][0], self[self.time_column][-1] + interval, interval)]
        ftbl = TimeTable(time_column = self.time_column).with_column(self.time_column, times)
        for col in self.categories:
            ftbl[col] = np.interp(times, self[self.time_column], self[col])
        return ftbl
    def rel_delta(self):
        """Construct a TimeTableable of successive differences down each non-time column."""
        delta_tbl = self.clone_bare()
        delta_tbl[self.time_column] = self[self.time_column][1:]
        time_delta = self[self.time_column][1:] - self[self.time_column][:-1]
        for col in self.categories:
            delta_tbl[col] = (1+(self[col][1:] - self[col][:-1])/self[col][:-1])/time_delta
        return delta_tbl
    
    def norm_by_row(self, base_row=0):
        """Normalize columns of a TimeTable by a row"""
        normed_tbl = self.clone_time()
        for label in self.categories:
            normed_tbl[label] = self[label]/self[label][base_row]
        return normed_tbl
    
    def norm_by_time(self, time):
        return self.norm_by_row(np.where(self[self.time_column] == time)[0][0])
    
    def sum_cols(self):
        """Sum the columns of TimeTable."""
        csum = 0
        for c in self.categories:
            csum += self[c]
        return csum
    
    def fraction_cols(self):
        """Convert each column to a fraction by row."""
        total = self.sum_cols()
        ftbl = self.clone_time()
        for lbl in self.categories:
            ftbl[lbl] = self[lbl]/total
        return ftbl
    
    def forecast_table(self, past, ahead, inc=1):
        """Project a TimeTable forward.  inc must match the interval"""
        last_time = self[self.time_column][-1]
        past_times = self[self.time_column][-past-1:-1]
        fore_time = np.arange(last_time + inc, last_time + inc + ahead, inc)
        def project(lbl):
            m, b = np.polyfit(past_times, self[lbl][-past-1:-1], 1)
            return [m*time + b for time in fore_time]
        xtbl = Table().with_columns([(self.time_column, fore_time)] + [(label, project(label)) for label in self.categories])
        return self.copy().append(xtbl)
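    # Worked sketch of forecast_table's rule: with past=2, np.polyfit fits a
    # line to the last two observations *excluding* the final row (note the
    # [-past-1:-1] slice).  For times [0, 1, 2] and values [2, 4, 6] it fits
    # m=2, b=2 over t=[0, 1], so the forecast at t=3 is 2*3 + 2 = 8
    # (illustrative numbers only).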
    
    def extend_table(self, ahead, inc=1):
        """Project a TimeTable forward from last interval.  inc must match the interval"""
        last_time = self[self.time_column][-1]
        fore_time = np.arange(last_time + inc, last_time + inc + ahead, inc)
        def project(lbl):
            b = self[lbl][-1]
            m = self[lbl][-1] - self[lbl][-2]
            return [m*(time+1)*inc + b for time in range(ahead)]
                                             
        xtbl = Table().with_columns([(self.time_column, fore_time)] + [(label, project(label)) for label in self.categories])
        return self.copy().append(xtbl)
 | 
	bsd-2-clause | 1,163,413,192,845,707,000 | 40.989071 | 125 | 0.587585 | false | 
| 
	elliotthill/django-oscar | 
	oscar/apps/order/migrations/0019_auto__chg_field_order_billing_address__chg_field_order_user__chg_field.py | 
	1 | 
	37418 | 
	# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from oscar.core.compat import AUTH_USER_MODEL, AUTH_USER_MODEL_NAME
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Changing field 'Order.billing_address'
        db.alter_column(u'order_order', 'billing_address_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.BillingAddress'], null=True, on_delete=models.SET_NULL))
        # Changing field 'Order.user'
        db.alter_column(u'order_order', 'user_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, on_delete=models.SET_NULL, to=orm[AUTH_USER_MODEL]))
        # Changing field 'Order.shipping_address'
        db.alter_column(u'order_order', 'shipping_address_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.ShippingAddress'], null=True, on_delete=models.SET_NULL))
    def backwards(self, orm):
        # Changing field 'Order.billing_address'
        db.alter_column(u'order_order', 'billing_address_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.BillingAddress'], null=True))
        # Changing field 'Order.user'
        db.alter_column(u'order_order', 'user_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm[AUTH_USER_MODEL]))
        # Changing field 'Order.shipping_address'
        db.alter_column(u'order_order', 'shipping_address_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.ShippingAddress'], null=True))
    models = {
        u'address.country': {
            'Meta': {'ordering': "('-display_order', 'name')", 'object_name': 'Country'},
            'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
            'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
            'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}),
            'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
        },
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        AUTH_USER_MODEL: {
            'Meta': {'object_name': AUTH_USER_MODEL_NAME},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'catalogue.attributeentity': {
            'Meta': {'object_name': 'AttributeEntity'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
            'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': u"orm['catalogue.AttributeEntityType']"})
        },
        u'catalogue.attributeentitytype': {
            'Meta': {'object_name': 'AttributeEntityType'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
        },
        u'catalogue.attributeoption': {
            'Meta': {'object_name': 'AttributeOption'},
            'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': u"orm['catalogue.AttributeOptionGroup']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'catalogue.attributeoptiongroup': {
            'Meta': {'object_name': 'AttributeOptionGroup'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
        },
        u'catalogue.category': {
            'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
            'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
        },
        u'catalogue.option': {
            'Meta': {'object_name': 'Option'},
            'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
        },
        u'catalogue.product': {
            'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
            'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.ProductAttribute']", 'through': u"orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
            'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Category']", 'through': u"orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': u"orm['catalogue.Product']"}),
            'product_class': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'products'", 'null': 'True', 'to': u"orm['catalogue.ProductClass']"}),
            'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
            'rating': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
            'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Product']", 'symmetrical': 'False', 'through': u"orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
            'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': u"orm['catalogue.Product']"}),
            'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
            'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
        },
        u'catalogue.productattribute': {
            'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
            'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
            'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
            'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': u"orm['catalogue.ProductClass']"}),
            'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
        },
        u'catalogue.productattributevalue': {
            'Meta': {'object_name': 'ProductAttributeValue'},
            'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.ProductAttribute']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': u"orm['catalogue.Product']"}),
            'value_boolean': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
            'value_file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'value_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
            'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
        },
        u'catalogue.productcategory': {
            'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Category']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']"})
        },
        u'catalogue.productclass': {
            'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
            'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
            'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
        },
        u'catalogue.productrecommendation': {
            'Meta': {'object_name': 'ProductRecommendation'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': u"orm['catalogue.Product']"}),
            'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
            'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']"})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'customer.communicationeventtype': {
            'Meta': {'object_name': 'CommunicationEventType'},
            'category': ('django.db.models.fields.CharField', [], {'default': "u'Order related'", 'max_length': '255'}),
            'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'email_body_html_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'email_body_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'email_subject_template': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'sms_template': ('django.db.models.fields.CharField', [], {'max_length': '170', 'null': 'True', 'blank': 'True'})
        },
        u'order.billingaddress': {
            'Meta': {'object_name': 'BillingAddress'},
            'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['address.Country']"}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
            'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
            'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
        },
        u'order.communicationevent': {
            'Meta': {'ordering': "['-date_created']", 'object_name': 'CommunicationEvent'},
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['customer.CommunicationEventType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'communication_events'", 'to': u"orm['order.Order']"})
        },
        u'order.line': {
            'Meta': {'object_name': 'Line'},
            'est_dispatch_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_price_before_discounts_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
            'line_price_before_discounts_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
            'line_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
            'line_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lines'", 'to': u"orm['order.Order']"}),
            'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['partner.Partner']"}),
            'partner_line_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'partner_line_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'partner_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'stockrecord': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['partner.StockRecord']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'unit_cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
            'unit_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
            'unit_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
            'unit_retail_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
            'upc': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
        },
        u'order.lineattribute': {
            'Meta': {'object_name': 'LineAttribute'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attributes'", 'to': u"orm['order.Line']"}),
            'option': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_attributes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['catalogue.Option']"}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'order.lineprice': {
            'Meta': {'ordering': "('id',)", 'object_name': 'LinePrice'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'prices'", 'to': u"orm['order.Line']"}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_prices'", 'to': u"orm['order.Order']"}),
            'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
            'price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
            'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
            'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
            'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'})
        },
        u'order.order': {
            'Meta': {'ordering': "['-date_placed']", 'object_name': 'Order'},
            'basket_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.BillingAddress']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'currency': ('django.db.models.fields.CharField', [], {'default': "'GBP'", 'max_length': '12'}),
            'date_placed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
            'guest_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
            'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.ShippingAddress']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'shipping_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'blank': 'True'}),
            'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
            'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
            'shipping_method': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'total_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
            'total_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['{}']".format(AUTH_USER_MODEL)})
        },
        u'order.orderdiscount': {
            'Meta': {'object_name': 'OrderDiscount'},
            'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
            'category': ('django.db.models.fields.CharField', [], {'default': "'Basket'", 'max_length': '64'}),
            'frequency': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'offer_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'offer_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'discounts'", 'to': u"orm['order.Order']"}),
            'voucher_code': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
            'voucher_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
        },
        u'order.ordernote': {
            'Meta': {'object_name': 'OrderNote'},
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'message': ('django.db.models.fields.TextField', [], {}),
            'note_type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notes'", 'to': u"orm['order.Order']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['{}']".format(AUTH_USER_MODEL), 'null': 'True'})
        },
        u'order.paymentevent': {
            'Meta': {'ordering': "['-date_created']", 'object_name': 'PaymentEvent'},
            'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.PaymentEventType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['order.Line']", 'through': u"orm['order.PaymentEventQuantity']", 'symmetrical': 'False'}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_events'", 'to': u"orm['order.Order']"}),
            'reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
            'shipping_event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_events'", 'null': 'True', 'to': u"orm['order.ShippingEvent']"})
        },
        u'order.paymenteventquantity': {
            'Meta': {'object_name': 'PaymentEventQuantity'},
            'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': u"orm['order.PaymentEvent']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_event_quantities'", 'to': u"orm['order.Line']"}),
            'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
        },
        u'order.paymenteventtype': {
            'Meta': {'ordering': "('name',)", 'object_name': 'PaymentEventType'},
            'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
        },
        u'order.shippingaddress': {
            'Meta': {'object_name': 'ShippingAddress'},
            'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['address.Country']"}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'phone_number': ('oscar.models.fields.PhoneNumberField', [], {'max_length': '128', 'blank': 'True'}),
            'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
            'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
            'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
        },
        u'order.shippingevent': {
            'Meta': {'ordering': "['-date_created']", 'object_name': 'ShippingEvent'},
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.ShippingEventType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lines': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'shipping_events'", 'symmetrical': 'False', 'through': u"orm['order.ShippingEventQuantity']", 'to': u"orm['order.Line']"}),
            'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_events'", 'to': u"orm['order.Order']"})
        },
        u'order.shippingeventquantity': {
            'Meta': {'object_name': 'ShippingEventQuantity'},
            'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': u"orm['order.ShippingEvent']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_event_quantities'", 'to': u"orm['order.Line']"}),
            'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
        },
        u'order.shippingeventtype': {
            'Meta': {'ordering': "('name',)", 'object_name': 'ShippingEventType'},
            'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
        },
        u'partner.partner': {
            'Meta': {'object_name': 'Partner'},
            'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'partners'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['{}']".format(AUTH_USER_MODEL)})
        },
        u'partner.stockrecord': {
            'Meta': {'unique_together': "(('partner', 'partner_sku'),)", 'object_name': 'StockRecord'},
            'cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'low_stock_threshold': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'num_allocated': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'num_in_stock': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'partner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stockrecords'", 'to': u"orm['partner.Partner']"}),
            'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'price_currency': ('django.db.models.fields.CharField', [], {'default': "'GBP'", 'max_length': '12'}),
            'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
            'price_retail': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stockrecords'", 'to': u"orm['catalogue.Product']"})
        },
        u'sites.site': {
            'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        }
    }
    complete_apps = ['order'] | 
	bsd-3-clause | -2,684,955,528,462,074,000 | 90.043796 | 224 | 0.562163 | false | 
| 
	danger89/stanford-parser-clientserver | 
	stanford_server.py | 
	1 | 
	3076 | 
	#!/usr/bin/env jython
# -*- coding: utf-8 -*-
# Copyright 2014 by Melroy van den Berg
"""
Stanford Parser Server running on localhost
Requirements
------------
	- Jython >= 2.7 (http://www.jython.org/downloads.html)
	- Pyro4 (https://github.com/irmen/Pyro4)
"""
__author__ = "Melroy van den Berg <[email protected]>"
__version__ = "0.1"
import socket
from select import cpython_compatible_select as select  # Jython's CPython-compatible select()
import sys
import Pyro4.core
import Pyro4.naming
from stanford_interface import StanfordParser
PYRO_NAME = 'stanford.server'
Pyro4.config.SERVERTYPE="thread" # Thread pool based
#Pyro4.config.SERVERTYPE="multiplex" # Select/poll based
hostname="localhost" #socket.gethostname()
class StanfordHelpParser(object):
	"""
	Helper class around the StanfordParser class
	"""
	def __init__(self):
		"""
		Setup the Stanford Parser
		"""
		# The parser jar file should be put on the class path by stanford_lib.py, or added to it manually
		self.parser = StanfordParser(parser_file='./englishPCFG.ser.gz')
	def parse(self, wordList):
		"""
		Parse the word list
		"""
		sentenceObject = self.parser.parse_wordlist(wordList)
		return str(sentenceObject.get_parse())
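
# A minimal client-side sketch (hypothetical usage, not executed here): with
# this server running, a separate Pyro4-enabled process could do
#
#   import Pyro4
#   parser = Pyro4.Proxy("PYRONAME:stanford.server")  # matches PYRO_NAME above
#   print(parser.parse(["The", "quick", "brown", "fox", "jumps", "."]))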
print("initializing services... servertype=%s" % Pyro4.config.SERVERTYPE)
# start a name server (a broadcast server is only started when NOT running on localhost)
nameserverUri, nameserverDaemon, broadcastServer = Pyro4.naming.startNS(host=hostname)
  
print("got a Nameserver, uri=%s" % nameserverUri)
print("ns daemon location string=%s" % nameserverDaemon.locationStr)
print("ns daemon sockets=%s" % nameserverDaemon.sockets)
if broadcastServer:
	print("bc server socket=%s (fileno %d)" % (broadcastServer.sock, broadcastServer.fileno()))
  
# create a Pyro daemon
pyrodaemon=Pyro4.core.Daemon(host=hostname)
print("daemon location string=%s" % pyrodaemon.locationStr)
print("daemon sockets=%s" % pyrodaemon.sockets)
  
# register a server object with the daemon
serveruri=pyrodaemon.register(StanfordHelpParser())
print("server uri=%s" % serveruri)
  
# register it with the embedded nameserver directly
nameserverDaemon.nameserver.register(PYRO_NAME,serveruri)
  
print("Stanford Server is running...")
  
# below is our custom event loop: multiplex the name server, the broadcast
# server and the Pyro daemon over a single select() call until interrupted.
try:
	while True:
		# create sets of the socket objects we will be waiting on
		# (a set provides fast lookup compared to a list)
		nameserverSockets = set(nameserverDaemon.sockets)
		pyroSockets = set(pyrodaemon.sockets)
		rs = []
		if broadcastServer:
			rs = [broadcastServer]  # only the broadcast server is directly usable as a select() object
		rs.extend(nameserverSockets)
		rs.extend(pyroSockets)
		# block for at most 2 seconds so KeyboardInterrupt is handled promptly
		rs, _, _ = select(rs, [], [], 2)
		# dispatch each readable socket to the daemon that owns it
		eventsForNameserver = []
		eventsForDaemon = []
		for s in rs:
			if s is broadcastServer:
				broadcastServer.processRequest()
			elif s in nameserverSockets:
				eventsForNameserver.append(s)
			elif s in pyroSockets:
				eventsForDaemon.append(s)
		if eventsForNameserver:
			nameserverDaemon.events(eventsForNameserver)
		if eventsForDaemon:
			pyrodaemon.events(eventsForDaemon)
except KeyboardInterrupt:
	print("shutting down...")
finally:
	nameserverDaemon.close()
	if broadcastServer:
		broadcastServer.close()
	pyrodaemon.close()
 | 
	apache-2.0 | 5,812,613,648,402,874,000 | 29.156863 | 92 | 0.75065 | false | 